| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 224 matching lines...) |
| 235 void ToBooleanStub::Generate(MacroAssembler* masm) { | 235 void ToBooleanStub::Generate(MacroAssembler* masm) { |
| 236 Label patch; | 236 Label patch; |
| 237 const Register argument = rax; | 237 const Register argument = rax; |
| 238 const Register map = rdx; | 238 const Register map = rdx; |
| 239 | 239 |
| 240 if (!types_.IsEmpty()) { | 240 if (!types_.IsEmpty()) { |
| 241 __ movq(argument, Operand(rsp, 1 * kPointerSize)); | 241 __ movq(argument, Operand(rsp, 1 * kPointerSize)); |
| 242 } | 242 } |
| 243 | 243 |
| 244 // undefined -> false | 244 // undefined -> false |
| 245 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch); | 245 CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false); |
| 246 | 246 |
| 247 // Boolean -> its value | 247 // Boolean -> its value |
| 248 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch); | 248 CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false); |
| 249 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch); | 249 CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true); |
| 250 | 250 |
| 251 // 'null' -> false. | 251 // 'null' -> false. |
| 252 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch); | 252 CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false); |
| 253 | 253 |
| 254 if (types_.Contains(SMI)) { | 254 if (types_.Contains(SMI)) { |
| 255 // Smis: 0 -> false, all other -> true | 255 // Smis: 0 -> false, all other -> true |
| 256 Label not_smi; | 256 Label not_smi; |
| 257 __ JumpIfNotSmi(argument, ¬_smi, Label::kNear); | 257 __ JumpIfNotSmi(argument, ¬_smi, Label::kNear); |
| 258 // argument contains the correct return value already | 258 // argument contains the correct return value already |
| 259 if (!tos_.is(argument)) { | 259 if (!tos_.is(argument)) { |
| 260 __ movq(tos_, argument); | 260 __ movq(tos_, argument); |
| 261 } | 261 } |
| 262 __ ret(1 * kPointerSize); | 262 __ ret(1 * kPointerSize); |
| 263 __ bind(¬_smi); | 263 __ bind(¬_smi); |
| 264 } else if (types_.NeedsMap()) { | 264 } else if (types_.NeedsMap()) { |
| 265 // If we need a map later and have a Smi -> patch. | 265 // If we need a map later and have a Smi -> patch. |
| 266 __ JumpIfSmi(argument, &patch, Label::kNear); | 266 __ JumpIfSmi(argument, &patch, Label::kNear); |
| 267 } | 267 } |
| 268 | 268 |
| 269 if (types_.NeedsMap()) { | 269 if (types_.NeedsMap()) { |
| 270 __ movq(map, FieldOperand(argument, HeapObject::kMapOffset)); | 270 __ movq(map, FieldOperand(argument, HeapObject::kMapOffset)); |
| 271 | 271 |
| 272 // Everything with a map could be undetectable, so check this now. | 272 if (types_.CanBeUndetectable()) { |
| 273 __ testb(FieldOperand(map, Map::kBitFieldOffset), | 273 __ testb(FieldOperand(map, Map::kBitFieldOffset), |
| 274 Immediate(1 << Map::kIsUndetectable)); | 274 Immediate(1 << Map::kIsUndetectable)); |
| 275 // Undetectable -> false. | 275 // Undetectable -> false. |
| 276 Label not_undetectable; | 276 Label not_undetectable; |
| 277 __ j(zero, ¬_undetectable, Label::kNear); | 277 __ j(zero, ¬_undetectable, Label::kNear); |
| 278 __ Set(tos_, 0); | 278 __ Set(tos_, 0); |
| 279 __ ret(1 * kPointerSize); | 279 __ ret(1 * kPointerSize); |
| 280 __ bind(¬_undetectable); | 280 __ bind(¬_undetectable); |
| 281 } |
| 281 } | 282 } |
| 282 | 283 |
| 283 if (types_.Contains(SPEC_OBJECT)) { | 284 if (types_.Contains(SPEC_OBJECT)) { |
| 284 // spec object -> true. | 285 // spec object -> true. |
| 285 Label not_js_object; | 286 Label not_js_object; |
| 286 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | 287 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); |
| 287 __ j(below, ¬_js_object, Label::kNear); | 288 __ j(below, ¬_js_object, Label::kNear); |
| 288 __ Set(tos_, 1); | 289 // argument contains the correct return value already. |
| 290 if (!tos_.is(argument)) { |
| 291 __ Set(tos_, 1); |
| 292 } |
| 289 __ ret(1 * kPointerSize); | 293 __ ret(1 * kPointerSize); |
| 290 __ bind(¬_js_object); | 294 __ bind(¬_js_object); |
| 291 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 292 // We've seen a spec object for the first time -> patch. | |
| 293 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); | |
| 294 __ j(above_equal, &patch, Label::kNear); | |
| 295 } | 295 } |
| 296 | 296 |
| 297 if (types_.Contains(STRING)) { | 297 if (types_.Contains(STRING)) { |
| 298 // String value -> false iff empty. | 298 // String value -> false iff empty. |
| 299 Label not_string; | 299 Label not_string; |
| 300 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | 300 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); |
| 301 __ j(above_equal, ¬_string, Label::kNear); | 301 __ j(above_equal, ¬_string, Label::kNear); |
| 302 __ movq(tos_, FieldOperand(argument, String::kLengthOffset)); | 302 __ movq(tos_, FieldOperand(argument, String::kLengthOffset)); |
| 303 __ ret(1 * kPointerSize); // the string length is OK as the return value | 303 __ ret(1 * kPointerSize); // the string length is OK as the return value |
| 304 __ bind(¬_string); | 304 __ bind(¬_string); |
| 305 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 306 // We've seen a string for the first time -> patch | |
| 307 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); | |
| 308 __ j(below, &patch, Label::kNear); | |
| 309 } | 305 } |
| 310 | 306 |
| 311 if (types_.Contains(HEAP_NUMBER)) { | 307 if (types_.Contains(HEAP_NUMBER)) { |
| 312 // heap number -> false iff +0, -0, or NaN. | 308 // heap number -> false iff +0, -0, or NaN. |
| 313 Label not_heap_number, false_result; | 309 Label not_heap_number, false_result; |
| 314 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 310 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 315 __ j(not_equal, ¬_heap_number, Label::kNear); | 311 __ j(not_equal, ¬_heap_number, Label::kNear); |
| 316 __ xorps(xmm0, xmm0); | 312 __ xorps(xmm0, xmm0); |
| 317 __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset)); | 313 __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset)); |
| 318 __ j(zero, &false_result, Label::kNear); | 314 __ j(zero, &false_result, Label::kNear); |
| 319 __ Set(tos_, 1); | 315 // argument contains the correct return value already. |
| 316 if (!tos_.is(argument)) { |
| 317 __ Set(tos_, 1); |
| 318 } |
| 320 __ ret(1 * kPointerSize); | 319 __ ret(1 * kPointerSize); |
| 321 __ bind(&false_result); | 320 __ bind(&false_result); |
| 322 __ Set(tos_, 0); | 321 __ Set(tos_, 0); |
| 323 __ ret(1 * kPointerSize); | 322 __ ret(1 * kPointerSize); |
| 324 __ bind(¬_heap_number); | 323 __ bind(¬_heap_number); |
| 325 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 326 // We've seen a heap number for the first time -> patch | |
| 327 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | |
| 328 __ j(equal, &patch, Label::kNear); | |
| 329 } | 324 } |
| 330 | 325 |
| 331 if (types_.Contains(INTERNAL_OBJECT)) { | 326 __ bind(&patch); |
| 332 // internal objects -> true | 327 GenerateTypeTransition(masm); |
| 333 __ Set(tos_, 1); | |
| 334 __ ret(1 * kPointerSize); | |
| 335 } | |
| 336 | |
| 337 if (!types_.IsAll()) { | |
| 338 __ bind(&patch); | |
| 339 GenerateTypeTransition(masm); | |
| 340 } | |
| 341 } | 328 } |
| 342 | 329 |
| 343 | 330 |
| 344 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { | 331 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
| 345 __ PushCallerSaved(save_doubles_); | 332 __ PushCallerSaved(save_doubles_); |
| 346 const int argument_count = 1; | 333 const int argument_count = 1; |
| 347 __ PrepareCallCFunction(argument_count); | 334 __ PrepareCallCFunction(argument_count); |
| 348 #ifdef _WIN64 | 335 #ifdef _WIN64 |
| 349 __ LoadAddress(rcx, ExternalReference::isolate_address()); | 336 __ LoadAddress(rcx, ExternalReference::isolate_address()); |
| 350 #else | 337 #else |
| 351 __ LoadAddress(rdi, ExternalReference::isolate_address()); | 338 __ LoadAddress(rdi, ExternalReference::isolate_address()); |
| 352 #endif | 339 #endif |
| 353 __ CallCFunction( | 340 __ CallCFunction( |
| 354 ExternalReference::store_buffer_overflow_function(masm->isolate()), | 341 ExternalReference::store_buffer_overflow_function(masm->isolate()), |
| 355 argument_count); | 342 argument_count); |
| 356 __ PopCallerSaved(save_doubles_); | 343 __ PopCallerSaved(save_doubles_); |
| 357 __ ret(0); | 344 __ ret(0); |
| 358 } | 345 } |
| 359 | 346 |
| 360 | 347 |
| 361 void ToBooleanStub::CheckOddball(MacroAssembler* masm, | 348 void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
| 362 Type type, | 349 Type type, |
| 363 Heap::RootListIndex value, | 350 Heap::RootListIndex value, |
| 364 bool result, | 351 bool result) { |
| 365 Label* patch) { | |
| 366 const Register argument = rax; | 352 const Register argument = rax; |
| 367 if (types_.Contains(type)) { | 353 if (types_.Contains(type)) { |
| 368 // If we see an expected oddball, return its ToBoolean value tos_. | 354 // If we see an expected oddball, return its ToBoolean value tos_. |
| 369 Label different_value; | 355 Label different_value; |
| 370 __ CompareRoot(argument, value); | 356 __ CompareRoot(argument, value); |
| 371 __ j(not_equal, &different_value, Label::kNear); | 357 __ j(not_equal, &different_value, Label::kNear); |
| 372 __ Set(tos_, result ? 1 : 0); | 358 if (!result) { |
| 359 // If we have to return zero, there is no way around clearing tos_. |
| 360 __ Set(tos_, 0); |
| 361 } else if (!tos_.is(argument)) { |
| 362 // If we have to return non-zero, we can re-use the argument if it is the |
| 363 // same register as the result, because we never see Smi-zero here. |
| 364 __ Set(tos_, 1); |
| 365 } |
| 373 __ ret(1 * kPointerSize); | 366 __ ret(1 * kPointerSize); |
| 374 __ bind(&different_value); | 367 __ bind(&different_value); |
| 375 } else if (types_.Contains(INTERNAL_OBJECT)) { | |
| 376 // If we see an unexpected oddball and handle internal objects, we must | |
| 377 // patch because the code for internal objects doesn't handle it explicitly. | |
| 378 __ CompareRoot(argument, value); | |
| 379 __ j(equal, patch); | |
| 380 } | 368 } |
| 381 } | 369 } |
| 382 | 370 |
| 383 | 371 |
| 384 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { | 372 void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 385 __ pop(rcx); // Get return address, operand is now on top of stack. | 373 __ pop(rcx); // Get return address, operand is now on top of stack. |
| 386 __ Push(Smi::FromInt(tos_.code())); | 374 __ Push(Smi::FromInt(tos_.code())); |
| 387 __ Push(Smi::FromInt(types_.ToByte())); | 375 __ Push(Smi::FromInt(types_.ToByte())); |
| 388 __ push(rcx); // Push return address. | 376 __ push(rcx); // Push return address. |
| 389 // Patch the caller to an appropriate specialized stub and return the | 377 // Patch the caller to an appropriate specialized stub and return the |
| (...skipping 5195 matching lines...) |
| 5585 | 5573 |
| 5586 // Fall through when we need to inform the incremental marker. | 5574 // Fall through when we need to inform the incremental marker. |
| 5587 } | 5575 } |
| 5588 | 5576 |
| 5589 | 5577 |
| 5590 #undef __ | 5578 #undef __ |
| 5591 | 5579 |
| 5592 } } // namespace v8::internal | 5580 } } // namespace v8::internal |
| 5593 | 5581 |
| 5594 #endif // V8_TARGET_ARCH_X64 | 5582 #endif // V8_TARGET_ARCH_X64 |
| OLD | NEW |
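Reviewer note: for readers skimming the diff, the new version of ToBooleanStub::Generate boils down to the ToBoolean decision ladder below. This is a minimal host-side C++ sketch, not V8 API; the Value struct and Kind enum are hypothetical stand-ins for V8's tagged values, and the branch order simply restates what the stub now emits (undefined -> false, Boolean -> its value, null -> false, Smi 0 -> false, undetectable maps -> false, spec objects -> true, strings -> false iff empty, heap numbers -> false iff +0, -0 or NaN). In the real stub, any type not recorded in types_ falls through to the now-unconditional patch label and GenerateTypeTransition.

#include <cmath>
#include <cstdint>
#include <string>

// Hypothetical stand-ins for V8's tagged values; not the real API.
enum class Kind { kUndefined, kBoolean, kNull, kSmi, kString, kHeapNumber, kSpecObject };

struct Value {
  Kind kind;
  bool boolean = false;       // valid when kind == kBoolean
  int32_t smi = 0;            // valid when kind == kSmi
  std::string str;            // valid when kind == kString
  double number = 0.0;        // valid when kind == kHeapNumber
  bool undetectable = false;  // mirrors Map::kIsUndetectable for heap objects
};

bool ToBoolean(const Value& v) {
  // Oddballs and Smis are checked before the map is ever loaded.
  switch (v.kind) {
    case Kind::kUndefined: return false;        // undefined -> false
    case Kind::kBoolean:   return v.boolean;    // Boolean -> its value
    case Kind::kNull:      return false;        // 'null' -> false
    case Kind::kSmi:       return v.smi != 0;   // 0 -> false, all other -> true
    default:               break;               // heap object: fall through to map checks
  }
  if (v.undetectable) return false;             // undetectable -> false
  switch (v.kind) {
    case Kind::kSpecObject: return true;                     // spec object -> true
    case Kind::kString:     return !v.str.empty();           // false iff empty
    case Kind::kHeapNumber: return !(v.number == 0.0 ||      // false iff +0 or -0
                                     std::isnan(v.number));  // ... or NaN
    default:                return true;  // real stub: jump to patch, GenerateTypeTransition
  }
}

The other behavioural change in this diff is about register reuse rather than semantics: when tos_ already aliases the argument register and the result must be non-zero, the new code skips the redundant Set(tos_, 1), because the argument itself is guaranteed to be a non-zero value on those paths.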