Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 9227007: Version 3.8.6 (Closed) Base URL: http://v8.googlecode.com/svn/trunk/
Patch Set: Created 8 years, 11 months ago
Index: src/x64/lithium-codegen-x64.cc
===================================================================
--- src/x64/lithium-codegen-x64.cc (revision 10404)
+++ src/x64/lithium-codegen-x64.cc (working copy)
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -1755,13 +1755,17 @@
// Branches to a label or falls through with the answer in the z flag.
-// Trashes the temp register and possibly input (if it and temp are aliased).
+// Trashes the temp register.
void LCodeGen::EmitClassOfTest(Label* is_true,
Label* is_false,
Handle<String> class_name,
Register input,
Register temp,
- Register scratch) {
+ Register temp2) {
+ ASSERT(!input.is(temp));
+ ASSERT(!input.is(temp2));
+ ASSERT(!temp.is(temp2));
+
__ JumpIfSmi(input, is_false);
if (class_name->IsEqualTo(CStrVector("Function"))) {
@@ -1782,9 +1786,9 @@
// Faster code path to avoid two compares: subtract lower bound from the
// actual type and do a signed compare with the width of the type range.
__ movq(temp, FieldOperand(input, HeapObject::kMapOffset));
- __ movq(scratch, FieldOperand(temp, Map::kInstanceTypeOffset));
- __ subb(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
- __ cmpb(scratch,
+ __ movq(temp2, FieldOperand(temp, Map::kInstanceTypeOffset));
+ __ subb(temp2, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
+ __ cmpb(temp2,
Immediate(static_cast<int8_t>(LAST_NONCALLABLE_SPEC_OBJECT_TYPE -
FIRST_NONCALLABLE_SPEC_OBJECT_TYPE)));
__ j(above, is_false);
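
A minimal standalone sketch of the one-compare range test used in the hunk above (not V8 code; names are illustrative). Subtracting the lower bound first makes any value below the range wrap around to a large unsigned number, so a single compare against the width of the range followed by a branch on "above" rejects both out-of-range sides:

    #include <cstdint>

    // Mirrors: subb temp2, FIRST_...; cmpb temp2, (LAST_... - FIRST_...); j(above, is_false).
    bool InstanceTypeInRange(uint8_t type, uint8_t lower, uint8_t upper) {
      return static_cast<uint8_t>(type - lower) <=
             static_cast<uint8_t>(upper - lower);
    }
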
@@ -1897,9 +1901,10 @@
Register map = ToRegister(instr->TempAt(0));
__ movq(map, FieldOperand(object, HeapObject::kMapOffset));
__ bind(deferred->map_check()); // Label for calculating code patching.
- __ movq(kScratchRegister, factory()->the_hole_value(),
- RelocInfo::EMBEDDED_OBJECT);
- __ cmpq(map, kScratchRegister); // Patched to cached map.
+ Handle<JSGlobalPropertyCell> cache_cell =
+ factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+ __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+ __ cmpq(map, Operand(kScratchRegister, 0));
__ j(not_equal, &cache_miss, Label::kNear);
// Patched to load either true or false.
__ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
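
In the hunk above, the cached map is no longer an immediate embedded in the compare instruction; the compare now dereferences a global property cell (cmpq map, Operand(kScratchRegister, 0)), so the cached value lives in the cell rather than in the instruction stream. A minimal sketch of that indirection (not V8 code; Cell stands in for JSGlobalPropertyCell):

    // Compare against whatever the cell currently holds, rather than
    // against a constant baked into the generated code.
    struct Cell { const void* value; };

    bool MapMatchesCachedMap(const void* map, const Cell* cache_cell) {
      return map == cache_cell->value;   // cmpq map, Operand(kScratchRegister, 0)
    }
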
@@ -2636,7 +2641,7 @@
__ call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}
- // Setup deoptimization.
+ // Set up deoptimization.
RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
// Restore context.
@@ -2926,6 +2931,38 @@
}
+void LCodeGen::DoRandom(LRandom* instr) {
+ // Having marked this instruction as a call we can use any
+ // registers.
+ ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
+
+ // Choose the right register for the first argument depending on
+ // calling convention.
+#ifdef _WIN64
+ ASSERT(ToRegister(instr->InputAt(0)).is(rcx));
+ Register global_object = rcx;
+#else
+ ASSERT(ToRegister(instr->InputAt(0)).is(rdi));
+ Register global_object = rdi;
+#endif
+
+ __ PrepareCallCFunction(1);
+ __ movq(global_object,
+ FieldOperand(global_object, GlobalObject::kGlobalContextOffset));
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
+
+ // Convert 32 random bits in rax to 0.(32 random bits) in a double
+ // by computing:
+ // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
+ __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
+ __ movd(xmm2, rcx);
+ __ movd(xmm1, rax);
+ __ cvtss2sd(xmm2, xmm2);
+ __ xorps(xmm1, xmm2);
+ __ subsd(xmm1, xmm2);
+}
+
+
void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
TranscendentalCacheStub stub(TranscendentalCache::LOG,
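
The new DoRandom body relies on a bit trick to turn the 32 random bits returned in rax into a double in [0, 1). A minimal standalone sketch of the same computation (not V8 code): OR-ing the random bits into the low 32 mantissa bits of 1.0 x 2^20 gives (1 + bits * 2^-52) * 2^20, and subtracting 1.0 x 2^20 leaves bits * 2^-32:

    #include <cstdint>
    #include <cstring>

    double RandomBitsToDouble(uint32_t random_bits) {
      const double kTwoPow20 = 1048576.0;        // 1.0 x 2^20 (bit pattern 0x4130000000000000)
      uint64_t bits;
      std::memcpy(&bits, &kTwoPow20, sizeof(bits));
      bits |= random_bits;                       // xorps: the low mantissa bits were zero, so XOR == OR
      double biased;
      std::memcpy(&biased, &bits, sizeof(biased));
      return biased - kTwoPow20;                 // subsd: random_bits * 2^-32, in [0, 1)
    }
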
@@ -3510,6 +3547,7 @@
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
bool deoptimize_on_undefined,
+ bool deoptimize_on_minus_zero,
LEnvironment* env) {
Label load_smi, done;
@@ -3537,6 +3575,15 @@
}
// Heap number to XMM conversion.
__ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
+ if (deoptimize_on_minus_zero) {
+ XMMRegister xmm_scratch = xmm0;
+ __ xorps(xmm_scratch, xmm_scratch);
+ __ ucomisd(xmm_scratch, result_reg);
+ __ j(not_equal, &done, Label::kNear);
+ __ movmskpd(kScratchRegister, result_reg);
+ __ testq(kScratchRegister, Immediate(1));
+ DeoptimizeIf(not_zero, env);
+ }
__ jmp(&done, Label::kNear);
// Smi to XMM conversion
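
The deoptimize_on_minus_zero block added above must tell -0.0 apart from +0.0, which compare equal under ucomisd; the sign bit extracted by movmskpd makes the distinction. A minimal standalone sketch of the same test (not V8 code):

    #include <cstdint>
    #include <cstring>

    bool IsMinusZero(double value) {
      if (value != 0.0) return false;    // ucomisd against an all-zero xmm register
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      return (bits >> 63) != 0;          // the sign bit, as reported by movmskpd
    }
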
@@ -3628,6 +3675,7 @@
EmitNumberUntagD(input_reg, result_reg,
instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment());
}
@@ -3747,13 +3795,23 @@
}
+void LCodeGen::DoCheckMapCommon(Register reg,
+ Handle<Map> map,
+ CompareMapMode mode,
+ LEnvironment* env) {
+ Label success;
+ __ CompareMap(reg, map, &success, mode);
+ DeoptimizeIf(not_equal, env);
+ __ bind(&success);
+}
+
+
void LCodeGen::DoCheckMap(LCheckMap* instr) {
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- instr->hydrogen()->map());
- DeoptimizeIf(not_equal, instr->environment());
+ Handle<Map> map = instr->hydrogen()->map();
+ DoCheckMapCommon(reg, map, instr->hydrogen()->mode(), instr->environment());
}
@@ -3819,9 +3877,8 @@
// Check prototype maps up to the holder.
while (!current_prototype.is_identical_to(holder)) {
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
current_prototype =
Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
// Load next prototype object.
@@ -3829,9 +3886,8 @@
}
// Check the holder map.
- __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
- Handle<Map>(current_prototype->map()));
- DeoptimizeIf(not_equal, instr->environment());
+ DoCheckMapCommon(reg, Handle<Map>(current_prototype->map()),
+ ALLOW_ELEMENT_TRANSITION_MAPS, instr->environment());
}
@@ -3855,7 +3911,7 @@
DeoptimizeIf(not_equal, instr->environment());
}
- // Setup the parameters to the stub/runtime call.
+ // Set up the parameters to the stub/runtime call.
__ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
@@ -3956,7 +4012,7 @@
Handle<FixedArray> constant_properties =
instr->hydrogen()->constant_properties();
- // Setup the parameters to the stub/runtime call.
+ // Set up the parameters to the stub/runtime call.
__ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ push(FieldOperand(rax, JSFunction::kLiteralsOffset));
__ Push(Smi::FromInt(instr->hydrogen()->literal_index()));