Chromium Code Reviews
| Index: ui/gl/gl_surface_glx.cc |
| diff --git a/ui/gl/gl_surface_glx.cc b/ui/gl/gl_surface_glx.cc |
| index 6e1675498ba3bfac0af052f9c28b0043ce37913c..f8e0594de6a420ff3d59c60202c00ba953c1242b 100644 |
| --- a/ui/gl/gl_surface_glx.cc |
| +++ b/ui/gl/gl_surface_glx.cc |
| @@ -14,6 +14,7 @@ extern "C" { |
| #include "base/memory/scoped_ptr.h" |
| #include "base/message_loop.h" |
| #include "base/process_util.h" |
| +#include "base/time.h" |
| #include "third_party/mesa/MesaLib/include/GL/osmesa.h" |
| #include "ui/base/x/x11_util.h" |
| #include "ui/gl/gl_bindings.h" |
| @@ -37,6 +38,12 @@ Display* g_display; |
| const char* g_glx_extensions = NULL; |
| bool g_glx_create_context_robustness_supported = false; |
| bool g_glx_texture_from_pixmap_supported = false; |
| +bool g_glx_oml_sync_control_supported = false; |
| + |
| +// Track support of glXGetMscRateOML separately from GLX_OML_sync_control as a |
| +// whole since on some platforms (e.g. crosbug.com/34585), glXGetMscRateOML |
| +// always fails even though GLX_OML_sync_control is reported as being supported. |
| +bool g_glx_get_msc_rate_oml_supported = false; |
| } // namespace |
| @@ -69,6 +76,10 @@ bool GLSurfaceGLX::InitializeOneOff() { |
| HasGLXExtension("GLX_ARB_create_context_robustness"); |
| g_glx_texture_from_pixmap_supported = |
| HasGLXExtension("GLX_EXT_texture_from_pixmap"); |
| + g_glx_oml_sync_control_supported = |
| + HasGLXExtension("GLX_OML_sync_control"); |
| + g_glx_get_msc_rate_oml_supported = g_glx_oml_sync_control_supported; |
| + |
| initialized = true; |
| return true; |
| @@ -94,6 +105,11 @@ bool GLSurfaceGLX::IsTextureFromPixmapSupported() { |
| return g_glx_texture_from_pixmap_supported; |
| } |
| +// static |
| +bool GLSurfaceGLX::IsOMLSyncControlSupported() { |
| + return g_glx_oml_sync_control_supported; |
| +} |
| + |
| void* GLSurfaceGLX::GetDisplay() { |
| return g_display; |
| } |
| @@ -224,6 +240,73 @@ bool NativeViewGLSurfaceGLX::PostSubBuffer( |
| return true; |
| } |
| +bool NativeViewGLSurfaceGLX::GetVSyncParameters(base::TimeTicks* timebase, |
| + base::TimeDelta* interval) { |
| + if (g_glx_oml_sync_control_supported) { |
|
| [inline review thread]
| jonathan.backer — 2012/10/26 20:31:03:
|   I'm concerned that this may be per monitor (in the … [comment truncated in page capture])
| ajuma — 2012/10/29 17:02:26:
|   AIUI, since we only XOpenDisplay() once, we're onl… [comment truncated in page capture]
|
| + // The actual clock used for the system time returned by glXGetSyncValuesOML |
| + // is unspecified. In practice, the clock used is likely to be either |
| + // CLOCK_REALTIME or CLOCK_MONOTONIC. We test if the returned time is |
| + // "close" (within a minute) to the current time according to either of |
| + // these clocks, and if so, we assume that the corresponding clock was used |
| + // to produce this time. |
| + int64 system_time; |
| + int64 media_stream_counter; |
| + int64 swap_buffer_counter; |
| + if (glXGetSyncValuesOML(g_display, window_, &system_time, |
| + &media_stream_counter, &swap_buffer_counter)) { |
| + struct timespec real_time; |
| + struct timespec monotonic_time; |
| + clock_gettime(CLOCK_REALTIME, &real_time); |
| + clock_gettime(CLOCK_MONOTONIC, &monotonic_time); |
| + |
| + int64 real_time_in_microseconds = |
| + real_time.tv_sec * base::Time::kMicrosecondsPerSecond + |
| + real_time.tv_nsec / base::Time::kNanosecondsPerMicrosecond; |
| + int64 monotonic_time_in_microseconds = |
| + monotonic_time.tv_sec * base::Time::kMicrosecondsPerSecond + |
| + monotonic_time.tv_nsec / base::Time::kNanosecondsPerMicrosecond; |
| + |
| + if (real_time_in_microseconds - system_time <= |
| + base::Time::kMicrosecondsPerMinute) { |
| + // Convert from CLOCK_REALTIME to CLOCK_MONOTONIC. |
| + int64 time_difference = |
| + real_time_in_microseconds - monotonic_time_in_microseconds; |
| + *timebase = base::TimeTicks::FromInternalValue( |
| + system_time - time_difference); |
| + } else if (monotonic_time_in_microseconds - system_time <= |
| + base::Time::kMicrosecondsPerMinute) { |
| + *timebase = base::TimeTicks::FromInternalValue(system_time); |
| + } else { |
| + // We don't know how to interpret system_time. |
| + return false; |
| + } |
| + |
| + // On platforms where glXGetMscRateOML doesn't work, we fall back to the |
| + // assumption that we're displaying 60 frames per second. |
| + const int64 kDefaultIntervalTime = |
| + base::Time::kMicrosecondsPerSecond / 60; |
| + int64 interval_time = kDefaultIntervalTime; |
| + int32 numerator; |
| + int32 denominator; |
| + if (g_glx_get_msc_rate_oml_supported) { |
| + if (glXGetMscRateOML(g_display, window_, &numerator, &denominator)) { |
| + interval_time = |
| + (base::Time::kMicrosecondsPerSecond * denominator) / numerator; |
| + } else { |
| + // Once glXGetMscRateOML has been found to fail, don't try again, |
| + // since each failing call may spew an error message. |
| + g_glx_get_msc_rate_oml_supported = false; |
| + } |
| + } |
| + |
| + *interval = base::TimeDelta::FromMicroseconds(interval_time); |
| + return true; |
| + } |
| + } |
| + |
| + return false; |
| +} |
| + |
| NativeViewGLSurfaceGLX::NativeViewGLSurfaceGLX() |
| : window_(0), |
| config_(NULL) { |