| Index: media/tools/player_x11/x11_video_renderer.cc
|
| diff --git a/media/tools/player_x11/x11_video_renderer.cc b/media/tools/player_x11/x11_video_renderer.cc
|
| index 67c6c5cc7e8ccc95b8350891cb06d6c22fb29429..068b9715af7a9d27dfc9857f58e62eb372c916d5 100644
|
| --- a/media/tools/player_x11/x11_video_renderer.cc
|
| +++ b/media/tools/player_x11/x11_video_renderer.cc
|
| @@ -59,17 +59,10 @@ X11VideoRenderer::X11VideoRenderer(Display* display, Window window)
|
| image_(NULL),
|
| new_frame_(false),
|
| picture_(0),
|
| - use_render_(false),
|
| - use_gl_(false),
|
| - gl_context_(NULL) {
|
| - // Save the instance of the video renderer.
|
| - CHECK(!instance_);
|
| - instance_ = this;
|
| + use_render_(false) {
|
| }
|
|
|
| X11VideoRenderer::~X11VideoRenderer() {
|
| - CHECK(instance_);
|
| - instance_ = NULL;
|
| }
|
|
|
| // static
|
| @@ -81,265 +74,58 @@ bool X11VideoRenderer::IsMediaFormatSupported(
|
| }
|
|
|
| void X11VideoRenderer::OnStop() {
|
| - if (use_gl_) {
|
| - glXMakeCurrent(display_, 0, NULL);
|
| - glXDestroyContext(display_, gl_context_);
|
| - }
|
| if (image_) {
|
| XDestroyImage(image_);
|
| }
|
| - if (use_render_) {
|
| - XRenderFreePicture(display_, picture_);
|
| - }
|
| +  if (use_render_) XRenderFreePicture(display_, picture_);
|
| }
|
|
|
| -static GLXContext InitGLContext(Display* display, Window window) {
|
| - // Some versions of NVIDIA's GL libGL.so include a broken version of
|
| - // dlopen/dlsym, and so linking it into chrome breaks it. So we dynamically
|
| - // load it, and use glew to dynamically resolve symbols.
|
| - // See http://code.google.com/p/chromium/issues/detail?id=16800
|
| - void* handle = dlopen("libGL.so.1", RTLD_LAZY | RTLD_GLOBAL);
|
| - if (!handle) {
|
| - LOG(ERROR) << "Could not find libGL.so.1";
|
| - return NULL;
|
| - }
|
| - if (glxewInit() != GLEW_OK) {
|
| - LOG(ERROR) << "GLXEW failed initialization";
|
| - return NULL;
|
| - }
|
| -
|
| - XWindowAttributes attributes;
|
| - XGetWindowAttributes(display, window, &attributes);
|
| - XVisualInfo visual_info_template;
|
| - visual_info_template.visualid = XVisualIDFromVisual(attributes.visual);
|
| - int visual_info_count = 0;
|
| - XVisualInfo* visual_info_list = XGetVisualInfo(display, VisualIDMask,
|
| - &visual_info_template,
|
| - &visual_info_count);
|
| - GLXContext context = NULL;
|
| - for (int i = 0; i < visual_info_count && !context; ++i) {
|
| - context = glXCreateContext(display, visual_info_list + i, 0,
|
| - True /* Direct rendering */);
|
| - }
|
| -
|
| - XFree(visual_info_list);
|
| - if (!context) {
|
| - return NULL;
|
| - }
|
| -
|
| - if (!glXMakeCurrent(display, window, context)) {
|
| - glXDestroyContext(display, context);
|
| - return NULL;
|
| - }
|
| -
|
| - if (glewInit() != GLEW_OK) {
|
| - LOG(ERROR) << "GLEW failed initialization";
|
| - glXDestroyContext(display, context);
|
| - return NULL;
|
| - }
|
| -
|
| - if (!glewIsSupported("GL_VERSION_2_0")) {
|
| - LOG(ERROR) << "GL implementation doesn't support GL version 2.0";
|
| - glXDestroyContext(display, context);
|
| - return NULL;
|
| - }
|
| -
|
| - return context;
|
| -}
|
| -
|
| -// Matrix used for the YUV to RGB conversion.
|
| -static const float kYUV2RGB[9] = {
|
| - 1.f, 0.f, 1.403f,
|
| - 1.f, -.344f, -.714f,
|
| - 1.f, 1.772f, 0.f,
|
| -};
|
| -
|
| -// Vertices for a full screen quad.
|
| -static const float kVertices[8] = {
|
| - -1.f, 1.f,
|
| - -1.f, -1.f,
|
| - 1.f, 1.f,
|
| - 1.f, -1.f,
|
| -};
|
| -
|
| -// Texture Coordinates mapping the entire texture.
|
| -static const float kTextureCoords[8] = {
|
| - 0, 0,
|
| - 0, 1,
|
| - 1, 0,
|
| - 1, 1,
|
| -};
|
| -
|
| -// Pass-through vertex shader.
|
| -static const char kVertexShader[] =
|
| - "varying vec2 interp_tc;\n"
|
| - "\n"
|
| - "attribute vec4 in_pos;\n"
|
| - "attribute vec2 in_tc;\n"
|
| - "\n"
|
| - "void main() {\n"
|
| - " interp_tc = in_tc;\n"
|
| - " gl_Position = in_pos;\n"
|
| - "}\n";
|
| -
|
| -// YUV to RGB pixel shader. Loads a pixel from each plane and pass through the
|
| -// matrix.
|
| -static const char kFragmentShader[] =
|
| - "varying vec2 interp_tc;\n"
|
| - "\n"
|
| - "uniform sampler2D y_tex;\n"
|
| - "uniform sampler2D u_tex;\n"
|
| - "uniform sampler2D v_tex;\n"
|
| - "uniform mat3 yuv2rgb;\n"
|
| - "\n"
|
| - "void main() {\n"
|
| - " float y = texture2D(y_tex, interp_tc).x;\n"
|
| - " float u = texture2D(u_tex, interp_tc).r - .5;\n"
|
| - " float v = texture2D(v_tex, interp_tc).r - .5;\n"
|
| - " vec3 rgb = yuv2rgb * vec3(y, u, v);\n"
|
| - " gl_FragColor = vec4(rgb, 1);\n"
|
| - "}\n";
|
| -
|
| -// Buffer size for compile errors.
|
| -static const unsigned int kErrorSize = 4096;
|
| -
|
| bool X11VideoRenderer::OnInitialize(media::VideoDecoder* decoder) {
|
| if (!ParseMediaFormat(decoder->media_format(), &width_, &height_))
|
| return false;
|
|
|
| + LOG(INFO) << "Initializing X11 Renderer...";
|
| +
|
| // Resize the window to fit that of the video.
|
| XResizeWindow(display_, window_, width_, height_);
|
|
|
| - gl_context_ = InitGLContext(display_, window_);
|
| - use_gl_ = (gl_context_ != NULL);
|
| -
|
| - if (use_gl_) {
|
| - glMatrixMode(GL_MODELVIEW);
|
| - glLoadIdentity();
|
| - glViewport(0, 0, width_, height_);
|
| -
|
| - // Create 3 textures, one for each plane, and bind them to different
|
| - // texture units.
|
| - glGenTextures(media::VideoSurface::kNumYUVPlanes, textures_);
|
| - glActiveTexture(GL_TEXTURE0);
|
| - glBindTexture(GL_TEXTURE_2D, textures_[0]);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
|
| - glEnable(GL_TEXTURE_2D);
|
| -
|
| - glActiveTexture(GL_TEXTURE1);
|
| - glBindTexture(GL_TEXTURE_2D, textures_[1]);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
|
| - glEnable(GL_TEXTURE_2D);
|
| -
|
| - glActiveTexture(GL_TEXTURE2);
|
| - glBindTexture(GL_TEXTURE_2D, textures_[2]);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
|
| - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
|
| - glEnable(GL_TEXTURE_2D);
|
| -
|
| - GLuint program_ = glCreateProgram();
|
| -
|
| - // Create our YUV->RGB shader.
|
| - GLuint vertex_shader_ = glCreateShader(GL_VERTEX_SHADER);
|
| - const char* vs_source = kVertexShader;
|
| - int vs_size = sizeof(kVertexShader);
|
| - glShaderSource(vertex_shader_, 1, &vs_source, &vs_size);
|
| - glCompileShader(vertex_shader_);
|
| - int result = GL_FALSE;
|
| - glGetShaderiv(vertex_shader_, GL_COMPILE_STATUS, &result);
|
| - if (!result) {
|
| - char log[kErrorSize];
|
| - int len;
|
| - glGetShaderInfoLog(vertex_shader_, kErrorSize - 1, &len, log);
|
| - log[kErrorSize - 1] = 0;
|
| - LOG(FATAL) << log;
|
| - }
|
| - glAttachShader(program_, vertex_shader_);
|
| -
|
| - GLuint fragment_shader_ = glCreateShader(GL_FRAGMENT_SHADER);
|
| - const char* ps_source = kFragmentShader;
|
| - int ps_size = sizeof(kFragmentShader);
|
| - glShaderSource(fragment_shader_, 1, &ps_source, &ps_size);
|
| - glCompileShader(fragment_shader_);
|
| - result = GL_FALSE;
|
| - glGetShaderiv(fragment_shader_, GL_COMPILE_STATUS, &result);
|
| - if (!result) {
|
| - char log[kErrorSize];
|
| - int len;
|
| - glGetShaderInfoLog(fragment_shader_, kErrorSize - 1, &len, log);
|
| - log[kErrorSize - 1] = 0;
|
| - LOG(FATAL) << log;
|
| - }
|
| - glAttachShader(program_, fragment_shader_);
|
| -
|
| - glLinkProgram(program_);
|
| - result = GL_FALSE;
|
| - glGetProgramiv(program_, GL_LINK_STATUS, &result);
|
| - if (!result) {
|
| - char log[kErrorSize];
|
| - int len;
|
| - glGetProgramInfoLog(program_, kErrorSize - 1, &len, log);
|
| - log[kErrorSize - 1] = 0;
|
| - LOG(FATAL) << log;
|
| - }
|
| - glUseProgram(program_);
|
| -
|
| - // Bind parameters.
|
| - glUniform1i(glGetUniformLocation(program_, "y_tex"), 0);
|
| - glUniform1i(glGetUniformLocation(program_, "u_tex"), 1);
|
| - glUniform1i(glGetUniformLocation(program_, "v_tex"), 2);
|
| - int yuv2rgb_location = glGetUniformLocation(program_, "yuv2rgb");
|
| - glUniformMatrix3fv(yuv2rgb_location, 1, GL_TRUE, kYUV2RGB);
|
| -
|
| - int pos_location = glGetAttribLocation(program_, "in_pos");
|
| - glEnableVertexAttribArray(pos_location);
|
| - glVertexAttribPointer(pos_location, 2, GL_FLOAT, GL_FALSE, 0, kVertices);
|
| -
|
| - int tc_location = glGetAttribLocation(program_, "in_tc");
|
| - glEnableVertexAttribArray(tc_location);
|
| - glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, 0,
|
| - kTextureCoords);
|
| -
|
| - // We are getting called on a thread. Release the context so that it can be
|
| - // made current on the main thread.
|
| - glXMakeCurrent(display_, 0, NULL);
|
| - } else {
|
| - // Testing XRender support. We'll use the very basic of XRender
|
| - // so if it presents it is already good enough. We don't need
|
| - // to check its version.
|
| - int dummy;
|
| - use_render_ = XRenderQueryExtension(display_, &dummy, &dummy);
|
| -
|
| - if (use_render_) {
|
| - // If we are using XRender, we'll create a picture representing the
|
| - // window.
|
| - XWindowAttributes attr;
|
| - XGetWindowAttributes(display_, window_, &attr);
|
| -
|
| - XRenderPictFormat* pictformat = XRenderFindVisualFormat(
|
| - display_,
|
| - attr.visual);
|
| - CHECK(pictformat) << "XRENDER does not support default visual";
|
| -
|
| - picture_ = XRenderCreatePicture(display_, window_, pictformat, 0, NULL);
|
| - CHECK(picture_) << "Backing picture not created";
|
| - }
|
| -
|
| - // Initialize the XImage to store the output of YUV -> RGB conversion.
|
| - image_ = XCreateImage(display_,
|
| - DefaultVisual(display_, DefaultScreen(display_)),
|
| - DefaultDepth(display_, DefaultScreen(display_)),
|
| - ZPixmap,
|
| - 0,
|
| - static_cast<char*>(malloc(width_ * height_ * 4)),
|
| - width_,
|
| - height_,
|
| - 32,
|
| - width_ * 4);
|
| - DCHECK(image_);
|
| - }
|
| +  // Check for XRender support. We use only the most basic XRender
|
| +  // features, so if the extension is present it is good enough; there is
|
| +  // no need to check its version.
|
| + int dummy;
|
| + use_render_ = XRenderQueryExtension(display_, &dummy, &dummy);
|
| +
|
| + if (use_render_) {
|
| + // If we are using XRender, we'll create a picture representing the
|
| + // window.
|
| + XWindowAttributes attr;
|
| + XGetWindowAttributes(display_, window_, &attr);
|
| +
|
| + XRenderPictFormat* pictformat = XRenderFindVisualFormat(
|
| + display_,
|
| + attr.visual);
|
| + CHECK(pictformat) << "XRENDER does not support default visual";
|
| +
|
| + picture_ = XRenderCreatePicture(display_, window_, pictformat, 0, NULL);
|
| + CHECK(picture_) << "Backing picture not created";
|
| + }
|
| +
|
| + // Initialize the XImage to store the output of YUV -> RGB conversion.
|
| + image_ = XCreateImage(display_,
|
| + DefaultVisual(display_, DefaultScreen(display_)),
|
| + DefaultDepth(display_, DefaultScreen(display_)),
|
| + ZPixmap,
|
| + 0,
|
| + static_cast<char*>(malloc(width_ * height_ * 4)),
|
| + width_,
|
| + height_,
|
| + 32,
|
| + width_ * 4);
|
| + DCHECK(image_);
|
| +
|
| + // Save this instance.
|
| + DCHECK(!instance_);
|
| + instance_ = this;
|
| return true;
|
| }
|
|
|
| @@ -363,7 +149,7 @@ void X11VideoRenderer::Paint() {
|
| scoped_refptr<media::VideoFrame> video_frame;
|
| GetCurrentFrame(&video_frame);
|
|
|
| - if ((!use_gl_ && !image_) || !video_frame)
|
| +  if (!image_ || !video_frame)
|
| return;
|
|
|
| // Convert YUV frame to RGB.
|
| @@ -375,48 +161,24 @@ void X11VideoRenderer::Paint() {
|
| frame_in.strides[media::VideoSurface::kVPlane]);
|
| DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
|
|
|
| - if (use_gl_) {
|
| - if (glXGetCurrentContext() != gl_context_ ||
|
| - glXGetCurrentDrawable() != window_) {
|
| - glXMakeCurrent(display_, window_, gl_context_);
|
| - }
|
| - for (unsigned int i = 0; i < media::VideoSurface::kNumYUVPlanes; ++i) {
|
| - unsigned int width = (i == media::VideoSurface::kYPlane) ?
|
| - frame_in.width : frame_in.width / 2;
|
| - unsigned int height = (i == media::VideoSurface::kYPlane ||
|
| - frame_in.format == media::VideoSurface::YV16) ?
|
| - frame_in.height : frame_in.height / 2;
|
| - glActiveTexture(GL_TEXTURE0 + i);
|
| - glPixelStorei(GL_UNPACK_ROW_LENGTH, frame_in.strides[i]);
|
| - glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
|
| - GL_LUMINANCE, GL_UNSIGNED_BYTE, frame_in.data[i]);
|
| - }
|
| - } else {
|
| - DCHECK(image_->data);
|
| - media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
|
| - media::YV12 : media::YV16;
|
| - media::ConvertYUVToRGB32(frame_in.data[media::VideoSurface::kYPlane],
|
| - frame_in.data[media::VideoSurface::kUPlane],
|
| - frame_in.data[media::VideoSurface::kVPlane],
|
| - (uint8*)image_->data,
|
| - frame_in.width,
|
| - frame_in.height,
|
| - frame_in.strides[media::VideoSurface::kYPlane],
|
| - frame_in.strides[media::VideoSurface::kUPlane],
|
| - image_->bytes_per_line,
|
| - yuv_type);
|
| - }
|
| + DCHECK(image_->data);
|
| + media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
|
| + media::YV12 : media::YV16;
|
| + media::ConvertYUVToRGB32(frame_in.data[media::VideoSurface::kYPlane],
|
| + frame_in.data[media::VideoSurface::kUPlane],
|
| + frame_in.data[media::VideoSurface::kVPlane],
|
| + (uint8*)image_->data,
|
| + frame_in.width,
|
| + frame_in.height,
|
| + frame_in.strides[media::VideoSurface::kYPlane],
|
| + frame_in.strides[media::VideoSurface::kUPlane],
|
| + image_->bytes_per_line,
|
| + yuv_type);
|
| video_frame->Unlock();
|
| } else {
|
| NOTREACHED();
|
| }
|
|
|
| - if (use_gl_) {
|
| - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
| - glXSwapBuffers(display_, window_);
|
| - return;
|
| - }
|
| -
|
| if (use_render_) {
|
| // If XRender is used, we'll upload the image to a pixmap. And then
|
| // creats a picture from the pixmap and composite the picture over
|
|
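
The last hunk above ends just as the XRender branch of Paint() begins. The trailing comment describes the usual XRender presentation sequence: upload the converted RGB frame into a server-side pixmap, wrap that pixmap in a source picture, and composite it onto the window picture (picture_) created in OnInitialize(). The sketch below only illustrates that sequence; it is not the code from this file, and the helper name and parameters are made up for the example.

#include <X11/Xlib.h>
#include <X11/extensions/Xrender.h>

// Illustrative sketch of the composite step the trailing comment refers to.
// |picture| is the window-backed XRender picture and |image| holds the RGB
// output of the YUV conversion; they correspond to picture_ and image_ in the
// renderer, but this helper is hypothetical.
static void CompositeFrame(Display* display, Window window, Picture picture,
                           XImage* image, int width, int height) {
  int screen = DefaultScreen(display);

  // Upload the converted frame into a server-side pixmap.
  Pixmap pixmap = XCreatePixmap(display, window, width, height,
                                DefaultDepth(display, screen));
  GC gc = XCreateGC(display, pixmap, 0, NULL);
  XPutImage(display, pixmap, gc, image, 0, 0, 0, 0, width, height);
  XFreeGC(display, gc);

  // Wrap the pixmap in a picture and composite it over the window picture.
  XRenderPictFormat* format =
      XRenderFindVisualFormat(display, DefaultVisual(display, screen));
  Picture src = XRenderCreatePicture(display, pixmap, format, 0, NULL);
  XRenderComposite(display, PictOpSrc, src, 0, picture,
                   0, 0, 0, 0, 0, 0, width, height);

  XRenderFreePicture(display, src);
  XFreePixmap(display, pixmap);
}

When the extension is absent, use_render_ stays false and picture_ is never created; the usual fallback is a plain XPutImage() of image_ straight onto the window, with no intermediate pixmap or picture.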
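
One API detail worth noting from the OnInitialize()/OnStop() hunks: the pixel buffer handed to XCreateImage() is malloc'd and never explicitly free'd, because XDestroyImage() releases both the XImage structure and its data pointer. A minimal stand-alone illustration of that create/destroy pairing (hypothetical helpers, same calls as the patch):

#include <stdlib.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>

// Create a 32-bits-per-pixel XImage backed by a malloc'd buffer, as
// OnInitialize() does for the YUV -> RGB output.
static XImage* CreateRgbImage(Display* display, int width, int height) {
  char* data = static_cast<char*>(malloc(width * height * 4));
  return XCreateImage(display,
                      DefaultVisual(display, DefaultScreen(display)),
                      DefaultDepth(display, DefaultScreen(display)),
                      ZPixmap, 0, data, width, height, 32, width * 4);
}

// XDestroyImage() frees the buffer passed above as well as the struct,
// which is why OnStop() needs no matching free().
static void DestroyRgbImage(XImage* image) {
  XDestroyImage(image);
}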
|