| Index: native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc |
| diff --git a/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc b/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc |
| index 831d98ae3636dc75c09a5f66e58c3e161924cdee..a68d1b45f1ad40e6a8bed0104b661c8c1847f931 100644 |
| --- a/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc |
| +++ b/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc |
| @@ -2,7 +2,11 @@ |
| // Use of this source code is governed by a BSD-style license that can be |
| // found in the LICENSE file. |
| -#include "GLES2/gl2.h" |
| +#include <GLES2/gl2.h> |
| +#include <GLES2/gl2ext.h> |
| + |
| +#include <vector> |
| + |
| #include "ppapi/c/pp_errors.h" |
| #include "ppapi/c/ppb_opengles2.h" |
| #include "ppapi/cpp/completion_callback.h" |
| @@ -53,32 +57,49 @@ class MediaStreamVideoDemoInstance : public pp::Instance, |
| // pp::Graphics3DClient implementation. |
| virtual void Graphics3DContextLost() { |
| InitGL(); |
| - CreateYUVTextures(); |
| + CreateTextures(); |
| Render(); |
| } |
| private: |
| + void DrawYUV(); |
| + void DrawRGB(); |
| void Render(); |
| // GL-related functions. |
| void InitGL(); |
| - GLuint CreateTexture(int32_t width, int32_t height, int unit); |
| + GLuint CreateTexture(int32_t width, int32_t height, int unit, bool rgba); |
| void CreateGLObjects(); |
| void CreateShader(GLuint program, GLenum type, const char* source, int size); |
| void PaintFinished(int32_t result); |
| - void CreateYUVTextures(); |
| + void CreateTextures(); |
| + void ConfigureTrack(); |
| - // Callback that is invoked when new frames are recevied. |
| + |
| + // MediaStreamVideoTrack callbacks. |
| + void OnConfigure(int32_t result); |
| void OnGetFrame(int32_t result, pp::VideoFrame frame); |
| pp::Size position_size_; |
| bool is_painting_; |
| bool needs_paint_; |
| + bool is_bgra_; |
| + GLuint program_yuv_; |
| + GLuint program_rgb_; |
| + GLuint buffer_; |
| GLuint texture_y_; |
| GLuint texture_u_; |
| GLuint texture_v_; |
| + GLuint texture_rgb_; |
| pp::MediaStreamVideoTrack video_track_; |
| pp::CompletionCallbackFactory<MediaStreamVideoDemoInstance> callback_factory_; |
| + std::vector<int32_t> attrib_list_; |
| + |
| + // MediaStreamVideoTrack attributes: |
| + bool need_config_; |
| + PP_VideoFrame_Format attrib_format_; |
| + int32_t attrib_width_; |
| + int32_t attrib_height_; |
| // Unowned pointers. |
| const struct PPB_OpenGLES2* gles2_if_; |
|
Sam Clegg
2014/03/28 18:24:56
Why not just use -lgles2_ppapi and use the GLES2 API directly?
Peng
2014/04/01 15:36:37
Done.
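A minimal sketch of what that suggestion amounts to, assuming the example links against the SDK's GLES2 helper library; the InitGLBindings() helper below is hypothetical and would be called once the pp::Graphics3D context exists. After binding, the plain gl* entry points can replace the gles2_if_ indirection used throughout this file.

    // Added at the top of the file (sketch):
    #include <GLES2/gl2.h>
    #include "ppapi/cpp/module.h"
    #include "ppapi/lib/gl/gles2/gl2ext_ppapi.h"

    // Hypothetical helper: bind the GLES2 entry points to the Pepper context.
    bool MediaStreamVideoDemoInstance::InitGLBindings() {
      // One-time initialization of the GLES2-over-Pepper dispatch table.
      if (!glInitializePPAPI(pp::Module::Get()->get_browser_interface()))
        return false;
      // Route subsequent gl* calls to this instance's Graphics3D context.
      glSetCurrentContextPPAPI(context_->pp_resource());
      return true;
    }

    // With the bindings in place, a call such as
    //   gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
    // becomes simply
    //   glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);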
|
| @@ -95,10 +116,16 @@ MediaStreamVideoDemoInstance::MediaStreamVideoDemoInstance( |
| pp::Graphics3DClient(this), |
| is_painting_(false), |
| needs_paint_(false), |
| + is_bgra_(false), |
| texture_y_(0), |
| texture_u_(0), |
| texture_v_(0), |
| + texture_rgb_(0), |
| callback_factory_(this), |
| + need_config_(false), |
| + attrib_format_(PP_VIDEOFRAME_FORMAT_I420), |
| + attrib_width_(0), |
| + attrib_height_(0), |
| context_(NULL) { |
| gles2_if_ = static_cast<const struct PPB_OpenGLES2*>( |
| module->GetBrowserInterface(PPB_OPENGLES2_INTERFACE)); |
| @@ -126,17 +153,34 @@ void MediaStreamVideoDemoInstance::DidChangeView( |
| void MediaStreamVideoDemoInstance::HandleMessage(const pp::Var& var_message) { |
| if (!var_message.is_dictionary()) |
|
Sam Clegg
2014/03/28 18:24:56
Report this case as an error somehow (even just st
Peng
2014/04/01 15:36:37
Done.
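A minimal sketch of the error reporting being asked for, using a hypothetical ReportError() helper that posts a string back to the embedding page; the JavaScript side can then log or display it.

    // Hypothetical helper: surface errors to the page via PostMessage().
    void MediaStreamVideoDemoInstance::ReportError(const std::string& message) {
      PostMessage(pp::Var("ERROR: " + message));
    }

    // Usage at the top of HandleMessage():
    //   if (!var_message.is_dictionary()) {
    //     ReportError("Invalid message: expected a dictionary");
    //     return;
    //   }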
|
| return; |
| - pp::VarDictionary var_dictionary_message(var_message); |
| - pp::Var var_track = var_dictionary_message.Get("track"); |
| - if (!var_track.is_resource()) |
| - return; |
| - |
| - pp::Resource resource_track = var_track.AsResource(); |
| - video_track_ = pp::MediaStreamVideoTrack(resource_track); |
| - |
| - video_track_.GetFrame(callback_factory_.NewCallbackWithOutput( |
| - &MediaStreamVideoDemoInstance::OnGetFrame)); |
| + pp::VarDictionary var_dictionary_message(var_message); |
| + std::string command = var_dictionary_message.Get("command").AsString(); |
| + |
| + if (command == "init") { |
| + pp::Var var_track = var_dictionary_message.Get("track"); |
| + if (!var_track.is_resource()) |
| + return; |
| + pp::Resource resource_track = var_track.AsResource(); |
| + video_track_ = pp::MediaStreamVideoTrack(resource_track); |
| + ConfigureTrack(); |
| + } else if (command == "format") { |
| + std::string str_format = var_dictionary_message.Get("format").AsString(); |
|
binji
2014/04/01 00:00:52
nit: extra space after =
Peng
2014/04/01 15:36:37
Done.
|
| + if (str_format == "YV12") { |
| + attrib_format_ = PP_VIDEOFRAME_FORMAT_YV12; |
| + } else if (str_format == "I420") { |
| + attrib_format_ = PP_VIDEOFRAME_FORMAT_I420; |
| + } else if (str_format == "BGRA") { |
| + attrib_format_ = PP_VIDEOFRAME_FORMAT_BGRA; |
| + } else { |
| + attrib_format_ = PP_VIDEOFRAME_FORMAT_UNKNOWN; |
| + } |
| + need_config_ = true; |
| + } else if (command == "size") { |
| + attrib_width_ = var_dictionary_message.Get("width").AsInt(); |
| + attrib_height_ = var_dictionary_message.Get("height").AsInt(); |
| + need_config_ = true; |
| + } |
|
Sam Clegg
2014/03/28 18:24:56
else { error report in some way }
Peng
2014/04/01 15:36:37
Done.
|
| } |
| void MediaStreamVideoDemoInstance::InitGL() { |
| @@ -172,12 +216,77 @@ void MediaStreamVideoDemoInstance::InitGL() { |
| CreateGLObjects(); |
| } |
| +void MediaStreamVideoDemoInstance::DrawYUV() { |
| + PP_Resource context = context_->pp_resource(); |
| + static const float kColorMatrix[9] = { |
| + 1.1643828125f, 1.1643828125f, 1.1643828125f, |
| + 0.0f, -0.39176171875f, 2.017234375f, |
| + 1.59602734375f, -0.81296875f, 0.0f |
| + }; |
| + |
| + gles2_if_->UseProgram(context, program_yuv_); |
| + gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation( |
| + context, program_yuv_, "y_texture"), 0); |
| + gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation( |
| + context, program_yuv_, "u_texture"), 1); |
| + gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation( |
| + context, program_yuv_, "v_texture"), 2); |
| + gles2_if_->UniformMatrix3fv( |
| + context, |
| + gles2_if_->GetUniformLocation(context, program_yuv_, "color_matrix"), |
| + 1, GL_FALSE, kColorMatrix); |
| + AssertNoGLError(); |
| + |
| + GLint pos_location = gles2_if_->GetAttribLocation( |
| + context, program_yuv_, "a_position"); |
| + GLint tc_location = gles2_if_->GetAttribLocation( |
| + context, program_yuv_, "a_texCoord"); |
| + AssertNoGLError(); |
| + gles2_if_->EnableVertexAttribArray(context, pos_location); |
| + gles2_if_->VertexAttribPointer(context, pos_location, 2, |
| + GL_FLOAT, GL_FALSE, 0, 0); |
| + gles2_if_->EnableVertexAttribArray(context, tc_location); |
| + gles2_if_->VertexAttribPointer( |
| + context, tc_location, 2, GL_FLOAT, GL_FALSE, 0, |
| + static_cast<float*>(0) + 16); // Skip position coordinates. |
| + AssertNoGLError(); |
| + |
| + gles2_if_->DrawArrays(context, GL_TRIANGLE_STRIP, 0, 4); |
| + AssertNoGLError(); |
| +} |
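The kColorMatrix values are the standard BT.601 limited-range YUV-to-RGB coefficients, stored column-major as UniformMatrix3fv with transpose == GL_FALSE expects. Together with the per-channel offsets the fragment shader subtracts before applying the matrix (roughly 16/255 from Y and 0.5 from U and V; the shader body is unchanged in this hunk), the draw computes, in 0-255 terms:

    R = 1.164 * (Y - 16)                     + 1.596 * (V - 128)
    G = 1.164 * (Y - 16) - 0.392 * (U - 128) - 0.813 * (V - 128)
    B = 1.164 * (Y - 16) + 2.017 * (U - 128)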
| + |
| +void MediaStreamVideoDemoInstance::DrawRGB() { |
| + PP_Resource context = context_->pp_resource(); |
| + gles2_if_->UseProgram(context, program_rgb_); |
| + gles2_if_->Uniform1i(context, |
| + gles2_if_->GetUniformLocation(context, program_rgb_, "rgb_texture"), 3); |
| + AssertNoGLError(); |
| + |
| + GLint pos_location = gles2_if_->GetAttribLocation( |
| + context, program_rgb_, "a_position"); |
| + GLint tc_location = gles2_if_->GetAttribLocation( |
| + context, program_rgb_, "a_texCoord"); |
| + AssertNoGLError(); |
| + gles2_if_->EnableVertexAttribArray(context, pos_location); |
| + gles2_if_->VertexAttribPointer(context, pos_location, 2, |
| + GL_FLOAT, GL_FALSE, 0, 0); |
| + gles2_if_->EnableVertexAttribArray(context, tc_location); |
| + gles2_if_->VertexAttribPointer( |
| + context, tc_location, 2, GL_FLOAT, GL_FALSE, 0, |
| + static_cast<float*>(0) + 16); // Skip position coordinates. |
| + AssertNoGLError(); |
| + |
| + gles2_if_->DrawArrays(context, GL_TRIANGLE_STRIP, 4, 4); |
| +} |
| + |
| void MediaStreamVideoDemoInstance::Render() { |
| PP_DCHECK(!is_painting_); |
| is_painting_ = true; |
| needs_paint_ = false; |
| + |
| if (texture_y_) { |
| - gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4); |
| + DrawRGB(); |
| + DrawYUV(); |
| } else { |
| gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT); |
| } |
| @@ -193,10 +302,11 @@ void MediaStreamVideoDemoInstance::PaintFinished(int32_t result) { |
| } |
| GLuint MediaStreamVideoDemoInstance::CreateTexture( |
| - int32_t width, int32_t height, int unit) { |
| + int32_t width, int32_t height, int unit, bool rgba) { |
| GLuint texture_id; |
| gles2_if_->GenTextures(context_->pp_resource(), 1, &texture_id); |
| AssertNoGLError(); |
| + |
| // Assign parameters. |
| gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0 + unit); |
| gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture_id); |
| @@ -212,11 +322,12 @@ GLuint MediaStreamVideoDemoInstance::CreateTexture( |
| gles2_if_->TexParameterf( |
| context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, |
| GL_CLAMP_TO_EDGE); |
| - |
| // Allocate texture. |
| gles2_if_->TexImage2D( |
| - context_->pp_resource(), GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, |
| - GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL); |
| + context_->pp_resource(), GL_TEXTURE_2D, 0, |
| + rgba ? GL_BGRA_EXT : GL_LUMINANCE, |
| + width, height, 0, |
| + rgba ? GL_BGRA_EXT : GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL); |
| AssertNoGLError(); |
| return texture_id; |
| } |
| @@ -233,7 +344,7 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() { |
| " gl_Position = a_position; \n" |
| "}"; |
| - static const char kFragmentShader[] = |
| + static const char kFragmentShaderYUV[] = |
| "precision mediump float; \n" |
| "varying vec2 v_texCoord; \n" |
| "uniform sampler2D y_texture; \n" |
| @@ -250,60 +361,48 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() { |
| " gl_FragColor = vec4(rgb, 1.0); \n" |
| "}"; |
| - static const float kColorMatrix[9] = { |
| - 1.1643828125f, 1.1643828125f, 1.1643828125f, |
| - 0.0f, -0.39176171875f, 2.017234375f, |
| - 1.59602734375f, -0.81296875f, 0.0f |
| - }; |
| + static const char kFragmentShaderRGB[] = |
| + "precision mediump float; \n" |
| + "varying vec2 v_texCoord; \n" |
| + "uniform sampler2D rgb_texture; \n" |
| + "void main() \n" |
| + "{ \n" |
| + " gl_FragColor = texture2D(rgb_texture, v_texCoord); \n" |
| + "}"; |
| PP_Resource context = context_->pp_resource(); |
| - // Create shader program. |
| - GLuint program = gles2_if_->CreateProgram(context); |
| - CreateShader(program, GL_VERTEX_SHADER, kVertexShader, sizeof(kVertexShader)); |
| - CreateShader( |
| - program, GL_FRAGMENT_SHADER, kFragmentShader, sizeof(kFragmentShader)); |
| - gles2_if_->LinkProgram(context, program); |
| - gles2_if_->UseProgram(context, program); |
| - gles2_if_->DeleteProgram(context, program); |
| - gles2_if_->Uniform1i( |
| - context, gles2_if_->GetUniformLocation(context, program, "y_texture"), 0); |
| - gles2_if_->Uniform1i( |
| - context, gles2_if_->GetUniformLocation(context, program, "u_texture"), 1); |
| - gles2_if_->Uniform1i( |
| - context, gles2_if_->GetUniformLocation(context, program, "v_texture"), 2); |
| - gles2_if_->UniformMatrix3fv( |
| - context, |
| - gles2_if_->GetUniformLocation(context, program, "color_matrix"), |
| - 1, GL_FALSE, kColorMatrix); |
| + // Create shader programs. |
| + program_yuv_ = gles2_if_->CreateProgram(context); |
| + CreateShader(program_yuv_, GL_VERTEX_SHADER, |
| + kVertexShader, sizeof(kVertexShader)); |
|
Sam Clegg
2014/03/28 18:24:56
Maybe don't pass size here and assume NULL termination?
Peng
2014/04/01 15:36:37
Done.
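A sketch of that change, assuming CreateShader()'s signature drops the size parameter: passing NULL as the length array tells ShaderSource to treat each source string as NUL-terminated.

    void MediaStreamVideoDemoInstance::CreateShader(
        GLuint program, GLenum type, const char* source) {
      PP_Resource context = context_->pp_resource();
      GLuint shader = gles2_if_->CreateShader(context, type);
      // NULL length array: the source is read as a NUL-terminated string.
      gles2_if_->ShaderSource(context, shader, 1, &source, NULL);
      gles2_if_->CompileShader(context, shader);
      gles2_if_->AttachShader(context, program, shader);
      gles2_if_->DeleteShader(context, shader);
    }

    // Call sites then become, e.g.:
    //   CreateShader(program_yuv_, GL_VERTEX_SHADER, kVertexShader);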
|
| + CreateShader(program_yuv_, GL_FRAGMENT_SHADER, |
| + kFragmentShaderYUV, sizeof(kFragmentShaderYUV)); |
| + gles2_if_->LinkProgram(context, program_yuv_); |
| + AssertNoGLError(); |
| + |
| + program_rgb_ = gles2_if_->CreateProgram(context); |
| + CreateShader(program_rgb_, GL_VERTEX_SHADER, |
| + kVertexShader, sizeof(kVertexShader)); |
| + CreateShader(program_rgb_, GL_FRAGMENT_SHADER, |
| + kFragmentShaderRGB, sizeof(kFragmentShaderRGB)); |
| + gles2_if_->LinkProgram(context, program_rgb_); |
| AssertNoGLError(); |
| // Assign vertex positions and texture coordinates to buffers for use in |
| // shader program. |
| static const float kVertices[] = { |
| - -1, 1, -1, -1, 1, 1, 1, -1, // Position coordinates. |
| + -1, 1, -1, -1, 0, 1, 0, -1, // Position coordinates. |
| + 0, 1, 0, -1, 1, 1, 1, -1, // Position coordinates. |
| + 0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates. |
| 0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates. |
| }; |
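The enlarged vertex array now packs two quads and their texture coordinates into a single buffer of 32 floats:

    floats  0..7  : positions of the left-half quad  (x in [-1, 0]) - YUV image
    floats  8..15 : positions of the right-half quad (x in [ 0, 1]) - RGB image
    floats 16..23 : texture coordinates for the left-half quad
    floats 24..31 : texture coordinates for the right-half quad

DrawYUV() draws vertices 0-3 and DrawRGB() draws vertices 4-7; both bind a_texCoord at an offset of 16 floats, and because the attribute data is tightly packed, the second draw call's vertices 4-7 automatically read the coordinates stored at floats 24-31.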
| - GLuint buffer; |
| - gles2_if_->GenBuffers(context, 1, &buffer); |
| - gles2_if_->BindBuffer(context, GL_ARRAY_BUFFER, buffer); |
| + gles2_if_->GenBuffers(context, 1, &buffer_); |
| + gles2_if_->BindBuffer(context, GL_ARRAY_BUFFER, buffer_); |
| gles2_if_->BufferData(context, GL_ARRAY_BUFFER, |
| sizeof(kVertices), kVertices, GL_STATIC_DRAW); |
| AssertNoGLError(); |
| - GLint pos_location = gles2_if_->GetAttribLocation( |
| - context, program, "a_position"); |
| - GLint tc_location = gles2_if_->GetAttribLocation( |
| - context, program, "a_texCoord"); |
| - AssertNoGLError(); |
| - gles2_if_->EnableVertexAttribArray(context, pos_location); |
| - gles2_if_->VertexAttribPointer(context, pos_location, 2, |
| - GL_FLOAT, GL_FALSE, 0, 0); |
| - gles2_if_->EnableVertexAttribArray(context, tc_location); |
| - gles2_if_->VertexAttribPointer( |
| - context, tc_location, 2, GL_FLOAT, GL_FALSE, 0, |
| - static_cast<float*>(0) + 8); // Skip position coordinates. |
| - AssertNoGLError(); |
| } |
| void MediaStreamVideoDemoInstance::CreateShader( |
| @@ -316,7 +415,7 @@ void MediaStreamVideoDemoInstance::CreateShader( |
| gles2_if_->DeleteShader(context, shader); |
| } |
| -void MediaStreamVideoDemoInstance::CreateYUVTextures() { |
| +void MediaStreamVideoDemoInstance::CreateTextures() { |
| int32_t width = frame_size_.width(); |
| int32_t height = frame_size_.height(); |
| if (width == 0 || height == 0) |
| @@ -327,12 +426,29 @@ void MediaStreamVideoDemoInstance::CreateYUVTextures() { |
| gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_u_); |
| if (texture_v_) |
| gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_v_); |
| - texture_y_ = CreateTexture(width, height, 0); |
| + if (texture_rgb_) |
| + gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_rgb_); |
| + texture_y_ = CreateTexture(width, height, 0, false); |
| + |
| + texture_u_ = CreateTexture(width / 2, height / 2, 1, false); |
| + texture_v_ = CreateTexture(width / 2, height / 2, 2, false); |
| + texture_rgb_ = CreateTexture(width, height, 3, true); |
| +} |
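The texture sizes follow the I420/YV12 memory layout: the Y plane holds width x height samples while the U and V planes are subsampled to (width / 2) x (height / 2) each, so a frame occupies width * height * 3 / 2 bytes in total. For a 640x480 frame, for instance, that is 307,200 bytes of Y plus 76,800 bytes each of U and V, 460,800 bytes overall. The BGRA path instead uploads into the single full-resolution texture on unit 3.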
| + |
| +void MediaStreamVideoDemoInstance::ConfigureTrack() { |
| + const int32_t attrib_list[] = { |
| + PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT, attrib_format_, |
| + PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH, attrib_width_, |
| + PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT, attrib_height_, |
| + PP_MEDIASTREAMVIDEOTRACK_ATTRIB_NONE |
| + }; |
| + video_track_.Configure(attrib_list, callback_factory_.NewCallback( |
| + &MediaStreamVideoDemoInstance::OnConfigure)); |
| +} |
| - width /= 2; |
| - height /= 2; |
| - texture_u_ = CreateTexture(width, height, 1); |
| - texture_v_ = CreateTexture(width, height, 2); |
| +void MediaStreamVideoDemoInstance::OnConfigure(int32_t result) { |
| + video_track_.GetFrame(callback_factory_.NewCallbackWithOutput( |
| + &MediaStreamVideoDemoInstance::OnGetFrame)); |
| } |
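OnConfigure() currently ignores its completion result. A minimal hardening sketch, reusing the hypothetical ReportError() helper from above, would check the result before requesting frames, since Configure() reports failures through its callback (for example PP_ERROR_INPROGRESS while a previous configure is still pending):

    void MediaStreamVideoDemoInstance::OnConfigure(int32_t result) {
      if (result != PP_OK) {
        ReportError("Configure failed");
        return;
      }
      video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
          &MediaStreamVideoDemoInstance::OnGetFrame));
    }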
| void MediaStreamVideoDemoInstance::OnGetFrame( |
| @@ -341,33 +457,43 @@ void MediaStreamVideoDemoInstance::OnGetFrame( |
| return; |
| const char* data = static_cast<const char*>(frame.GetDataBuffer()); |
| pp::Size size; |
| - PP_DCHECK(frame.GetSize(&size)); |
| + frame.GetSize(&size); |
| + |
| if (size != frame_size_) { |
| frame_size_ = size; |
| - CreateYUVTextures(); |
| + CreateTextures(); |
| } |
| + is_bgra_ = (frame.GetFormat() == PP_VIDEOFRAME_FORMAT_BGRA); |
| + |
| int32_t width = frame_size_.width(); |
| int32_t height = frame_size_.height(); |
| - gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0); |
| - gles2_if_->TexSubImage2D( |
| - context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| - GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| - |
| - data += width * height; |
| - width /= 2; |
| - height /= 2; |
| - |
| - gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1); |
| - gles2_if_->TexSubImage2D( |
| - context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| - GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| - |
| - data += width * height; |
| - gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2); |
| - gles2_if_->TexSubImage2D( |
| - context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| - GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| + if (!is_bgra_) { |
| + gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0); |
| + gles2_if_->TexSubImage2D( |
| + context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| + GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| + |
| + data += width * height; |
| + width /= 2; |
| + height /= 2; |
| + |
| + gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1); |
| + gles2_if_->TexSubImage2D( |
| + context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| + GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| + |
| + data += width * height; |
| + gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2); |
| + gles2_if_->TexSubImage2D( |
| + context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| + GL_LUMINANCE, GL_UNSIGNED_BYTE, data); |
| + } else { |
| + gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE3); |
| + gles2_if_->TexSubImage2D( |
| + context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height, |
| + GL_BGRA_EXT, GL_UNSIGNED_BYTE, data); |
| + } |
| if (is_painting_) |
| needs_paint_ = true; |
| @@ -375,8 +501,13 @@ void MediaStreamVideoDemoInstance::OnGetFrame( |
| Render(); |
| video_track_.RecycleFrame(frame); |
| - video_track_.GetFrame(callback_factory_.NewCallbackWithOutput( |
| - &MediaStreamVideoDemoInstance::OnGetFrame)); |
| + if (need_config_) { |
| + ConfigureTrack(); |
| + need_config_ = false; |
| + } else { |
| + video_track_.GetFrame(callback_factory_.NewCallbackWithOutput( |
| + &MediaStreamVideoDemoInstance::OnGetFrame)); |
| + } |
| } |
| pp::Instance* MediaStreamVideoModule::CreateInstance(PP_Instance instance) { |