Chromium Code Reviews

Unified Diff: native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc

Issue 212533007: Sync NaCl MediaStream Video example with ppapi/examples/media_stream_video (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix review issue. Created 6 years, 9 months ago.
Index: native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc
diff --git a/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc b/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc
index 831d98ae3636dc75c09a5f66e58c3e161924cdee..727077bfdd353fdb05f7bb7c7646bd038e2683e5 100644
--- a/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc
+++ b/native_client_sdk/src/examples/api/media_stream_video/media_stream_video.cc
@@ -2,7 +2,12 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "GLES2/gl2.h"
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <string.h>
+
+#include <vector>
+
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_opengles2.h"
#include "ppapi/cpp/completion_callback.h"
@@ -14,6 +19,7 @@
#include "ppapi/cpp/rect.h"
#include "ppapi/cpp/var.h"
#include "ppapi/cpp/video_frame.h"
+#include "ppapi/lib/gl/gles2/gl2ext_ppapi.h"
#include "ppapi/utility/completion_callback_factory.h"
// When compiling natively on Windows, PostMessage can be #define-d to
@@ -25,7 +31,7 @@
// Assert |context_| isn't holding any GL Errors. Done as a macro instead of a
// function to preserve line number information in the failure message.
#define AssertNoGLError() \
- PP_DCHECK(!gles2_if_->GetError(context_->pp_resource()));
+ PP_DCHECK(!glGetError());
namespace {
@@ -53,35 +59,49 @@ class MediaStreamVideoDemoInstance : public pp::Instance,
// pp::Graphics3DClient implementation.
virtual void Graphics3DContextLost() {
InitGL();
- CreateYUVTextures();
+ CreateTextures();
Render();
}
private:
+ void DrawYUV();
+ void DrawRGB();
void Render();
// GL-related functions.
void InitGL();
- GLuint CreateTexture(int32_t width, int32_t height, int unit);
+ GLuint CreateTexture(int32_t width, int32_t height, int unit, bool rgba);
void CreateGLObjects();
- void CreateShader(GLuint program, GLenum type, const char* source, int size);
+ void CreateShader(GLuint program, GLenum type, const char* source);
void PaintFinished(int32_t result);
- void CreateYUVTextures();
+ void CreateTextures();
+ void ConfigureTrack();
+
- // Callback that is invoked when new frames are recevied.
+ // MediaStreamVideoTrack callbacks.
+ void OnConfigure(int32_t result);
void OnGetFrame(int32_t result, pp::VideoFrame frame);
pp::Size position_size_;
bool is_painting_;
bool needs_paint_;
+ bool is_bgra_;
+ GLuint program_yuv_;
+ GLuint program_rgb_;
+ GLuint buffer_;
GLuint texture_y_;
GLuint texture_u_;
GLuint texture_v_;
+ GLuint texture_rgb_;
pp::MediaStreamVideoTrack video_track_;
pp::CompletionCallbackFactory<MediaStreamVideoDemoInstance> callback_factory_;
+ std::vector<int32_t> attrib_list_;
- // Unowned pointers.
- const struct PPB_OpenGLES2* gles2_if_;
+ // MediaStreamVideoTrack attributes:
+ bool need_config_;
+ PP_VideoFrame_Format attrib_format_;
+ int32_t attrib_width_;
+ int32_t attrib_height_;
// Owned data.
pp::Graphics3D* context_;
@@ -95,14 +115,21 @@ MediaStreamVideoDemoInstance::MediaStreamVideoDemoInstance(
pp::Graphics3DClient(this),
is_painting_(false),
needs_paint_(false),
+ is_bgra_(false),
texture_y_(0),
texture_u_(0),
texture_v_(0),
+ texture_rgb_(0),
callback_factory_(this),
+ need_config_(false),
+ attrib_format_(PP_VIDEOFRAME_FORMAT_I420),
+ attrib_width_(0),
+ attrib_height_(0),
context_(NULL) {
- gles2_if_ = static_cast<const struct PPB_OpenGLES2*>(
- module->GetBrowserInterface(PPB_OPENGLES2_INTERFACE));
- PP_DCHECK(gles2_if_);
+ if (!glInitializePPAPI(pp::Module::Get()->get_browser_interface())) {
+ LogToConsole(PP_LOGLEVEL_ERROR, pp::Var("Unable to initialize GL PPAPI!"));
+ assert(false);
+ }
}
MediaStreamVideoDemoInstance::~MediaStreamVideoDemoInstance() {
@@ -124,19 +151,40 @@ void MediaStreamVideoDemoInstance::DidChangeView(
}
void MediaStreamVideoDemoInstance::HandleMessage(const pp::Var& var_message) {
- if (!var_message.is_dictionary())
+ if (!var_message.is_dictionary()) {
+ LogToConsole(PP_LOGLEVEL_ERROR, pp::Var("Invalid message!"));
return;
- pp::VarDictionary var_dictionary_message(var_message);
- pp::Var var_track = var_dictionary_message.Get("track");
- if (!var_track.is_resource())
- return;
-
- pp::Resource resource_track = var_track.AsResource();
-
- video_track_ = pp::MediaStreamVideoTrack(resource_track);
+ }
- video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
- &MediaStreamVideoDemoInstance::OnGetFrame));
+ pp::VarDictionary var_dictionary_message(var_message);
+ std::string command = var_dictionary_message.Get("command").AsString();
+
+ if (command == "init") {
+ pp::Var var_track = var_dictionary_message.Get("track");
+ if (!var_track.is_resource())
+ return;
+ pp::Resource resource_track = var_track.AsResource();
+ video_track_ = pp::MediaStreamVideoTrack(resource_track);
+ ConfigureTrack();
+ } else if (command == "format") {
+ std::string str_format = var_dictionary_message.Get("format").AsString();
+ if (str_format == "YV12") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_YV12;
+ } else if (str_format == "I420") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_I420;
+ } else if (str_format == "BGRA") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_BGRA;
+ } else {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_UNKNOWN;
+ }
+ need_config_ = true;
+ } else if (command == "size") {
+ attrib_width_ = var_dictionary_message.Get("width").AsInt();
+ attrib_height_ = var_dictionary_message.Get("height").AsInt();
+ need_config_ = true;
+ } else {
+ LogToConsole(PP_LOGLEVEL_ERROR, pp::Var("Invalid command!"));
+ }
}
void MediaStreamVideoDemoInstance::InitGL() {
@@ -160,11 +208,12 @@ void MediaStreamVideoDemoInstance::InitGL() {
context_ = new pp::Graphics3D(this, attributes);
PP_DCHECK(!context_->is_null());
+ glSetCurrentContextPPAPI(context_->pp_resource());
+
// Set viewport window size and clear color bit.
- gles2_if_->ClearColor(context_->pp_resource(), 1, 0, 0, 1);
- gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
- gles2_if_->Viewport(context_->pp_resource(), 0, 0,
- position_size_.width(), position_size_.height());
+ glClearColor(1, 0, 0, 1);
+ glClear(GL_COLOR_BUFFER_BIT);
+ glViewport(0, 0, position_size_.width(), position_size_.height());
BindGraphics(*context_);
AssertNoGLError();
@@ -172,14 +221,63 @@ void MediaStreamVideoDemoInstance::InitGL() {
CreateGLObjects();
}
+void MediaStreamVideoDemoInstance::DrawYUV() {
+ static const float kColorMatrix[9] = {
+ 1.1643828125f, 1.1643828125f, 1.1643828125f,
+ 0.0f, -0.39176171875f, 2.017234375f,
+ 1.59602734375f, -0.81296875f, 0.0f
+ };
+
+ glUseProgram(program_yuv_);
+ glUniform1i(glGetUniformLocation(program_yuv_, "y_texture"), 0);
+ glUniform1i(glGetUniformLocation(program_yuv_, "u_texture"), 1);
+ glUniform1i(glGetUniformLocation(program_yuv_, "v_texture"), 2);
+ glUniformMatrix3fv(glGetUniformLocation(program_yuv_, "color_matrix"),
+ 1, GL_FALSE, kColorMatrix);
+ AssertNoGLError();
+
+ GLint pos_location = glGetAttribLocation(program_yuv_, "a_position");
+ GLint tc_location = glGetAttribLocation(program_yuv_, "a_texCoord");
+ AssertNoGLError();
+ glEnableVertexAttribArray(pos_location);
+ glVertexAttribPointer(pos_location, 2, GL_FLOAT, GL_FALSE, 0, 0);
+ glEnableVertexAttribArray(tc_location);
+ glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, 0,
+ static_cast<float*>(0) + 16); // Skip position coordinates.
+ AssertNoGLError();
+
+ glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+ AssertNoGLError();
+}
+
+void MediaStreamVideoDemoInstance::DrawRGB() {
+ glUseProgram(program_rgb_);
+ glUniform1i(glGetUniformLocation(program_rgb_, "rgb_texture"), 3);
+ AssertNoGLError();
+
+ GLint pos_location = glGetAttribLocation(program_rgb_, "a_position");
+ GLint tc_location = glGetAttribLocation(program_rgb_, "a_texCoord");
+ AssertNoGLError();
+ glEnableVertexAttribArray(pos_location);
+ glVertexAttribPointer(pos_location, 2, GL_FLOAT, GL_FALSE, 0, 0);
+ glEnableVertexAttribArray(tc_location);
+ glVertexAttribPointer(tc_location, 2, GL_FLOAT, GL_FALSE, 0,
+ static_cast<float*>(0) + 16); // Skip position coordinates.
+ AssertNoGLError();
+
+ glDrawArrays(GL_TRIANGLE_STRIP, 4, 4);
+}
+
void MediaStreamVideoDemoInstance::Render() {
PP_DCHECK(!is_painting_);
is_painting_ = true;
needs_paint_ = false;
+
if (texture_y_) {
- gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
+ DrawRGB();
+ DrawYUV();
} else {
- gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
+ glClear(GL_COLOR_BUFFER_BIT);
}
pp::CompletionCallback cb = callback_factory_.NewCallback(
&MediaStreamVideoDemoInstance::PaintFinished);
@@ -193,30 +291,23 @@ void MediaStreamVideoDemoInstance::PaintFinished(int32_t result) {
}
GLuint MediaStreamVideoDemoInstance::CreateTexture(
- int32_t width, int32_t height, int unit) {
+ int32_t width, int32_t height, int unit, bool rgba) {
GLuint texture_id;
- gles2_if_->GenTextures(context_->pp_resource(), 1, &texture_id);
+ glGenTextures(1, &texture_id);
AssertNoGLError();
- // Assign parameters.
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0 + unit);
- gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture_id);
- gles2_if_->TexParameteri(
- context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
- GL_NEAREST);
- gles2_if_->TexParameteri(
- context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
- GL_NEAREST);
- gles2_if_->TexParameterf(
- context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,
- GL_CLAMP_TO_EDGE);
- gles2_if_->TexParameterf(
- context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,
- GL_CLAMP_TO_EDGE);
+ // Assign parameters.
+ glActiveTexture(GL_TEXTURE0 + unit);
+ glBindTexture(GL_TEXTURE_2D, texture_id);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+ glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// Allocate texture.
- gles2_if_->TexImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
+ glTexImage2D(GL_TEXTURE_2D, 0,
+ rgba ? GL_BGRA_EXT : GL_LUMINANCE,
+ width, height, 0,
+ rgba ? GL_BGRA_EXT : GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
AssertNoGLError();
return texture_id;
}
@@ -233,7 +324,7 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() {
" gl_Position = a_position; \n"
"}";
- static const char kFragmentShader[] =
+ static const char kFragmentShaderYUV[] =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D y_texture; \n"
@@ -250,89 +341,87 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() {
" gl_FragColor = vec4(rgb, 1.0); \n"
"}";
- static const float kColorMatrix[9] = {
- 1.1643828125f, 1.1643828125f, 1.1643828125f,
- 0.0f, -0.39176171875f, 2.017234375f,
- 1.59602734375f, -0.81296875f, 0.0f
- };
+ static const char kFragmentShaderRGB[] =
+ "precision mediump float; \n"
+ "varying vec2 v_texCoord; \n"
+ "uniform sampler2D rgb_texture; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = texture2D(rgb_texture, v_texCoord); \n"
+ "}";
- PP_Resource context = context_->pp_resource();
-
- // Create shader program.
- GLuint program = gles2_if_->CreateProgram(context);
- CreateShader(program, GL_VERTEX_SHADER, kVertexShader, sizeof(kVertexShader));
- CreateShader(
- program, GL_FRAGMENT_SHADER, kFragmentShader, sizeof(kFragmentShader));
- gles2_if_->LinkProgram(context, program);
- gles2_if_->UseProgram(context, program);
- gles2_if_->DeleteProgram(context, program);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "y_texture"), 0);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "u_texture"), 1);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "v_texture"), 2);
- gles2_if_->UniformMatrix3fv(
- context,
- gles2_if_->GetUniformLocation(context, program, "color_matrix"),
- 1, GL_FALSE, kColorMatrix);
+ // Create shader programs.
+ program_yuv_ = glCreateProgram();
+ CreateShader(program_yuv_, GL_VERTEX_SHADER, kVertexShader);
+ CreateShader(program_yuv_, GL_FRAGMENT_SHADER, kFragmentShaderYUV);
+ glLinkProgram(program_yuv_);
+ AssertNoGLError();
+
+ program_rgb_ = glCreateProgram();
+ CreateShader(program_rgb_, GL_VERTEX_SHADER, kVertexShader);
+ CreateShader(program_rgb_, GL_FRAGMENT_SHADER, kFragmentShaderRGB);
+ glLinkProgram(program_rgb_);
AssertNoGLError();
// Assign vertex positions and texture coordinates to buffers for use in
// shader program.
static const float kVertices[] = {
- -1, 1, -1, -1, 1, 1, 1, -1, // Position coordinates.
+ -1, 1, -1, -1, 0, 1, 0, -1, // Position coordinates.
+ 0, 1, 0, -1, 1, 1, 1, -1, // Position coordinates.
+ 0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates.
0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates.
};
- GLuint buffer;
- gles2_if_->GenBuffers(context, 1, &buffer);
- gles2_if_->BindBuffer(context, GL_ARRAY_BUFFER, buffer);
- gles2_if_->BufferData(context, GL_ARRAY_BUFFER,
- sizeof(kVertices), kVertices, GL_STATIC_DRAW);
- AssertNoGLError();
- GLint pos_location = gles2_if_->GetAttribLocation(
- context, program, "a_position");
- GLint tc_location = gles2_if_->GetAttribLocation(
- context, program, "a_texCoord");
- AssertNoGLError();
- gles2_if_->EnableVertexAttribArray(context, pos_location);
- gles2_if_->VertexAttribPointer(context, pos_location, 2,
- GL_FLOAT, GL_FALSE, 0, 0);
- gles2_if_->EnableVertexAttribArray(context, tc_location);
- gles2_if_->VertexAttribPointer(
- context, tc_location, 2, GL_FLOAT, GL_FALSE, 0,
- static_cast<float*>(0) + 8); // Skip position coordinates.
+ glGenBuffers(1, &buffer_);
+ glBindBuffer(GL_ARRAY_BUFFER, buffer_);
+ glBufferData(GL_ARRAY_BUFFER, sizeof(kVertices), kVertices, GL_STATIC_DRAW);
AssertNoGLError();
}
void MediaStreamVideoDemoInstance::CreateShader(
- GLuint program, GLenum type, const char* source, int size) {
- PP_Resource context = context_->pp_resource();
- GLuint shader = gles2_if_->CreateShader(context, type);
- gles2_if_->ShaderSource(context, shader, 1, &source, &size);
- gles2_if_->CompileShader(context, shader);
- gles2_if_->AttachShader(context, program, shader);
- gles2_if_->DeleteShader(context, shader);
+ GLuint program, GLenum type, const char* source) {
+ GLuint shader = glCreateShader(type);
+ GLint length = strlen(source) + 1;
+ glShaderSource(shader, 1, &source, &length);
+ glCompileShader(shader);
+ glAttachShader(program, shader);
+ glDeleteShader(shader);
}
-void MediaStreamVideoDemoInstance::CreateYUVTextures() {
+void MediaStreamVideoDemoInstance::CreateTextures() {
int32_t width = frame_size_.width();
int32_t height = frame_size_.height();
if (width == 0 || height == 0)
return;
if (texture_y_)
- gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_y_);
+ glDeleteTextures(1, &texture_y_);
if (texture_u_)
- gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_u_);
+ glDeleteTextures(1, &texture_u_);
if (texture_v_)
- gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_v_);
- texture_y_ = CreateTexture(width, height, 0);
+ glDeleteTextures(1, &texture_v_);
+ if (texture_rgb_)
+ glDeleteTextures(1, &texture_rgb_);
+ texture_y_ = CreateTexture(width, height, 0, false);
+
+ texture_u_ = CreateTexture(width / 2, height / 2, 1, false);
+ texture_v_ = CreateTexture(width / 2, height / 2, 2, false);
+ texture_rgb_ = CreateTexture(width, height, 3, true);
+}
- width /= 2;
- height /= 2;
- texture_u_ = CreateTexture(width, height, 1);
- texture_v_ = CreateTexture(width, height, 2);
+void MediaStreamVideoDemoInstance::ConfigureTrack() {
+ const int32_t attrib_list[] = {
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT, attrib_format_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH, attrib_width_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT, attrib_height_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_NONE
+ };
+ video_track_.Configure(attrib_list, callback_factory_.NewCallback(
+ &MediaStreamVideoDemoInstance::OnConfigure));
+}
+
+void MediaStreamVideoDemoInstance::OnConfigure(int32_t result) {
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
}
void MediaStreamVideoDemoInstance::OnGetFrame(
@@ -341,33 +430,39 @@ void MediaStreamVideoDemoInstance::OnGetFrame(
return;
const char* data = static_cast<const char*>(frame.GetDataBuffer());
pp::Size size;
- PP_DCHECK(frame.GetSize(&size));
+ frame.GetSize(&size);
+
if (size != frame_size_) {
frame_size_ = size;
- CreateYUVTextures();
+ CreateTextures();
}
+ is_bgra_ = (frame.GetFormat() == PP_VIDEOFRAME_FORMAT_BGRA);
+
int32_t width = frame_size_.width();
int32_t height = frame_size_.height();
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- width /= 2;
- height /= 2;
-
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+ if (!is_bgra_) {
+ glActiveTexture(GL_TEXTURE0);
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+
+ data += width * height;
+ width /= 2;
+ height /= 2;
+
+ glActiveTexture(GL_TEXTURE1);
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+
+ data += width * height;
+ glActiveTexture(GL_TEXTURE2);
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+ } else {
+ glActiveTexture(GL_TEXTURE3);
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_BGRA_EXT, GL_UNSIGNED_BYTE, data);
+ }
if (is_painting_)
needs_paint_ = true;
@@ -375,8 +470,13 @@ void MediaStreamVideoDemoInstance::OnGetFrame(
Render();
video_track_.RecycleFrame(frame);
- video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
- &MediaStreamVideoDemoInstance::OnGetFrame));
+ if (need_config_) {
+ ConfigureTrack();
+ need_config_ = false;
+ } else {
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
+ }
}
pp::Instance* MediaStreamVideoModule::CreateInstance(PP_Instance instance) {
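
A note on the new vertex layout above: kVertices now packs two quads (vertices 0-3 cover the left half of clip space, vertices 4-7 the right half) followed by one block of texture coordinates, which is why DrawYUV calls glDrawArrays(GL_TRIANGLE_STRIP, 0, 4), DrawRGB calls glDrawArrays(GL_TRIANGLE_STRIP, 4, 4), and both bind a_texCoord at an offset of 16 floats into the shared buffer. A minimal standalone sketch of that offset arithmetic, not part of the patch and with illustrative names only:

// vertex_layout_sketch.cc -- standalone sketch, not part of the patch.
// Reproduces the buffer offsets implied by kVertices: 8 position pairs
// (two quads of 4 vertices), then 8 texture-coordinate pairs.
#include <cstdio>

int main() {
  const int kComponentsPerVertex = 2;   // x,y or s,t
  const int kVerticesPerQuad = 4;       // one GL_TRIANGLE_STRIP quad
  const int kQuads = 2;                 // left (YUV) and right (RGB)
  const int position_floats = kQuads * kVerticesPerQuad * kComponentsPerVertex;
  // a_texCoord starts right after all position data: 16 floats, 64 bytes,
  // the offset passed as static_cast<float*>(0) + 16 in DrawYUV/DrawRGB.
  std::printf("texcoord offset: %d floats = %zu bytes\n",
              position_floats, position_floats * sizeof(float));
  // The RGB quad is drawn starting at vertex 4.
  std::printf("first vertex of RGB quad: %d\n", kVerticesPerQuad);
  return 0;
}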

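The kColorMatrix passed to glUniformMatrix3fv in DrawYUV holds the BT.601 limited-range YUV-to-RGB coefficients in column-major order (1.1643828125 = 255/219 in the Y column). The fragment shader body is only partly visible in this diff, so the usual (0.0625, 0.5, 0.5) bias subtraction is an assumption here; under that assumption, the shader's conversion can be sanity-checked on the CPU with a standalone sketch like this (not part of the patch):

// yuv_matrix_sketch.cc -- standalone sketch, not part of the patch.
// CPU version of the BT.601 limited-range YUV -> RGB conversion that the
// YUV fragment shader is assumed to perform with kColorMatrix.
#include <algorithm>
#include <cstdint>
#include <cstdio>

static uint8_t Clamp(float v) {
  return static_cast<uint8_t>(std::min(255.0f, std::max(0.0f, v * 255.0f + 0.5f)));
}

int main() {
  const uint8_t y = 81, u = 90, v = 240;  // roughly pure red in BT.601
  const float yf = (y / 255.0f - 0.0625f) * 1.1643828125f;
  const float uf = u / 255.0f - 0.5f;
  const float vf = v / 255.0f - 0.5f;
  // Same coefficients as kColorMatrix (GL reads the array column-major).
  const float r = yf + 1.59602734375f * vf;
  const float g = yf - 0.39176171875f * uf - 0.81296875f * vf;
  const float b = yf + 2.017234375f * uf;
  std::printf("R=%u G=%u B=%u\n", Clamp(r), Clamp(g), Clamp(b));  // ~255, 0, 0
  return 0;
}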
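Likewise, the pointer arithmetic in OnGetFrame (advance past the full-resolution Y plane, then halve width and height before uploading U and V) matches the standard I420 plane layout. A standalone sketch of the same offsets, with an illustrative frame size, not part of the patch:

// i420_layout_sketch.cc -- standalone sketch, not part of the patch.
// Plane offsets matching the data += width * height; width /= 2; height /= 2;
// steps in OnGetFrame.
#include <cstddef>
#include <cstdio>

int main() {
  const int width = 640, height = 480;  // illustrative frame size
  const size_t y_size = static_cast<size_t>(width) * height;             // Y: full resolution
  const size_t uv_size = static_cast<size_t>(width / 2) * (height / 2);  // U, V: quarter size each
  const size_t u_offset = y_size;            // U plane starts right after Y
  const size_t v_offset = y_size + uv_size;  // V plane starts right after U
  std::printf("Y at 0, U at %zu, V at %zu, total %zu bytes\n",
              u_offset, v_offset, y_size + 2 * uv_size);
  return 0;
}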