Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1822)

Unified Diff: ppapi/examples/media_stream_video/media_stream_video.cc

Issue 134643002: [PPAPI] Add media stream video track example (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix build issues Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « no previous file | ppapi/examples/media_stream_video/media_stream_video.html » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: ppapi/examples/media_stream_video/media_stream_video.cc
diff --git a/ppapi/examples/video_capture/video_capture.cc b/ppapi/examples/media_stream_video/media_stream_video.cc
similarity index 56%
copy from ppapi/examples/video_capture/video_capture.cc
copy to ppapi/examples/media_stream_video/media_stream_video.cc
index b95fcace928b9ee31fffedc2ab75e9c73d4d4be6..3b05b40ce34a1bec1b02b41982730679c6829743 100644
--- a/ppapi/examples/video_capture/video_capture.cc
+++ b/ppapi/examples/media_stream_video/media_stream_video.cc
@@ -1,27 +1,19 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include <stdlib.h>
-#include <string.h>
-
-#include <map>
-#include <vector>
-
-#include "ppapi/c/dev/ppb_video_capture_dev.h"
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_opengles2.h"
-#include "ppapi/cpp/dev/buffer_dev.h"
-#include "ppapi/cpp/dev/device_ref_dev.h"
-#include "ppapi/cpp/dev/video_capture_dev.h"
-#include "ppapi/cpp/dev/video_capture_client_dev.h"
#include "ppapi/cpp/completion_callback.h"
-#include "ppapi/cpp/graphics_3d_client.h"
+#include "ppapi/cpp/dev/var_resource_dev.h"
#include "ppapi/cpp/graphics_3d.h"
+#include "ppapi/cpp/graphics_3d_client.h"
#include "ppapi/cpp/instance.h"
+#include "ppapi/cpp/media_stream_video_track.h"
#include "ppapi/cpp/module.h"
#include "ppapi/cpp/rect.h"
#include "ppapi/cpp/var.h"
+#include "ppapi/cpp/video_frame.h"
#include "ppapi/lib/gl/include/GLES2/gl2.h"
#include "ppapi/utility/completion_callback_factory.h"
@@ -38,24 +30,21 @@
namespace {
-const char* const kDelimiter = "#__#";
-
// This object is the global object representing this plugin library as long
// as it is loaded.
-class VCDemoModule : public pp::Module {
+class MediaStreamVideoModule : public pp::Module {
public:
- VCDemoModule() : pp::Module() {}
- virtual ~VCDemoModule() {}
+ MediaStreamVideoModule() : pp::Module() {}
+ virtual ~MediaStreamVideoModule() {}
virtual pp::Instance* CreateInstance(PP_Instance instance);
};
-class VCDemoInstance : public pp::Instance,
- public pp::Graphics3DClient,
- public pp::VideoCaptureClient_Dev {
+class MediaStreamVideoDemoInstance : public pp::Instance,
+ public pp::Graphics3DClient {
public:
- VCDemoInstance(PP_Instance instance, pp::Module* module);
- virtual ~VCDemoInstance();
+ MediaStreamVideoDemoInstance(PP_Instance instance, pp::Module* module);
+ virtual ~MediaStreamVideoDemoInstance();
// pp::Instance implementation (see PPP_Instance).
virtual void DidChangeView(const pp::Rect& position,
@@ -69,51 +58,6 @@ class VCDemoInstance : public pp::Instance,
Render();
}
- virtual void OnDeviceInfo(PP_Resource resource,
- const PP_VideoCaptureDeviceInfo_Dev& info,
- const std::vector<pp::Buffer_Dev>& buffers) {
- capture_info_ = info;
- buffers_ = buffers;
- CreateYUVTextures();
- }
-
- virtual void OnStatus(PP_Resource resource, uint32_t status) {
- }
-
- virtual void OnError(PP_Resource resource, uint32_t error) {
- }
-
- virtual void OnBufferReady(PP_Resource resource, uint32_t buffer) {
- const char* data = static_cast<const char*>(buffers_[buffer].data());
- int32_t width = capture_info_.width;
- int32_t height = capture_info_.height;
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- width /= 2;
- height /= 2;
-
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- video_capture_.ReuseBuffer(buffer);
- if (is_painting_)
- needs_paint_ = true;
- else
- Render();
- }
-
private:
void Render();
@@ -125,16 +69,8 @@ class VCDemoInstance : public pp::Instance,
void PaintFinished(int32_t result);
void CreateYUVTextures();
- void Open(const pp::DeviceRef_Dev& device);
- void Stop();
- void Start();
- void EnumerateDevicesFinished(int32_t result,
- std::vector<pp::DeviceRef_Dev>& devices);
- void OpenFinished(int32_t result);
-
- static void MonitorDeviceChangeCallback(void* user_data,
- uint32_t device_count,
- const PP_Resource devices[]);
+  // Callback that is invoked when new frames are received.
+ void OnGetFrame(int32_t result, pp::VideoFrame frame);
pp::Size position_size_;
bool is_painting_;
@@ -142,10 +78,8 @@ class VCDemoInstance : public pp::Instance,
GLuint texture_y_;
GLuint texture_u_;
GLuint texture_v_;
- pp::VideoCapture_Dev video_capture_;
- PP_VideoCaptureDeviceInfo_Dev capture_info_;
- std::vector<pp::Buffer_Dev> buffers_;
- pp::CompletionCallbackFactory<VCDemoInstance> callback_factory_;
+ pp::MediaStreamVideoTrack video_track_;
+ pp::CompletionCallbackFactory<MediaStreamVideoDemoInstance> callback_factory_;
// Unowned pointers.
const struct PPB_OpenGLES2* gles2_if_;
@@ -153,37 +87,30 @@ class VCDemoInstance : public pp::Instance,
// Owned data.
pp::Graphics3D* context_;
- std::vector<pp::DeviceRef_Dev> enumerate_devices_;
- std::vector<pp::DeviceRef_Dev> monitor_devices_;
+ pp::Size frame_size_;
};
-VCDemoInstance::VCDemoInstance(PP_Instance instance, pp::Module* module)
+MediaStreamVideoDemoInstance::MediaStreamVideoDemoInstance(
+ PP_Instance instance, pp::Module* module)
: pp::Instance(instance),
pp::Graphics3DClient(this),
- pp::VideoCaptureClient_Dev(this),
is_painting_(false),
needs_paint_(false),
texture_y_(0),
texture_u_(0),
texture_v_(0),
- video_capture_(this),
callback_factory_(this),
context_(NULL) {
gles2_if_ = static_cast<const struct PPB_OpenGLES2*>(
module->GetBrowserInterface(PPB_OPENGLES2_INTERFACE));
PP_DCHECK(gles2_if_);
-
- capture_info_.width = 320;
- capture_info_.height = 240;
- capture_info_.frames_per_second = 30;
}
-VCDemoInstance::~VCDemoInstance() {
- video_capture_.MonitorDeviceChange(NULL, NULL);
+MediaStreamVideoDemoInstance::~MediaStreamVideoDemoInstance() {
delete context_;
}
-void VCDemoInstance::DidChangeView(
+void MediaStreamVideoDemoInstance::DidChangeView(
const pp::Rect& position, const pp::Rect& clip_ignored) {
if (position.width() == 0 || position.height() == 0)
return;
@@ -194,50 +121,26 @@ void VCDemoInstance::DidChangeView(
// Initialize graphics.
InitGL();
-
Render();
}
-void VCDemoInstance::HandleMessage(const pp::Var& message_data) {
- if (message_data.is_string()) {
- std::string event = message_data.AsString();
- if (event == "PageInitialized") {
- int32_t result = video_capture_.MonitorDeviceChange(
- &VCDemoInstance::MonitorDeviceChangeCallback, this);
- if (result != PP_OK)
- PostMessage(pp::Var("MonitorDeviceChangeFailed"));
-
- pp::CompletionCallbackWithOutput<std::vector<pp::DeviceRef_Dev> >
- callback = callback_factory_.NewCallbackWithOutput(
- &VCDemoInstance::EnumerateDevicesFinished);
- result = video_capture_.EnumerateDevices(callback);
- if (result != PP_OK_COMPLETIONPENDING)
- PostMessage(pp::Var("EnumerationFailed"));
- } else if (event == "UseDefault") {
- Open(pp::DeviceRef_Dev());
- } else if (event == "Stop") {
- Stop();
- } else if (event == "Start") {
- Start();
- } else if (event.find("Monitor:") == 0) {
- std::string index_str = event.substr(strlen("Monitor:"));
- int index = atoi(index_str.c_str());
- if (index >= 0 && index < static_cast<int>(monitor_devices_.size()))
- Open(monitor_devices_[index]);
- else
- PP_NOTREACHED();
- } else if (event.find("Enumerate:") == 0) {
- std::string index_str = event.substr(strlen("Enumerate:"));
- int index = atoi(index_str.c_str());
- if (index >= 0 && index < static_cast<int>(enumerate_devices_.size()))
- Open(enumerate_devices_[index]);
- else
- PP_NOTREACHED();
- }
- }
+void MediaStreamVideoDemoInstance::HandleMessage(const pp::Var& var_message) {
+ if (!var_message.is_dictionary())
+ return;
+ pp::VarDictionary var_dictionary_message(var_message);
+ pp::Var var_track = var_dictionary_message.Get("track");
+ if (!var_track.is_resource())
+ return;
+
+ pp::Resource resource_track = pp::VarResource_Dev(var_track).AsResource();
+
+ video_track_ = pp::MediaStreamVideoTrack(resource_track);
+
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
}
-void VCDemoInstance::InitGL() {
+void MediaStreamVideoDemoInstance::InitGL() {
PP_DCHECK(position_size_.width() && position_size_.height());
is_painting_ = false;
@@ -270,7 +173,7 @@ void VCDemoInstance::InitGL() {
CreateGLObjects();
}
-void VCDemoInstance::Render() {
+void MediaStreamVideoDemoInstance::Render() {
PP_DCHECK(!is_painting_);
is_painting_ = true;
needs_paint_ = false;
@@ -280,17 +183,18 @@ void VCDemoInstance::Render() {
gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
}
pp::CompletionCallback cb = callback_factory_.NewCallback(
- &VCDemoInstance::PaintFinished);
+ &MediaStreamVideoDemoInstance::PaintFinished);
context_->SwapBuffers(cb);
}
-void VCDemoInstance::PaintFinished(int32_t result) {
+void MediaStreamVideoDemoInstance::PaintFinished(int32_t result) {
is_painting_ = false;
if (needs_paint_)
Render();
}
-GLuint VCDemoInstance::CreateTexture(int32_t width, int32_t height, int unit) {
+GLuint MediaStreamVideoDemoInstance::CreateTexture(
+ int32_t width, int32_t height, int unit) {
GLuint texture_id;
gles2_if_->GenTextures(context_->pp_resource(), 1, &texture_id);
AssertNoGLError();
@@ -318,7 +222,7 @@ GLuint VCDemoInstance::CreateTexture(int32_t width, int32_t height, int unit) {
return texture_id;
}
-void VCDemoInstance::CreateGLObjects() {
+void MediaStreamVideoDemoInstance::CreateGLObjects() {
// Code and constants for shader.
static const char kVertexShader[] =
"varying vec2 v_texCoord; \n"
@@ -403,7 +307,7 @@ void VCDemoInstance::CreateGLObjects() {
AssertNoGLError();
}
-void VCDemoInstance::CreateShader(
+void MediaStreamVideoDemoInstance::CreateShader(
GLuint program, GLenum type, const char* source, int size) {
PP_Resource context = context_->pp_resource();
GLuint shader = gles2_if_->CreateShader(context, type);
@@ -413,9 +317,17 @@ void VCDemoInstance::CreateShader(
gles2_if_->DeleteShader(context, shader);
}
-void VCDemoInstance::CreateYUVTextures() {
- int32_t width = capture_info_.width;
- int32_t height = capture_info_.height;
+void MediaStreamVideoDemoInstance::CreateYUVTextures() {
+ int32_t width = frame_size_.width();
+ int32_t height = frame_size_.height();
+ if (width == 0 || height == 0)
+ return;
+ if (texture_y_)
+ gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_y_);
+ if (texture_u_)
+ gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_u_);
+ if (texture_v_)
+ gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_v_);
texture_y_ = CreateTexture(width, height, 0);
width /= 2;
@@ -424,74 +336,52 @@ void VCDemoInstance::CreateYUVTextures() {
texture_v_ = CreateTexture(width, height, 2);
}
-void VCDemoInstance::Open(const pp::DeviceRef_Dev& device) {
- pp::CompletionCallback callback = callback_factory_.NewCallback(
- &VCDemoInstance::OpenFinished);
- int32_t result = video_capture_.Open(device, capture_info_, 4, callback);
- if (result != PP_OK_COMPLETIONPENDING)
- PostMessage(pp::Var("OpenFailed"));
-}
+void MediaStreamVideoDemoInstance::OnGetFrame(
+ int32_t result, pp::VideoFrame frame) {
+ if (result != PP_OK)
+ return;
+ const char* data = static_cast<const char*>(frame.GetDataBuffer());
+ pp::Size size;
+ PP_DCHECK(frame.GetSize(&size));
+ if (size != frame_size_) {
+ frame_size_ = size;
+ CreateYUVTextures();
+ }
-void VCDemoInstance::Stop() {
- if (video_capture_.StopCapture() != PP_OK)
- PostMessage(pp::Var("StopFailed"));
-}
+ int32_t width = frame_size_.width();
+ int32_t height = frame_size_.height();
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-void VCDemoInstance::Start() {
- if (video_capture_.StartCapture() != PP_OK)
- PostMessage(pp::Var("StartFailed"));
-}
+ data += width * height;
+ width /= 2;
+ height /= 2;
-void VCDemoInstance::EnumerateDevicesFinished(
- int32_t result,
- std::vector<pp::DeviceRef_Dev>& devices) {
- if (result == PP_OK) {
- enumerate_devices_.swap(devices);
- std::string device_names = "Enumerate:";
- for (size_t index = 0; index < enumerate_devices_.size(); ++index) {
- pp::Var name = enumerate_devices_[index].GetName();
- PP_DCHECK(name.is_string());
-
- if (index != 0)
- device_names += kDelimiter;
- device_names += name.AsString();
- }
- PostMessage(pp::Var(device_names));
- } else {
- PostMessage(pp::Var("EnumerationFailed"));
- }
-}
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-void VCDemoInstance::OpenFinished(int32_t result) {
- if (result == PP_OK)
- Start();
+ data += width * height;
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+
+ if (is_painting_)
+ needs_paint_ = true;
else
- PostMessage(pp::Var("OpenFailed"));
-}
+ Render();
-// static
-void VCDemoInstance::MonitorDeviceChangeCallback(void* user_data,
- uint32_t device_count,
- const PP_Resource devices[]) {
- VCDemoInstance* thiz = static_cast<VCDemoInstance*>(user_data);
-
- std::string device_names = "Monitor:";
- thiz->monitor_devices_.clear();
- thiz->monitor_devices_.reserve(device_count);
- for (size_t index = 0; index < device_count; ++index) {
- thiz->monitor_devices_.push_back(pp::DeviceRef_Dev(devices[index]));
- pp::Var name = thiz->monitor_devices_.back().GetName();
- PP_DCHECK(name.is_string());
-
- if (index != 0)
- device_names += kDelimiter;
- device_names += name.AsString();
- }
- thiz->PostMessage(pp::Var(device_names));
+ video_track_.RecycleFrame(frame);
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
}
-pp::Instance* VCDemoModule::CreateInstance(PP_Instance instance) {
- return new VCDemoInstance(instance, this);
+pp::Instance* MediaStreamVideoModule::CreateInstance(PP_Instance instance) {
+ return new MediaStreamVideoDemoInstance(instance, this);
}
} // anonymous namespace
@@ -499,6 +389,6 @@ pp::Instance* VCDemoModule::CreateInstance(PP_Instance instance) {
namespace pp {
// Factory function for your specialization of the Module object.
Module* CreateModule() {
- return new VCDemoModule();
+ return new MediaStreamVideoModule();
}
} // namespace pp
« no previous file with comments | « no previous file | ppapi/examples/media_stream_video/media_stream_video.html » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698