Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(38)

Unified Diff: ppapi/examples/video_decoder/video_decoder_session.cc

Issue 6961018: Pepper Video Decoder API tester plugin. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: More implementation meat and clearing things all around. Created 9 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: ppapi/examples/video_decoder/video_decoder_session.cc
diff --git a/ppapi/examples/video_decoder/video_decoder_session.cc b/ppapi/examples/video_decoder/video_decoder_session.cc
new file mode 100644
index 0000000000000000000000000000000000000000..98bac3e49a9a44dcbd6a4f91b128156edd69d5f1
--- /dev/null
+++ b/ppapi/examples/video_decoder/video_decoder_session.cc
@@ -0,0 +1,595 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "ppapi/examples/video_decoder/video_decoder_session.h"
+
+#include <cstring>
+#include <fstream>
+#include <iostream>
+
+#include "ppapi/c/dev/pp_graphics_3d_dev.h"
+#include "ppapi/c/dev/ppb_buffer_dev.h"
+#include "ppapi/c/pp_errors.h"
+#include "ppapi/cpp/dev/context_3d_dev.h"
+#include "ppapi/cpp/dev/surface_3d_dev.h"
+#include "ppapi/cpp/dev/video_decoder_dev.h"
+#include "ppapi/lib/gl/include/GLES2/gl2.h"
+
+// Pull-based video source to read video data from a file.
// Pull-based video source to read video data from a file.
// Owns an in-memory copy of the whole file and hands it out one
// start-code-delimited unit at a time via Read().
class TestVideoSource {
 public:
  TestVideoSource()
      : file_length_(0),
        offset_(0),
        mem_(NULL) {}

  // Frees the file copy; the original leaked |mem_|.
  ~TestVideoSource() { delete[] mem_; }

  // Reads the whole file at |url| into memory.
  // Returns true on success, false if the file could not be opened.
  bool Open(const std::string& url) {
    // A stack-allocated stream closes itself on every exit path, unlike the
    // original heap-allocated one which had to be deleted manually.
    std::ifstream file(url.c_str(),
                       std::ios::in | std::ios::binary | std::ios::ate);
    if (!file.good())
      return false;
    file.seekg(0, std::ios::end);
    uint32_t length = file.tellg();
    file.seekg(0, std::ios::beg);
    delete[] mem_;  // Allow Open() to be called again without leaking.
    mem_ = new uint8_t[length];
    file.read(reinterpret_cast<char*>(mem_), length);
    file_length_ = length;
    offset_ = 0;
    return true;
  }

  // Reads next packet from the input stream.
  // Returns number of read bytes on success, 0 on when there was no valid data
  // to be read and -1 if user gave NULL or too small buffer.
  // TODO(vmr): Modify to differentiate between errors and EOF.
  int32_t Read(uint8_t* target_mem, uint32_t size) {
    if (!target_mem)
      return -1;
    uint8_t* unit_begin = NULL;
    uint8_t* unit_end = NULL;
    uint8_t* ptr = mem_ + offset_;
    while (offset_ + 4 < file_length_) {
      if (ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 1) {
        // Found a 4-byte (0x00000001) unit start code.
        if (!unit_begin) {
          unit_begin = ptr;
        } else {
          // Second start code marks the end of the current unit.
          unit_end = ptr;
          break;
        }
      }
      ptr++;
      offset_++;
    }
    if (unit_begin && offset_ + 4 == file_length_) {
      // Last unit. Set the unit_end to point to the last byte.
      unit_end = ptr + 4;
      offset_ += 4;
    } else if (!unit_begin || !unit_end) {
      // No unit start codes found in buffer.
      return 0;
    }
    if (static_cast<int32_t>(size) >= unit_end - unit_begin) {
      memcpy(target_mem, unit_begin, unit_end - unit_begin);
      return unit_end - unit_begin;
    }
    // Rewind to the beginning start code if there is one as it should be
    // returned with next Read().
    offset_ = unit_begin - mem_;
    return -1;
  }

 private:
  uint32_t file_length_;  // Number of valid bytes in |mem_|.
  uint32_t offset_;       // Current scan position within |mem_|.
  uint8_t* mem_;          // Owned copy of the whole input file.

  // Disallow copying; |mem_| has a single owner.
  TestVideoSource(const TestVideoSource&);
  void operator=(const TestVideoSource&);
};
+
// Creates a bitstream source backed by the local file |filename|.
// The file itself is opened lazily on the first GetBitstreamUnit() call.
LocalVideoBitstreamSource::LocalVideoBitstreamSource(std::string filename)
    : file_(filename),
      video_source_(new TestVideoSource()),
      video_source_open_(false) {
}
+
// Releases the owned TestVideoSource (raw pointer, so an explicit delete).
LocalVideoBitstreamSource::~LocalVideoBitstreamSource() {
  delete video_source_;
}
+
+bool LocalVideoBitstreamSource::GetBitstreamUnit(
+ void* target_mem,
+ uint32_t target_mem_size_in_bytes,
+ int32_t* unit_size_in_bytes) {
+ if (!video_source_open_) {
+ if (!video_source_->Open(file_)) {
+ return false;
+ }
+ video_source_open_ = true;
+ }
+ int32_t read_bytes = video_source_->Read(static_cast<uint8_t*>(target_mem),
+ target_mem_size_in_bytes);
+ if (read_bytes <= 0) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: no {} for one-line condition
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ *unit_size_in_bytes = read_bytes;
+ return true;
+}
+
// Out-of-line destructor for the abstract client interface.
VideoDecoderSessionClient::~VideoDecoderSessionClient() {
}
+
// Constants used by VideoDecoderSession.
static const int32_t kBitstreamBufferCount = 3;
// NOTE(review): 256 * 1024 * 1024 is 256MB *per buffer* (768MB total for the
// pool) — confirm this was not meant to be 256KB.
static const int32_t kBitstreamBufferSize = 256 * 1024 * 1024;
static const int32_t kDefaultWidth = 640;
static const int32_t kDefaultHeight = 480;
+
+bool VideoDecoderSession::Initialize(
+ const std::vector<uint32_t>& decoder_config,
+ pp::CompletionCallback completion_callback) {
+ assert(video_source_ && display_);
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Do this check in the constructor.
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ // Default implementation just assumes everything is set up.
+ if (!AllocateInputBuffers()) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: No {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ pp::CompletionCallback cb = cb_factory_.NewCallback(
+ &VideoDecoderSession::OnInitializeDone, completion_callback);
+ video_decoder_ = new pp::VideoDecoder(instance_, decoder_config, cb, this);
+ if (!video_decoder_) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: No {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ return true;
+}
+
+bool VideoDecoderSession::Run(pp::CompletionCallback completion_callback) {
+ assert(state_ == kInitialized);
+ // Start the streaming by dispatching the first buffers one by one.
+ for (std::map<int32_t, PP_VideoBitstreamBuffer_Dev>::iterator it =
+ bitstream_buffers_.begin();
+ it == bitstream_buffers_.end();
+ it++) {
+ if (!ReadAndDispatchBitstreamUnit((*it).first)) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ }
+ // Once streaming has been started, we're running.
+ ChangeState(kRunning);
+ completion_callback.Run(PP_OK);
+ return true;
+}
+
+bool VideoDecoderSession::Stop(pp::CompletionCallback completion_callback) {
+ assert(state_ == kRunning);
+ // Stop the playback.
+ ChangeState(kInitialized);
+ return true;
+}
+
// Asks the decoder to flush. |completion_callback| is run from
// OnUserFlushDone() once the decoder reports completion; the pre-flush state
// is captured so the session can return to it afterwards.
bool VideoDecoderSession::Flush(pp::CompletionCallback completion_callback) {
  assert(state_ == kRunning);
  // Issue the flush request.
  ChangeState(kFlushing);
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderSession::OnUserFlushDone, state_, completion_callback));
  return true;
}
+
// Releases the session's input buffers and returns to the created state.
// Runs |completion_callback| with PP_OK synchronously.
bool VideoDecoderSession::Teardown(pp::CompletionCallback completion_callback) {
  assert(state_ == kInitialized);
  // Teardown the resources.
  FreeInputBuffers();
  ChangeState(kCreated);
  completion_callback.Run(PP_OK);
  return true;
}
+
// Decoder callback: asks the session to supply |requested_num_of_buffers|
// picture buffers described by |buffer_properties| (width/height etc. —
// exact encoding is defined by the Pepper video decoder API; TODO confirm).
// If the display cannot provide a buffer, the whole session is aborted.
void VideoDecoderSession::ProvidePictureBuffers(
    uint32_t requested_num_of_buffers,
    const std::vector<uint32_t>& buffer_properties) {
  // Currently we support only GLES buffer allocation.
  std::vector<PP_GLESBuffer_Dev> buffers;
  for (uint32_t i = 0; i < requested_num_of_buffers; i++) {
    PP_GLESBuffer_Dev gles_buffer;
    if (!display_->ProvideGLESPictureBuffer(buffer_properties, &gles_buffer)) {
      video_decoder_->Abort(cb_factory_.NewCallback(
          &VideoDecoderSession::OnAbortDone));
      return;
    }
    buffers.push_back(gles_buffer);
  }
  video_decoder_->AssignGLESBuffers(buffers.size(), buffers);
}
+
// Decoder callback: the decoder no longer needs |picture_buffer_id|; forward
// the dismissal to the display so the backing texture can be released.
void VideoDecoderSession::DismissPictureBuffer(int32_t picture_buffer_id) {
  if (!display_->DismissPictureBuffer(picture_buffer_id)) {
    assert(!"Failed to dismiss picture buffer properly");
    return;
  }
}
+
// Decoder callback: a decoded picture is ready. Draw it; the picture buffer
// is recycled from OnDrawPictureDone() once drawing completes.
void VideoDecoderSession::PictureReady(const PP_Picture_Dev& picture) {
  display_->DrawPicture(picture, cb_factory_.NewCallback(
      &VideoDecoderSession::OnDrawPictureDone, picture.picture_buffer_id));
}
+
// Decoder callback: no more input will arrive. Flush the decoder; end-of-
// stream handling continues in OnInternalFlushDone() once the flush is done.
void VideoDecoderSession::NotifyEndOfStream() {
  end_of_stream_ = true;
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderSession::OnInternalFlushDone));
}
+
// Decoder callback: the decoder hit an error. |error| is currently ignored.
// NOTE(review): end_of_stream_ is not set on this path, so the subsequent
// OnInternalFlushDone() will hit its "Unhandled flush completion" assert —
// confirm the intended error-recovery behavior.
void VideoDecoderSession::NotifyError(PP_VideoDecodeError_Dev error) {
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderSession::OnInternalFlushDone));
}
+
// Returns a monotonically increasing id shared by buffers of all sessions.
// Not exactly unique in the current form (no overflow or thread-safety
// handling) but close enough for use case.
int32_t VideoDecoderSession::GetUniqueId() {
  return next_id_++;
}
+
+void VideoDecoderSession::OnInitializeDone(int32_t result,
+ pp::CompletionCallback callback) {
+ if (state_ != kCreated) {
+ ChangeState(kCreated);
+ callback.Run(PP_ERROR_ABORTED);
+ }
+ if (result != PP_OK) {
+ ChangeState(kInitialized);
+ callback.Run(result);
+ }
+ callback.Run(PP_OK);
+}
+
// Completion callback for a Decode() call. |result| is currently ignored.
void VideoDecoderSession::OnBitstreamBufferProcessed(
    int32_t result,
    int32_t bitstream_buffer_id) {
  // Reuse each bitstream buffer that has been processed by reading data into it
  // as long as there is more and pass that for decoding.
  ReadAndDispatchBitstreamUnit(bitstream_buffer_id);
}
+
// Completion callback for a DrawPicture() call: the picture has been shown,
// so hand the buffer back to the decoder for reuse. |result| is ignored.
void VideoDecoderSession::OnDrawPictureDone(int32_t result,
                                            int32_t picture_buffer_id) {
  video_decoder_->ReusePictureBuffer(picture_buffer_id);
}
+
// Completion callback for a user-requested Flush(). Restores |target_state|
// (the state captured when the flush was issued) and forwards |result| to
// the user's callback.
void VideoDecoderSession::OnUserFlushDone(int32_t result,
                                          State target_state,
                                          pp::CompletionCallback callback) {
  assert(state_ == kFlushing);
  // It was a Flush request, return to the state where we started.
  ChangeState(target_state);
  callback.Run(result);
}
+
// Completion callback for internally-triggered flushes (end of stream or
// decoder error). Only the end-of-stream case is handled today; the error
// path from NotifyError() falls into the assert below.
void VideoDecoderSession::OnInternalFlushDone(int32_t result) {
  if (end_of_stream_) {
    // It was end of stream flush; abort to finish the session.
    video_decoder_->Abort(cb_factory_.NewCallback(
        &VideoDecoderSession::OnAbortDone));
  } else {
    assert(!"Unhandled flush completion!");
  }
}
+
// Decoder abort finished; the session is over, notify the owning client.
void VideoDecoderSession::OnAbortDone(int32_t result) {
  client_->OnSessionCompleted(result);
}
+
// Fetches the PPB_Buffer_Dev interface and allocates the pool of bitstream
// buffers used to feed the decoder. Returns true on success.
bool VideoDecoderSession::AllocateInputBuffers() {
  // NOTE(review): fetching the interface here rather than in the constructor
  // was flagged in review; moving it requires ctor changes outside this block.
  buffer_if_ = static_cast<const struct PPB_Buffer_Dev*>(
      pp::Module::Get()->GetBrowserInterface(PPB_BUFFER_DEV_INTERFACE));
  if (!buffer_if_) {
    return false;
  }
  // Allocate |kBitstreamBufferCount| bitstream buffers of
  // |kBitstreamBufferSize| bytes.
  for (int32_t i = 0; i < kBitstreamBufferCount; i++) {
    PP_VideoBitstreamBuffer_Dev bitstream_buffer;
    bitstream_buffer.data = buffer_if_->Create(instance_->pp_instance(),
                                               kBitstreamBufferSize);
    // NOTE(review): on failure, buffers created by earlier iterations are not
    // released here — callers are expected to tear the session down.
    if (bitstream_buffer.data == 0) {
      return false;
    }
    // |size| tracks the amount of valid data in the buffer, not its capacity.
    bitstream_buffer.size = 0;
    bitstream_buffer.id = GetUniqueId();
    bitstream_buffers_[bitstream_buffer.id] = bitstream_buffer;
  }
  return true;
}
+
+void VideoDecoderSession::FreeInputBuffers() {
+ std::map<int32_t, PP_VideoBitstreamBuffer_Dev>::iterator it;
+ for (it = bitstream_buffers_.begin(); it != bitstream_buffers_.end(); it++) {
+ std::pair<int32_t, PP_VideoBitstreamBuffer_Dev> pair = *it;
+ PP_VideoBitstreamBuffer_Dev bitstream_buffer = pair.second;
+ pp::Module::Get()->core()->ReleaseResource(bitstream_buffer.data);
+ bitstream_buffers_.erase(it);
+ }
+}
+
+bool VideoDecoderSession::ReadAndDispatchBitstreamUnit(
+ int32_t bitstream_buffer_id) {
+ // Get the target memory and read the bitstream unit into it.
+ if (bitstream_buffers_.find(bitstream_buffer_id) ==
+ bitstream_buffers_.end()) {
+ return false;
+ }
+ PP_VideoBitstreamBuffer_Dev bitstream_buffer =
+ bitstream_buffers_[bitstream_buffer_id];
+ void* target_mem = buffer_if_->Map(bitstream_buffer.data);
+ if (target_mem == NULL) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: no {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ uint32_t size_in_bytes = 0;
+ if (!buffer_if_->Describe(bitstream_buffer.data, &size_in_bytes)) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: no {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ bool success = video_source_->GetBitstreamUnit(target_mem, size_in_bytes,
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 It's wasteful to allocate a buffer of kBitstreamBu
Ville-Mikko Rautio 2011/06/03 13:24:39 Already commented above. You cannot arbitrarily sp
+ &bitstream_buffer.size);
+ if (!success) {
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Nit: no {}
Ville-Mikko Rautio 2011/06/03 13:24:39 Done.
+ return false;
+ }
+ // Dispatch the bitstream unit to the decoder.
+ success = video_decoder_->Decode(
+ bitstream_buffers_[bitstream_buffer_id],
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 bitstream_buffer?
Ville-Mikko Rautio 2011/06/03 13:24:39 Yep. Also, I changed the bitstream_buffer into ref
+ cb_factory_.NewCallback(
+ &VideoDecoderSession::OnBitstreamBufferProcessed,
+ bitstream_buffer_id));
+ // Finally unmap the buffer for this round.
+ buffer_if_->Unmap(bitstream_buffer.data);
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Unmap deletes the bitstream data. Shouldn't we onl
Ville-Mikko Rautio 2011/06/03 13:24:39 True. Done.
+ return success;
+}
+
// Single point for all state transitions. No validation is done here; the
// callers assert the expected source state themselves.
void VideoDecoderSession::ChangeState(State to_state) {
  state_ = to_state;
}

// Static id counter shared by all sessions; see GetUniqueId().
int32_t VideoDecoderSession::next_id_ = 1;
+
// Pass-through vertex shader: forwards position and texture coordinates
// unchanged to the fragment stage.
static const char kVertexShader[] =
    "precision highp float; precision highp int;\n"
    "varying vec2 interp_tc;\n"
    "\n"
    "attribute vec4 in_pos;\n"
    "attribute vec2 in_tc;\n"
    "\n"
    "void main() {\n"
    "  interp_tc = in_tc;\n"
    "  gl_Position = in_pos;\n"
    "}\n";

// Color shader for EGLImage: samples the decoded picture texture directly.
static const char kFragmentShaderEgl[] =
    "precision mediump float;\n"
    "precision mediump int;\n"
    "varying vec2 interp_tc;\n"
    "\n"
    "uniform sampler2D tex;\n"
    "\n"
    "void main() {\n"
    "  gl_FragColor = texture2D(tex, interp_tc);\n"
    "}\n";

// Buffer size for shader compile/link error logs.
static const unsigned int kShaderErrorSize = 4096;
+
// Pepper notification that the 3D context was lost; this example does not
// attempt recovery, so treat it as fatal.
void GLES2Display::Graphics3DContextLost() {
  assert(!"GLES2: Unexpectedly lost graphics context");
}
+
// Sets up the GL context/surface and compiles/links the shader program.
// Returns false if GL initialization fails.
bool GLES2Display::Initialize() {
  if (!InitGL(surface_size_.width, surface_size_.height)) {
    return false;
  }
  ProgramShaders();
  return true;
}
+
// Allocates a GL texture for the decoder to render into and fills in
// |picture_buffer| with its ids. |buffer_properties| is currently unused.
// NOTE(review): no TexImage2D is issued, so no texture storage is defined
// here — confirm how/where the decoder attaches picture data.
bool GLES2Display::ProvideGLESPictureBuffer(
    const std::vector<uint32_t>& buffer_properties,
    PP_GLESBuffer_Dev* picture_buffer) {
  GLuint texture;
  // Generate texture and bind (effectively allocate) it.
  gles2_if_->GenTextures(context_->pp_resource(), 1, &texture);
  gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture);
  picture_buffer->context = 0;  // TODO(vmr): Get proper context id.
  picture_buffer->texture_id = texture;
  picture_buffer->info.id = VideoDecoderSession::GetUniqueId();
  picture_buffer->info.size.width = surface_size_.width;
  picture_buffer->info.size.height = surface_size_.height;
  // Store the values into the map for GLES buffers.
  gles_buffers_[picture_buffer->info.id] = *picture_buffer;
  assertNoGLError();
  return true;
}
+
// Deletes the texture backing |picture_buffer_id| and forgets the buffer.
bool GLES2Display::DismissPictureBuffer(int32_t picture_buffer_id) {
  gles2_if_->DeleteTextures(context_->pp_resource(), 1,
                            &gles_buffers_[picture_buffer_id].texture_id);
  gles_buffers_.erase(picture_buffer_id);
  return true;
}
+
// Draws the decoded |picture|'s texture as a full-screen quad and swaps the
// surface. |completion_callback| is run when the swap completes.
bool GLES2Display::DrawPicture(const PP_Picture_Dev& picture,
    pp::CompletionCallback completion_callback) {
  // Decoder has finished decoding picture into the texture, we'll have to just
  // draw the texture to the color buffer and swap the surfaces.
  // Clear the color buffer.
  gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT |
                   GL_DEPTH_BUFFER_BIT);
  // Load the texture into texture unit 0.
  gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
  gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D,
                         gles_buffers_[picture.picture_buffer_id].texture_id);
  // Draw the texture.
  // NOTE(review): no pixel data is ever uploaded into the texture here; see
  // ProvideGLESPictureBuffer — confirm the decoder fills it.
  gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
  // Force the execution of pending commands.
  // TODO(vmr): Do we have to do this? Can we rely command buffer to execute the
  // commands without Finish call?
  gles2_if_->Finish(context_->pp_resource());
  assertNoGLError();

  int32_t error = surface_->SwapBuffers(completion_callback);
  if (error != PP_OK) {
    return false;
  }
  assertNoGLError();
  return true;
}
+
// Debug helper: asserts that the GL context has no pending error.
void GLES2Display::assertNoGLError() {
  assert(!gles2_if_->GetError(context_->pp_resource()));
}
+
+bool GLES2Display::InitGL(int width, int height) {
+ assert(width && height);
+ gles2_if_ = static_cast<const struct PPB_OpenGLES2_Dev*>(
+ pp::Module::Get()->GetBrowserInterface(PPB_OPENGLES2_DEV_INTERFACE));
+ // Firstly, we need OpenGL ES context associated with the display our plugin
+ // is rendering to.
+ if (context_) delete(context_);
+ context_ = new pp::Context3D_Dev(*instance_, 0, pp::Context3D_Dev(), NULL);
+ assert(!context_->is_null());
+ // Then we need surface bound to our fresh context. We'll be actually drawing
+ // on this surface and swapping that surface to refresh the displayable data
+ // of the plugin.
+ int32_t surface_attributes[] = {
+ PP_GRAPHICS3DATTRIB_WIDTH, surface_size_.width,
+ PP_GRAPHICS3DATTRIB_HEIGHT, surface_size_.height,
+ PP_GRAPHICS3DATTRIB_NONE
+ };
+ if (surface_) delete(surface_);
+ surface_ = new pp::Surface3D_Dev(*instance_, 0, surface_attributes);
+ assert(!surface_->is_null());
+ int32_t bind_error = context_->BindSurfaces(*surface_, *surface_);
+ if (!bind_error) {
+ assert(bind_error);
+ }
+ assertNoGLError();
+
+ bool success = instance_->BindGraphics(*surface_);
+ if (!success) {
+ assert(success);
+ }
+ // Clear the color buffer with opaque white for starters.
+ gles2_if_->ClearColor(context_->pp_resource(), 1.0, 1.0, 1.0, 0.0);
+ gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
+ // Set the viewport to match the whole GL window.
+ gles2_if_->Viewport(context_->pp_resource(), 0, 0, surface_size_.width,
+ surface_size_.height);
+ assertNoGLError();
+ return true;
+}
+
// Compiles |source| (of byte length |size|) as a shader of |type| and
// attaches it to |program|. Asserts on compile failure.
void GLES2Display::CreateShader(GLuint program, GLenum type,
                                const char* source,
                                int size) {
  GLuint shader = gles2_if_->CreateShader(context_->pp_resource(), type);
  gles2_if_->ShaderSource(
      context_->pp_resource(), shader, 1, &source, &size);
  gles2_if_->CompileShader(context_->pp_resource(), shader);

  int result = GL_FALSE;
  gles2_if_->GetShaderiv(
      context_->pp_resource(), shader, GL_COMPILE_STATUS, &result);
  if (!result) {
    // NOTE(review): |log| is fetched but never reported anywhere; surface it
    // in the failure path.
    char log[kShaderErrorSize];
    int len = 0;
    gles2_if_->GetShaderInfoLog(context_->pp_resource(), shader,
                                kShaderErrorSize - 1, &len, log);
    log[len] = 0;
    assert(result);
  }
  // Attach then delete: GL defers the actual deletion until the shader is
  // detached from the program.
  gles2_if_->AttachShader(context_->pp_resource(), program, shader);
  gles2_if_->DeleteShader(context_->pp_resource(), shader);
}
+
// Links |program_| and makes it current. Asserts on link failure.
// NOTE(review): the parameter shadows the member of the same name; both
// refer to the PPB_OpenGLES2_Dev interface.
void GLES2Display::LinkProgram(const PPB_OpenGLES2_Dev* gles2_if_ ) {
  gles2_if_->LinkProgram(context_->pp_resource(), program_);
  int result = GL_FALSE;
  gles2_if_->GetProgramiv(context_->pp_resource(), program_, GL_LINK_STATUS,
                          &result);
  if (!result) {
    // Like CreateShader(), the log is fetched but never reported.
    char log[kShaderErrorSize];
    int len = 0;
    gles2_if_->GetProgramInfoLog(context_->pp_resource(), program_,
                                 kShaderErrorSize - 1, &len, log);
    log[len] = 0;
    assert(result);
  }
  gles2_if_->UseProgram(context_->pp_resource(), program_);
}
+
+void GLES2Display::ProgramShaders() {
+ // Vertices for a full screen quad.
+ static const float kVertices[] = {
+ -1.f, 1.f,
+ -1.f, -1.f,
+ 1.f, 1.f,
+ 1.f, -1.f,
+ };
+
+ // Texture Coordinates mapping the entire texture for EGL image.
+ static const float kTextureCoordsEgl[] = {
+ 0, 1,
+ 0, 0,
+ 1, 1,
+ 1, 0,
+ };
+ program_ = gles2_if_->CreateProgram(context_->pp_resource());
+
+ // Create shader for EGL image
+ CreateShader(program_, GL_VERTEX_SHADER,
+ kVertexShader, sizeof(kVertexShader));
+ CreateShader(program_, GL_FRAGMENT_SHADER,
+ kFragmentShaderEgl, sizeof(kFragmentShaderEgl));
+ LinkProgram(gles2_if_);
+
+ assertNoGLError();
+ // Bind parameters.
+ gles2_if_->Uniform1i(context_->pp_resource(), gles2_if_->
+ GetUniformLocation(context_->pp_resource(), program_,
+ "tex"), 0);
+ gles2_if_->GenBuffers(context_->pp_resource(), 1, &vertex_buffer_);
+ gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER,
+ vertex_buffer_);
+ gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER,
+ 8 * sizeof(kVertices[0]), kVertices, GL_STREAM_DRAW);
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Should this be GL_STATIC_DRAW?
Ville-Mikko Rautio 2011/06/03 13:24:39 I think both will work. I took your suggestion. Do
+
+ assertNoGLError();
+ int pos_location = gles2_if_->GetAttribLocation(context_->pp_resource(),
+ program_, "in_pos");
+ gles2_if_->EnableVertexAttribArray(context_->pp_resource(), pos_location);
+ gles2_if_->VertexAttribPointer(context_->pp_resource(), pos_location, 2,
+ GL_FLOAT, GL_FALSE, 0, 0);
+
+ assertNoGLError();
+ gles2_if_->GenBuffers(context_->pp_resource(), 1, &fragment_buffer_);
+ gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER,
+ fragment_buffer_);
+ gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER,
+ 8 * sizeof(kTextureCoordsEgl[0]),
+ kTextureCoordsEgl, GL_STREAM_DRAW);
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 Ditto above.
Ville-Mikko Rautio 2011/06/03 13:24:39 Ditto.
+ assertNoGLError();
+ int tc_location = gles2_if_->GetAttribLocation(context_->pp_resource(),
+ program_, "in_tc");
+ gles2_if_->EnableVertexAttribArray(context_->pp_resource(), tc_location);
+ gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2,
+ GL_FLOAT, GL_FALSE, 0, kTextureCoordsEgl);
+ gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2,
+ GL_FLOAT, GL_FALSE, 0, 0);
+ gles2_if_->Enable(context_->pp_resource(), GL_DEPTH_TEST);
vrk (LEFT CHROMIUM) 2011/06/02 01:47:02 What is GL_DEPTH_TEST?
Ville-Mikko Rautio 2011/06/03 13:24:39 I believe it is feature of the fragment pipeline i
+ assertNoGLError();
+}
+

Powered by Google App Engine
This is Rietveld 408576698