OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "ppapi/tests/test_video_decoder.h" | 5 #include "ppapi/tests/test_video_decoder.h" |
6 | 6 |
7 #include "ppapi/c/dev/ppb_video_decoder_dev.h" | 7 #include <cstring> |
| 8 #include <fstream> |
| 9 #include <iostream> |
| 10 |
| 11 #include "ppapi/c/dev/pp_graphics_3d_dev.h" |
| 12 #include "ppapi/c/dev/ppb_buffer_dev.h" |
8 #include "ppapi/c/dev/ppb_testing_dev.h" | 13 #include "ppapi/c/dev/ppb_testing_dev.h" |
9 #include "ppapi/c/ppb_var.h" | 14 #include "ppapi/c/pp_errors.h" |
| 15 #include "ppapi/cpp/dev/context_3d_dev.h" |
| 16 #include "ppapi/cpp/dev/surface_3d_dev.h" |
| 17 #include "ppapi/cpp/dev/video_decoder_dev.h" |
| 18 #include "ppapi/lib/gl/include/GLES2/gl2.h" |
10 #include "ppapi/tests/testing_instance.h" | 19 #include "ppapi/tests/testing_instance.h" |
11 | 20 |
// Registers this test case with the ppapi test harness under the name
// "VideoDecoder".
REGISTER_TEST_CASE(VideoDecoder);
13 | 22 |
14 bool TestVideoDecoder::Init() { | 23 bool TestVideoDecoder::Init() { |
15 video_decoder_interface_ = reinterpret_cast<PPB_VideoDecoder_Dev const*>( | 24 return InitTestingInterface(); |
16 pp::Module::Get()->GetBrowserInterface(PPB_VIDEODECODER_DEV_INTERFACE)); | |
17 var_interface_ = reinterpret_cast<PPB_Var const*>( | |
18 pp::Module::Get()->GetBrowserInterface(PPB_VAR_INTERFACE)); | |
19 return video_decoder_interface_ && var_interface_ && InitTestingInterface(); | |
20 } | 25 } |
21 | 26 |
// Runs each sub-test in turn and logs its result with the harness.
void TestVideoDecoder::RunTest() {
  instance_->LogTest("Configurations", TestConfigurations());
  instance_->LogTest("H264", TestH264());
}
25 | 31 |
// Exits the nested message loop started by the testing interface, letting an
// asynchronous test step complete.
void TestVideoDecoder::QuitMessageLoop() {
  testing_interface_->QuitMessageLoop(instance_->pp_instance());
}
29 | 35 |
30 std::string TestVideoDecoder::TestCreate() { | 36 std::string TestVideoDecoder::TestConfigurations() { |
31 PP_Resource decoder = video_decoder_interface_->Create( | 37 std::vector<uint32_t> empty_config; |
32 instance_->pp_instance(), NULL); | 38 std::vector<uint32_t> configs; |
33 if (decoder == 0) { | 39 configs = pp::VideoDecoder::GetConfigs(instance_, empty_config); |
34 return "Error creating the decoder"; | |
35 } | |
36 PASS(); | 40 PASS(); |
37 } | 41 } |
| 42 |
// Placeholder for the H.264 decode test; always passes until the decode path
// below is wired up to it.
std::string TestVideoDecoder::TestH264() {
  PASS();
}
| 46 |
// Pull-based video source to read video data from a file.
//
// Open() reads the whole file into an in-memory buffer; Read() then returns
// one H.264 Annex-B unit at a time, where units are delimited by the 4-byte
// start code 00 00 00 01 (the start code is included in the returned bytes).
class TestVideoSource {
 public:
  TestVideoSource()
      : file_length_(0),
        offset_(0),
        mem_(NULL) {}  // BUG FIX: |mem_| was left uninitialized.

  ~TestVideoSource() {
    delete[] mem_;  // BUG FIX: the file buffer was previously never freed.
  }

  // Reads the file at |url| fully into memory. Returns false if the file
  // cannot be opened. May be called again; the previous buffer is released
  // and the read position reset.
  bool Open(const std::string& url) {
    // TODO(vmr): Use file_util::ReadFileToString or equivalent to read the
    // file if one-shot reading is used.
    // BUG FIX: the stream is now stack-allocated, so it cannot leak on the
    // early-return path and closes itself on scope exit.
    std::ifstream file(url.c_str(),
                       std::ios::in | std::ios::binary | std::ios::ate);
    if (!file.good())
      return false;
    file.seekg(0, std::ios::end);
    uint32_t length = file.tellg();
    file.seekg(0, std::ios::beg);
    delete[] mem_;  // Release any buffer from an earlier Open().
    mem_ = new uint8_t[length];
    file.read(reinterpret_cast<char*>(mem_), length);
    file_length_ = length;
    offset_ = 0;
    return true;
  }

  // Reads the next unit from the input stream into |target_mem|.
  // Returns the number of read bytes on success, 0 when there was no valid
  // data to be read and -1 if the user gave a NULL or too small buffer.
  // TODO(vmr): Modify to differentiate between errors and EOF.
  int32_t Read(uint8_t* target_mem, uint32_t size) {
    if (!target_mem)
      return -1;
    uint8_t* unit_begin = NULL;
    uint8_t* unit_end = NULL;
    uint8_t* ptr = mem_ + offset_;
    // Scan for this unit's start code, then for the start code of the
    // following unit, which marks this unit's end.
    while (offset_ + 4 < file_length_) {
      if (ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 1) {
        // Start code found.
        if (!unit_begin) {
          unit_begin = ptr;
        } else {
          // The next unit's start code ends the current unit.
          unit_end = ptr;
          break;
        }
      }
      ptr++;
      offset_++;
    }
    if (unit_begin && offset_ + 4 == file_length_) {
      // Last unit. Set the unit_end to point one past the last byte.
      unit_end = ptr + 4;
      offset_ += 4;
    } else if (!unit_begin || !unit_end) {
      // No unit start codes found in buffer.
      return 0;
    }
    if (static_cast<int32_t>(size) >= unit_end - unit_begin) {
      memcpy(target_mem, unit_begin, unit_end - unit_begin);
      return unit_end - unit_begin;
    }
    // Rewind to the beginning start code if there is one as it should be
    // returned with next Read().
    offset_ = unit_begin - mem_;
    return -1;
  }

 private:
  uint32_t file_length_;  // Total number of bytes in |mem_|.
  uint32_t offset_;       // Current scan position within |mem_|.
  uint8_t* mem_;          // Owned buffer holding the whole file; see Open().
};
| 124 |
// Wraps a TestVideoSource for |filename|; the file itself is opened lazily on
// the first GetBitstreamUnit() call.
LocalVideoBitstreamSource::LocalVideoBitstreamSource(std::string filename)
    : file_(filename),
      video_source_(new TestVideoSource()),
      video_source_open_(false) {
}
| 130 |
// Releases the owned TestVideoSource.
LocalVideoBitstreamSource::~LocalVideoBitstreamSource() {
  delete video_source_;
}
| 134 |
| 135 bool LocalVideoBitstreamSource::GetBitstreamUnit( |
| 136 void* target_mem, |
| 137 uint32_t target_mem_size_in_bytes, |
| 138 int32_t* unit_size_in_bytes) { |
| 139 if (!video_source_open_) { |
| 140 if (!video_source_->Open(file_)) { |
| 141 return false; |
| 142 } |
| 143 video_source_open_ = true; |
| 144 } |
| 145 int32_t read_bytes = video_source_->Read(static_cast<uint8_t*>(target_mem), |
| 146 target_mem_size_in_bytes); |
| 147 if (read_bytes <= 0) { |
| 148 return false; |
| 149 } |
| 150 *unit_size_in_bytes = read_bytes; |
| 151 return true; |
| 152 } |
| 153 |
// Constants used by VideoDecoderClient.
static const int32_t kBitstreamBufferCount = 3;
// NOTE(review): 256 * 1024 * 1024 is 256 MB *per buffer* (768 MB total with
// kBitstreamBufferCount == 3). This looks like it was meant to be 256 KB
// (256 * 1024) -- confirm before relying on it.
static const int32_t kBitstreamBufferSize = 256 * 1024 * 1024;
static const int32_t kDefaultWidth = 640;
static const int32_t kDefaultHeight = 480;
| 159 |
// Sets up the bitstream buffers, the display and the decoder, then moves the
// client to the kInitialized state. Returns false on any setup failure.
bool VideoDecoderClient::Initialize() {
  // Default implementation just assumes everything is set up.
  if (!InitializeVideoBitstreamInterface()) {
    return false;
  }
  if (!display_->Initialize(kDefaultWidth, kDefaultHeight)) {
    return false;
  }
  video_decoder_ = new pp::VideoDecoder(instance_, decoder_config_, this);
  // NOTE(review): plain |new| throws on failure rather than returning NULL,
  // so this check can never trigger as written.
  if (!video_decoder_) {
    return false;
  }
  ChangeState(kInitialized);
  return true;
}
| 175 |
| 176 bool VideoDecoderClient::Run() { |
| 177 assert(state_ == kInitialized); |
| 178 // Start the streaming by dispatching the first buffers one by one. |
| 179 for (std::map<int32_t, PP_VideoBitstreamBuffer_Dev>::iterator it = |
| 180 bitstream_buffers_.begin(); |
| 181 it == bitstream_buffers_.end(); |
| 182 it++) { |
| 183 ReadAndDispatchBitstreamUnit((*it).first); |
| 184 } |
| 185 // Once streaming has been started, we're running. |
| 186 ChangeState(kRunning); |
| 187 return true; |
| 188 } |
| 189 |
// Stops playback by returning to the kInitialized state. Must only be called
// while running.
bool VideoDecoderClient::Stop() {
  assert(state_ == kRunning);
  // Stop the playback.
  ChangeState(kInitialized);
  return true;
}
| 196 |
// Issues an asynchronous flush and moves to kFlushing. The current state
// (kRunning) is bound into the callback *before* ChangeState, so
// OnUserFlushDone knows which state to restore.
bool VideoDecoderClient::Flush() {
  assert(state_ == kRunning);
  // Issue the flush request.
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderClient::OnUserFlushDone, state_));
  ChangeState(kFlushing);
  return true;
}
| 205 |
// Tears down to the kCreated state. Must only be called once stopped
// (kInitialized). NOTE(review): no resources are actually released here yet.
bool VideoDecoderClient::Teardown() {
  assert(state_ == kInitialized);
  // Teardown the resources.
  ChangeState(kCreated);
  return true;
}
| 212 |
| 213 void VideoDecoderClient::ProvidePictureBuffers( |
| 214 uint32_t requested_num_of_buffers, |
| 215 const std::vector<uint32_t>& buffer_properties) { |
| 216 std::vector<PP_GLESBuffer_Dev> buffers; |
| 217 for (uint32_t i = 0; i < requested_num_of_buffers; i++) { |
| 218 PP_GLESBuffer_Dev gles_buffer; |
| 219 if (!display_->ProvideGLESPictureBuffer(buffer_properties, &gles_buffer)) { |
| 220 // TODO(vmr): Handle error properly. |
| 221 return; |
| 222 } |
| 223 buffers.push_back(gles_buffer); |
| 224 } |
| 225 video_decoder_->AssignGLESBuffers(buffers.size(), buffers); |
| 226 } |
| 227 |
| 228 void VideoDecoderClient::DismissPictureBuffer(int32_t picture_buffer_id) { |
| 229 if (!display_->DismissPictureBuffer(picture_buffer_id)) { |
| 230 // TODO(vmr): Handle error properly. |
| 231 return; |
| 232 } |
| 233 } |
| 234 |
// Decoder callback: a picture has been decoded into its texture; draw it and
// recycle the buffer in OnDrawPictureDone once the draw completes.
void VideoDecoderClient::PictureReady(const PP_Picture_Dev& picture) {
  display_->DrawPicture(picture, cb_factory_.NewCallback(
      &VideoDecoderClient::OnDrawPictureDone, picture.picture_buffer_id));
}
| 239 |
// Decoder callback: the stream has ended. Record the fact and flush out any
// remaining pictures; OnEOSFlushDone continues the shutdown.
void VideoDecoderClient::NotifyEndOfStream() {
  end_of_stream_ = true;
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderClient::OnEOSFlushDone));
}
| 245 |
// Decoder callback: a decode error occurred. |error| is currently ignored and
// the stream is simply flushed and aborted via the EOS path.
// NOTE(review): OnEOSFlushDone asserts |end_of_stream_|, which this path does
// not set -- in a debug build an error would trip that assert; confirm the
// intended behavior.
void VideoDecoderClient::NotifyError(PP_VideoDecodeError_Dev error) {
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderClient::OnEOSFlushDone));
}
| 250 |
// Returns a process-wide monotonically increasing id for buffers.
int32_t VideoDecoderClient::GetUniqueId() {
  // Not exactly unique in the current form but close enough for use case.
  return next_id_++;
}
| 255 |
// Decoder callback: all resources have been acquired; resume normal running.
void VideoDecoderClient::OnResourcesAcquired() {
  // We're running normally.
  ChangeState(kRunning);
}
| 260 |
// Completion callback for Decode(): refill the now-free buffer with the next
// unit and dispatch it again. |result| is currently ignored.
void VideoDecoderClient::OnBitstreamBufferProcessed(
    int32_t result,
    int32_t bitstream_buffer_id) {
  // Reuse each bitstream buffer that has been processed by reading data into it
  // as long as there is more and pass that for decoding.
  ReadAndDispatchBitstreamUnit(bitstream_buffer_id);
}
| 268 |
// Completion callback for DrawPicture(): the texture has been presented, so
// the picture buffer can be handed back to the decoder for reuse.
void VideoDecoderClient::OnDrawPictureDone(int32_t result,
                                           int32_t picture_buffer_id) {
  video_decoder_->ReusePictureBuffer(picture_buffer_id);
}
| 273 |
// Completion callback for a user-initiated Flush(): restore the state that
// was current when the flush was issued (bound in Flush()).
void VideoDecoderClient::OnUserFlushDone(int32_t result,
                                         State target_state) {
  assert(state_ == kFlushing);
  // It was a Flush request, return to the running state.
  ChangeState(target_state);
}
| 280 |
// Completion callback for the end-of-stream flush: proceed to abort the
// decoder; OnAbortDone finishes the sequence.
void VideoDecoderClient::OnEOSFlushDone(int32_t result) {
  assert(end_of_stream_);
  // It was end of stream flush.
  video_decoder_->Abort(cb_factory_.NewCallback(
      &VideoDecoderClient::OnAbortDone));
}
| 287 |
// Completion callback for Abort(): end of the decode sequence; nothing left
// to do.
void VideoDecoderClient::OnAbortDone(int32_t result) {
  // We're done.
}
| 291 |
// Fetches the PPB_Buffer interface and pre-allocates the pool of bitstream
// buffers keyed by their unique ids. Returns false if the interface is
// unavailable or any allocation fails.
// NOTE(review): on a mid-loop failure, buffers created by earlier iterations
// remain in |bitstream_buffers_| -- confirm whether partial state is cleaned
// up elsewhere.
bool VideoDecoderClient::InitializeVideoBitstreamInterface() {
  buffer_if_ = static_cast<const struct PPB_Buffer_Dev*>(
      pp::Module::Get()->GetBrowserInterface(PPB_BUFFER_DEV_INTERFACE));
  if (!buffer_if_) {
    return false;
  }
  // Allocate |kBitstreamBufferCount| bitstream buffers of
  // |kBitstreamBufferSize| bytes.
  for (int32_t i = 0; i < kBitstreamBufferCount; i++) {
    PP_VideoBitstreamBuffer_Dev bitstream_buffer;
    bitstream_buffer.data = buffer_if_->Create(instance_->pp_instance(),
                                               kBitstreamBufferSize);
    if (bitstream_buffer.data == 0) {
      return false;
    }
    bitstream_buffer.size = 0;
    bitstream_buffer.id = GetUniqueId();
    bitstream_buffers_[bitstream_buffer.id] = bitstream_buffer;
  }
  return true;
}
| 313 |
| 314 bool VideoDecoderClient::ReadAndDispatchBitstreamUnit( |
| 315 int32_t bitstream_buffer_id) { |
| 316 // Get the target memory and read the bitstream unit into it. |
| 317 if (bitstream_buffers_.find(bitstream_buffer_id) == |
| 318 bitstream_buffers_.end()) { |
| 319 return false; |
| 320 } |
| 321 PP_VideoBitstreamBuffer_Dev bitstream_buffer = |
| 322 bitstream_buffers_[bitstream_buffer_id]; |
| 323 void* target_mem = buffer_if_->Map(bitstream_buffer.data); |
| 324 if (target_mem == NULL) { |
| 325 return false; |
| 326 } |
| 327 uint32_t size_in_bytes = 0; |
| 328 if (!buffer_if_->Describe(bitstream_buffer.data, &size_in_bytes)) { |
| 329 return false; |
| 330 } |
| 331 bool success = video_source_->GetBitstreamUnit(target_mem, size_in_bytes, |
| 332 &bitstream_buffer.size); |
| 333 if (!success) { |
| 334 return false; |
| 335 } |
| 336 // Dispatch the bitstream unit to the decoder. |
| 337 success = video_decoder_->Decode( |
| 338 bitstream_buffers_[bitstream_buffer_id], |
| 339 cb_factory_.NewCallback( |
| 340 &VideoDecoderClient::OnBitstreamBufferProcessed, |
| 341 bitstream_buffer_id)); |
| 342 return success; |
| 343 } |
| 344 |
// Single point for state transitions; useful as a hook for tracing.
void VideoDecoderClient::ChangeState(State to_state) {
  state_ = to_state;
}
| 348 |
// Seed for GetUniqueId(); ids start from 1 so 0 can mean "invalid".
int32_t VideoDecoderClient::next_id_ = 1;
| 350 |
// Pass-through vertex shader: forwards the position unchanged and hands the
// texture coordinate to the fragment stage.
static const char kVertexShader[] =
    "precision highp float; precision highp int;\n"
    "varying vec2 interp_tc;\n"
    "\n"
    "attribute vec4 in_pos;\n"
    "attribute vec2 in_tc;\n"
    "\n"
    "void main() {\n"
    "  interp_tc = in_tc;\n"
    "  gl_Position = in_pos;\n"
    "}\n";

// Color shader for EGLImage: samples the decoded picture texture.
static const char kFragmentShaderEgl[] =
    "precision mediump float;\n"
    "precision mediump int;\n"
    "varying vec2 interp_tc;\n"
    "\n"
    "uniform sampler2D tex;\n"
    "\n"
    "void main() {\n"
    "  gl_FragColor = texture2D(tex, interp_tc);\n"
    "}\n";

// Buffer size for compile errors.
static const unsigned int kShaderErrorSize = 4096;
| 378 |
// Context-lost handler: losing the 3D context is fatal for this test.
void GLES2Display::Graphics3DContextLost() {
  assert(!"GLES2: Unexpectedly lost graphics context");
}
| 382 |
| 383 bool GLES2Display::Initialize(int32_t width, int32_t height) { |
| 384 if (!InitGL(640, 480)) { |
| 385 return false; |
| 386 } |
| 387 ProgramShaders(); |
| 388 return true; |
| 389 } |
| 390 |
// Creates a GL texture for the decoder to render into and fills in
// |picture_buffer| with its id, a fresh unique id and the display dimensions.
// The buffer is remembered in |gles_buffers_| keyed by its unique id.
bool GLES2Display::ProvideGLESPictureBuffer(
    const std::vector<uint32_t>& buffer_properties,
    PP_GLESBuffer_Dev* picture_buffer) {
  GLuint texture;
  // Generate texture and bind (effectively allocate) it.
  gles2_if_->GenTextures(context_->pp_resource(), 1, &texture);
  gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture);
  picture_buffer->context = 0;  // TODO(vmr): Get proper context id.
  picture_buffer->texture_id = texture;
  picture_buffer->info.id = VideoDecoderClient::GetUniqueId();
  picture_buffer->info.size.width = width_;
  picture_buffer->info.size.height = height_;
  // Store the values into the map for GLES buffers.
  // NOTE(review): |buffer_properties| is currently unused.
  gles_buffers_[picture_buffer->info.id] = *picture_buffer;
  assertNoGLError();
  return true;
}
| 408 |
// Deletes the texture backing |picture_buffer_id| and forgets the buffer.
// NOTE(review): operator[] default-inserts an entry if the id is unknown, so
// an invalid id would delete texture 0 -- confirm callers never pass one.
bool GLES2Display::DismissPictureBuffer(int32_t picture_buffer_id) {
  gles2_if_->DeleteTextures(context_->pp_resource(), 1,
                            &gles_buffers_[picture_buffer_id].texture_id);
  gles_buffers_.erase(picture_buffer_id);
  return true;
}
| 415 |
// Draws the decoded picture's texture as a full-screen quad and swaps
// buffers; |completion_callback| fires when the swap completes. Returns false
// if SwapBuffers fails.
bool GLES2Display::DrawPicture(const PP_Picture_Dev& picture,
                               pp::CompletionCallback completion_callback) {
  // Decoder has finished decoding picture into the texture, we'll have to just
  // draw the texture to the color buffer and swap the surfaces.
  // Clear the color buffer.
  gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT |
                   GL_DEPTH_BUFFER_BIT);
  // Load the texture into texture unit 0.
  gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
  gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D,
                         gles_buffers_[picture.picture_buffer_id].texture_id);
  // Draw the texture.
  gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
  // Force the execution of pending commands.
  // TODO(vmr): Do we have to do this? Can we rely command buffer to execute the
  // commands without Finish call?
  gles2_if_->Finish(context_->pp_resource());
  assertNoGLError();

  int32_t error = surface_->SwapBuffers(completion_callback);
  if (error != PP_OK) {
    return false;
  }
  assertNoGLError();
  return true;
}
| 442 |
// Debug helper: asserts that the GL error flag is clear.
void GLES2Display::assertNoGLError() {
  assert(!gles2_if_->GetError(context_->pp_resource()));
}
| 446 |
| 447 bool GLES2Display::InitGL(int width, int height) { |
| 448 width_ = width; |
| 449 height_ = height; |
| 450 assert(width_ && height_); |
| 451 gles2_if_ = static_cast<const struct PPB_OpenGLES2_Dev*>( |
| 452 pp::Module::Get()->GetBrowserInterface(PPB_OPENGLES2_DEV_INTERFACE)); |
| 453 // Firstly, we need OpenGL ES context associated with the display our plugin |
| 454 // is rendering to. |
| 455 if (context_) delete(context_); |
| 456 context_ = new pp::Context3D_Dev(*instance_, 0, pp::Context3D_Dev(), NULL); |
| 457 assert(!context_->is_null()); |
| 458 // Then we need surface bound to our fresh context. We'll be actually drawing |
| 459 // on this surface and swapping that surface to refresh the displayable data |
| 460 // of the plugin. |
| 461 int32_t surface_attributes[] = { |
| 462 PP_GRAPHICS3DATTRIB_WIDTH, width_, |
| 463 PP_GRAPHICS3DATTRIB_HEIGHT, height_, |
| 464 PP_GRAPHICS3DATTRIB_NONE |
| 465 }; |
| 466 if (surface_) delete(surface_); |
| 467 surface_ = new pp::Surface3D_Dev(*instance_, 0, surface_attributes); |
| 468 assert(!surface_->is_null()); |
| 469 int32_t bind_error = context_->BindSurfaces(*surface_, *surface_); |
| 470 if (!bind_error) { |
| 471 assert(bind_error); |
| 472 } |
| 473 assertNoGLError(); |
| 474 |
| 475 bool success = instance_->BindGraphics(*surface_); |
| 476 if (!success) { |
| 477 assert(success); |
| 478 } |
| 479 // Clear the color buffer with opaque white for starters. |
| 480 gles2_if_->ClearColor(context_->pp_resource(), 1.0, 1.0, 1.0, 0.0); |
| 481 gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT); |
| 482 // Set the viewport to match the whole GL window. |
| 483 gles2_if_->Viewport(context_->pp_resource(), 0, 0, width_, height_); |
| 484 assertNoGLError(); |
| 485 return true; |
| 486 } |
| 487 |
// Compiles |source| (of |size| bytes) as a shader of |type|, asserts on
// compile errors (logging them first), attaches it to |program| and marks it
// for deletion (freed once the program is deleted).
void GLES2Display::CreateShader(GLuint program, GLenum type,
                                const char* source,
                                int size) {
  GLuint shader = gles2_if_->CreateShader(context_->pp_resource(), type);
  gles2_if_->ShaderSource(
      context_->pp_resource(), shader, 1, &source, &size);
  gles2_if_->CompileShader(context_->pp_resource(), shader);

  int result = GL_FALSE;
  gles2_if_->GetShaderiv(
      context_->pp_resource(), shader, GL_COMPILE_STATUS, &result);
  if (!result) {
    // Fetch the compiler log before asserting so the failure is explainable.
    char log[kShaderErrorSize];
    int len = 0;
    gles2_if_->GetShaderInfoLog(context_->pp_resource(), shader,
                                kShaderErrorSize - 1, &len, log);
    log[len] = 0;
    assert(result);
  }
  gles2_if_->AttachShader(context_->pp_resource(), program, shader);
  gles2_if_->DeleteShader(context_->pp_resource(), shader);
}
| 510 |
// Links |program_|, asserts on link errors (fetching the log first) and makes
// the program current.
// NOTE(review): the parameter shadows the member |gles2_if_| of the same
// name; the sole caller passes the member, so both refer to the same
// interface -- consider dropping the parameter.
void GLES2Display::LinkProgram(const PPB_OpenGLES2_Dev* gles2_if_ ) {
  gles2_if_->LinkProgram(context_->pp_resource(), program_);
  int result = GL_FALSE;
  gles2_if_->GetProgramiv(context_->pp_resource(), program_, GL_LINK_STATUS,
                          &result);
  if (!result) {
    char log[kShaderErrorSize];
    int len = 0;
    gles2_if_->GetProgramInfoLog(context_->pp_resource(), program_,
                                 kShaderErrorSize - 1, &len, log);
    log[len] = 0;
    assert(result);
  }
  gles2_if_->UseProgram(context_->pp_resource(), program_);
}
| 526 |
| 527 void GLES2Display::ProgramShaders() { |
| 528 // Vertices for a full screen quad. |
| 529 static const float kVertices[] = { |
| 530 -1.f, 1.f, |
| 531 -1.f, -1.f, |
| 532 1.f, 1.f, |
| 533 1.f, -1.f, |
| 534 }; |
| 535 |
| 536 // Texture Coordinates mapping the entire texture for EGL image. |
| 537 static const float kTextureCoordsEgl[] = { |
| 538 0, 1, |
| 539 0, 0, |
| 540 1, 1, |
| 541 1, 0, |
| 542 }; |
| 543 program_ = gles2_if_->CreateProgram(context_->pp_resource()); |
| 544 |
| 545 // Create shader for EGL image |
| 546 CreateShader(program_, GL_VERTEX_SHADER, |
| 547 kVertexShader, sizeof(kVertexShader)); |
| 548 CreateShader(program_, GL_FRAGMENT_SHADER, |
| 549 kFragmentShaderEgl, sizeof(kFragmentShaderEgl)); |
| 550 LinkProgram(gles2_if_); |
| 551 |
| 552 assertNoGLError(); |
| 553 // Bind parameters. |
| 554 gles2_if_->Uniform1i(context_->pp_resource(), gles2_if_-> |
| 555 GetUniformLocation(context_->pp_resource(), program_, |
| 556 "tex"), 0); |
| 557 gles2_if_->GenBuffers(context_->pp_resource(), 1, &vertex_); |
| 558 gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER, |
| 559 vertex_); |
| 560 gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER, |
| 561 8 * sizeof(kVertices[0]), kVertices, GL_STREAM_DRAW); |
| 562 |
| 563 assertNoGLError(); |
| 564 int pos_location = gles2_if_->GetAttribLocation(context_->pp_resource(), |
| 565 program_, "in_pos"); |
| 566 gles2_if_->EnableVertexAttribArray(context_->pp_resource(), pos_location); |
| 567 gles2_if_->VertexAttribPointer(context_->pp_resource(), pos_location, 2, |
| 568 GL_FLOAT, GL_FALSE, 0, 0); |
| 569 |
| 570 assertNoGLError(); |
| 571 gles2_if_->GenBuffers(context_->pp_resource(), 1, &fragment_); |
| 572 gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER, |
| 573 fragment_); |
| 574 gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER, |
| 575 8 * sizeof(kTextureCoordsEgl[0]), |
| 576 kTextureCoordsEgl, GL_STREAM_DRAW); |
| 577 assertNoGLError(); |
| 578 int tc_location = gles2_if_->GetAttribLocation(context_->pp_resource(), |
| 579 program_, "in_tc"); |
| 580 gles2_if_->EnableVertexAttribArray(context_->pp_resource(), tc_location); |
| 581 gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2, |
| 582 GL_FLOAT, GL_FALSE, 0, kTextureCoordsEgl); |
| 583 gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2, |
| 584 GL_FLOAT, GL_FALSE, 0, 0); |
| 585 gles2_if_->Enable(context_->pp_resource(), GL_DEPTH_TEST); |
| 586 assertNoGLError(); |
| 587 } |
| 588 |
OLD | NEW |