Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(226)

Side by Side Diff: ppapi/examples/video_decoder/video_decoder_session.cc

Issue 6961018: Pepper Video Decoder API tester plugin. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Rebase & compilation fixes related to it. Created 9 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « ppapi/examples/video_decoder/video_decoder_session.h ('k') | ppapi/ppapi_tests.gypi » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "ppapi/examples/video_decoder/video_decoder_session.h"
6
7 #include <cstring>
8 #include <fstream>
9 #include <iostream>
10
11 #include "ppapi/c/dev/pp_graphics_3d_dev.h"
12 #include "ppapi/c/dev/ppb_buffer_dev.h"
13 #include "ppapi/c/pp_errors.h"
14 #include "ppapi/cpp/dev/context_3d_dev.h"
15 #include "ppapi/cpp/dev/surface_3d_dev.h"
16 #include "ppapi/cpp/dev/video_decoder_dev.h"
17 #include "ppapi/lib/gl/include/GLES2/gl2.h"
18
// Pull-based video source that reads H.264 Annex B video data from a file.
class TestVideoSource {
 public:
  TestVideoSource()
      : file_length_(0),
        offset_(0),
        mem_(NULL) {}  // Was uninitialized; dtor would have freed garbage.

  ~TestVideoSource() {
    delete[] mem_;  // BUG FIX: mem_ is new[]-allocated; was plain delete.
  }

  // Reads the whole file at |url| into an internal buffer.
  // Returns false if the file cannot be opened.
  bool Open(const std::string& url) {
    // TODO(vmr): Use file_util::ReadFileToString or equivalent to read the
    // file if one-shot reading is used.
    std::ifstream file(url.c_str(),
                       std::ios::in | std::ios::binary | std::ios::ate);
    if (!file.good())
      return false;
    file.seekg(0, std::ios::end);
    uint32_t length = file.tellg();
    file.seekg(0, std::ios::beg);
    delete[] mem_;  // Release any previously loaded file.
    mem_ = new uint8_t[length];
    file.read(reinterpret_cast<char*>(mem_), length);
    file_length_ = length;
    offset_ = 0;
    file.close();
    return true;
  }

  // Reads the next access unit from the input stream into |target_mem|.
  // Returns the number of bytes read on success, 0 when there was no valid
  // data to be read and -1 if user gave NULL or too small buffer.
  // TODO(vmr): Modify to differentiate between errors and EOF.
  int32_t Read(uint8_t* target_mem, uint32_t size) {
    if (!target_mem)
      return -1;
    uint8_t* unit_begin = NULL;
    uint8_t* unit_end = NULL;
    uint8_t* ptr = mem_ + offset_;
    // Parses H.264 access units from the file. Access units are delimited by
    // the four-byte start code (0x00 0x00 0x00 0x01) as specified by
    // ISO 14496-10 Annex B. Outputted data will look like:
    //   Unit #1  0001 <data>
    //   Unit #2  0001 <data>
    //   ...
    //   Unit #N  0001 <data>
    while (offset_ + 4 < file_length_) {
      if (ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 1) {
        if (!unit_begin) {
          // First start code: this is where the unit begins.
          unit_begin = ptr;
        } else {
          // Second start code: it belongs to the NEXT unit, so stop here.
          unit_end = ptr;
          break;
        }
      }
      ptr++;
      offset_++;
    }
    if (unit_begin && offset_ + 4 == file_length_) {
      // Last unit in the file; consume the remaining bytes including the
      // final four the loop never inspected.
      unit_end = ptr + 4;
      offset_ += 4;
    } else if (!unit_begin || !unit_end) {
      // No unit start codes found in buffer.
      return 0;
    }
    if (static_cast<int32_t>(size) >= unit_end - unit_begin) {
      memcpy(target_mem, unit_begin, unit_end - unit_begin);
      return unit_end - unit_begin;
    }
    // Buffer too small: rewind to the unit's start code so the same unit is
    // returned by the next Read() with a bigger buffer.
    offset_ = unit_begin - mem_;
    return -1;
  }

 private:
  uint32_t file_length_;  // Total bytes loaded by Open().
  uint32_t offset_;       // Current parse position within mem_.
  uint8_t* mem_;          // new[]-allocated copy of the file; owned.
};
106
// Creates a bitstream source backed by the local file |filename|.
// The file is opened lazily on the first GetBitstreamUnit() call.
LocalVideoBitstreamSource::LocalVideoBitstreamSource(std::string filename)
    : file_(filename),
      video_source_(new TestVideoSource()),
      video_source_open_(false) {
}
112
LocalVideoBitstreamSource::~LocalVideoBitstreamSource() {
  // video_source_ is owned by this object (allocated in the constructor).
  delete video_source_;
}
116
117 bool LocalVideoBitstreamSource::GetBitstreamUnit(
118 void* target_mem,
119 uint32_t target_mem_size_in_bytes,
120 int32_t* unit_size_in_bytes) {
121 if (!video_source_open_) {
122 if (!video_source_->Open(file_))
123 return false;
124 video_source_open_ = true;
125 }
126 int32_t read_bytes = video_source_->Read(static_cast<uint8_t*>(target_mem),
127 target_mem_size_in_bytes);
128 if (read_bytes <= 0)
129 return false;
130
131 *unit_size_in_bytes = read_bytes;
132 return true;
133 }
134
// Out-of-line destructor for the abstract client interface.
VideoDecoderSessionClient::~VideoDecoderSessionClient() {
}
137
// Constants used by VideoDecoderSession.
static const int32_t kBitstreamBufferCount = 3;
// NOTE(review): 256 MB per bitstream buffer (x3 buffers = 768 MB of browser
// buffer allocations) looks excessive for single access units — was
// 256 * 1024 (256 KB) intended? TODO confirm.
static const int32_t kBitstreamBufferSize = 256 * 1024 * 1024;
// Default picture dimensions. NOTE(review): not referenced anywhere in this
// file — verify they are still needed.
static const int32_t kDefaultWidth = 640;
static const int32_t kDefaultHeight = 480;
143
144 VideoDecoderSession::VideoDecoderSession(
145 pp::Instance* instance,
146 VideoDecoderSessionClient* client,
147 VideoBitstreamInterface* video_bitstream_if,
148 DisplayInterface* display_if)
149 : cb_factory_(this),
150 instance_(instance),
151 client_(client),
152 video_source_(video_bitstream_if),
153 display_(display_if),
154 end_of_stream_(false),
155 state_(kCreated),
156 next_id_(1) {
157 buffer_if_ = static_cast<const struct PPB_Buffer_Dev*>(
158 pp::Module::Get()->GetBrowserInterface(PPB_BUFFER_DEV_INTERFACE));
159 assert(video_source_ && display_ && buffer_if_);
160 }
161
162 VideoDecoderSession::~VideoDecoderSession() {}
163
164 bool VideoDecoderSession::Initialize(
165 const PP_VideoConfigElement* decoder_config,
166 pp::CompletionCallback completion_callback) {
167 // Default implementation just assumes everything is set up.
168 if (!AllocateInputBuffers())
169 return false;
170
171 pp::CompletionCallback cb = cb_factory_.NewCallback(
172 &VideoDecoderSession::OnInitializeDone, completion_callback);
173 video_decoder_ = new pp::VideoDecoder(instance_, decoder_config, cb, this);
174 if (!video_decoder_)
175 return false;
176
177 return true;
178 }
179
180 bool VideoDecoderSession::Run(pp::CompletionCallback completion_callback) {
181 assert(state_ == kInitialized);
182 // Start the streaming by dispatching the first buffers one by one.
183 for (std::map<int32_t, PP_VideoBitstreamBuffer_Dev>::iterator it =
184 bitstream_buffers_.begin();
185 it == bitstream_buffers_.end();
186 it++) {
187 if (!ReadAndDispatchBitstreamUnit((*it).first))
188 return false;
189 }
190 // Once streaming has been started, we're running.
191 ChangeState(kRunning);
192 completion_callback.Run(PP_OK);
193 return true;
194 }
195
196 bool VideoDecoderSession::Stop(pp::CompletionCallback completion_callback) {
197 assert(state_ == kRunning);
198 // Stop the playback.
199 ChangeState(kInitialized);
200 return true;
201 }
202
203 bool VideoDecoderSession::Flush(pp::CompletionCallback completion_callback) {
204 assert(state_ == kRunning);
205 // Issue the flush request.
206 ChangeState(kFlushing);
207 video_decoder_->Flush(cb_factory_.NewCallback(
208 &VideoDecoderSession::OnUserFlushDone, state_, completion_callback));
209 return true;
210 }
211
// Releases the input buffers and returns the session to kCreated.
// Runs |completion_callback| with PP_OK. Must be called from kInitialized
// (i.e. after Stop()).
bool VideoDecoderSession::Teardown(pp::CompletionCallback completion_callback) {
  assert(state_ == kInitialized);
  // Teardown the resources.
  FreeInputBuffers();
  ChangeState(kCreated);
  completion_callback.Run(PP_OK);
  return true;
}
220
221 void VideoDecoderSession::ProvidePictureBuffers(
222 uint32_t req_num_of_bufs,
223 PP_Size dimensions,
224 enum PP_PictureBufferType_Dev type) {
225 // Currently we support only GLES buffer allocation.
226 if (type == PP_PICTUREBUFFERTYPE_GLESTEXTURE) {
227 std::vector<PP_GLESBuffer_Dev> buffers;
228 if (!display_->ProvideGLESPictureBuffers(req_num_of_bufs, dimensions,
229 buffers)) {
230 video_decoder_->Abort(cb_factory_.NewCallback(
231 &VideoDecoderSession::OnAbortDone));
232 return;
233 }
234 video_decoder_->AssignGLESBuffers(buffers);
235 } else {
236 assert(!"VideoDecoderSession does not support this type of pic buffers");
237 }
238 }
239
240 void VideoDecoderSession::DismissPictureBuffer(int32_t picture_buffer_id) {
241 if (!display_->DismissPictureBuffer(picture_buffer_id)) {
242 assert(!"Failed to dismiss picture buffer properly");
243 return;
244 }
245 }
246
// Decoder callback: a decoded picture is ready. Hand it to the display; the
// buffer is recycled in OnDrawPictureDone() once drawing completes.
void VideoDecoderSession::PictureReady(const PP_Picture_Dev& picture) {
  display_->DrawPicture(picture, cb_factory_.NewCallback(
      &VideoDecoderSession::OnDrawPictureDone, picture.picture_buffer_id));
}
251
// Decoder callback: input stream is exhausted. Flush out remaining pictures;
// OnInternalFlushDone() then aborts the decoder (end_of_stream_ marks this
// flush as the end-of-stream one).
void VideoDecoderSession::EndOfStream() {
  end_of_stream_ = true;
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderSession::OnInternalFlushDone));
}
257
// Decoder callback: a decode error occurred.
// NOTE(review): |error| is ignored, and since end_of_stream_ stays false the
// resulting OnInternalFlushDone() hits its "Unhandled flush completion"
// assert — confirm the intended error path.
void VideoDecoderSession::NotifyError(PP_VideoDecodeError_Dev error) {
  video_decoder_->Flush(cb_factory_.NewCallback(
      &VideoDecoderSession::OnInternalFlushDone));
}
262
263 void VideoDecoderSession::OnInitializeDone(int32_t result,
264 pp::CompletionCallback callback) {
265 if (state_ != kCreated) {
266 ChangeState(kCreated);
267 callback.Run(PP_ERROR_ABORTED);
268 }
269 if (result != PP_OK) {
270 ChangeState(kInitialized);
271 callback.Run(result);
272 }
273 callback.Run(PP_OK);
274 }
275
// Decoder has consumed the input buffer |bitstream_buffer_id|.
// Reuse each bitstream buffer that has been processed by reading data into it
// as long as there is more and pass that for decoding.
void VideoDecoderSession::OnBitstreamBufferProcessed(
    int32_t result,
    int32_t bitstream_buffer_id) {
  // Return value deliberately ignored: a false return here means EOF or read
  // failure, and EndOfStream()/NotifyError() drive the shutdown instead.
  ReadAndDispatchBitstreamUnit(bitstream_buffer_id);
}
283
// Display finished drawing; hand the picture buffer back to the decoder for
// reuse.
void VideoDecoderSession::OnDrawPictureDone(int32_t result,
                                            int32_t picture_buffer_id) {
  video_decoder_->ReusePictureBuffer(picture_buffer_id);
}
288
// Completion of a user-initiated Flush(). |target_state| is the state the
// session was in when Flush() was requested; restore it and report |result|
// through the user's |callback|.
void VideoDecoderSession::OnUserFlushDone(int32_t result,
                                          State target_state,
                                          pp::CompletionCallback callback) {
  assert(state_ == kFlushing);
  // It was a Flush request, return to the state where we started.
  ChangeState(target_state);
  callback.Run(result);
}
297
// Completion of an internally triggered flush (EndOfStream()/NotifyError()).
// NOTE(review): only the end-of-stream case is handled; the NotifyError()
// flush arrives here with end_of_stream_ == false and trips the assert.
void VideoDecoderSession::OnInternalFlushDone(int32_t result) {
  if (end_of_stream_) {
    // It was end of stream flush; tear the decoder down.
    video_decoder_->Abort(cb_factory_.NewCallback(
        &VideoDecoderSession::OnAbortDone));
  } else {
    assert(!"Unhandled flush completion!");
  }
}
307
// Decoder abort finished; notify the session owner that the whole session is
// complete.
void VideoDecoderSession::OnAbortDone(int32_t result) {
  client_->OnSessionCompleted(result);
}
311
312 bool VideoDecoderSession::AllocateInputBuffers() {
313 // Allocate |kBitstreamBufferCount| bitstream buffers of
314 // |kBitstreamBufferSize| bytes.
315 for (int32_t i = 0; i < kBitstreamBufferCount; i++) {
316 PP_VideoBitstreamBuffer_Dev bitstream_buffer;
317 bitstream_buffer.data = buffer_if_->Create(instance_->pp_instance(),
318 kBitstreamBufferSize);
319 if (bitstream_buffer.data == 0)
320 return false;
321 bitstream_buffer.size = 0;
322 bitstream_buffer.id = GetUniqueId();
323 bitstream_buffers_[bitstream_buffer.id] = bitstream_buffer;
324 }
325 return true;
326 }
327
328 void VideoDecoderSession::FreeInputBuffers() {
329 std::map<int32_t, PP_VideoBitstreamBuffer_Dev>::iterator it;
330 for (it = bitstream_buffers_.begin(); it != bitstream_buffers_.end(); it++) {
331 std::pair<int32_t, PP_VideoBitstreamBuffer_Dev> pair = *it;
332 PP_VideoBitstreamBuffer_Dev bitstream_buffer = pair.second;
333 pp::Module::Get()->core()->ReleaseResource(bitstream_buffer.data);
334 bitstream_buffers_.erase(it);
335 }
336 }
337
338 bool VideoDecoderSession::ReadAndDispatchBitstreamUnit(
339 int32_t bitstream_buffer_id) {
340 // Get the target memory and read the bitstream unit into it.
341 if (bitstream_buffers_.find(bitstream_buffer_id) ==
342 bitstream_buffers_.end())
343 return false;
344
345 PP_VideoBitstreamBuffer_Dev& bitstream_buffer =
346 bitstream_buffers_[bitstream_buffer_id];
347 void* target_mem = buffer_if_->Map(bitstream_buffer.data);
348 if (target_mem == NULL)
349 return false;
350
351 uint32_t size_in_bytes = 0;
352 if (!buffer_if_->Describe(bitstream_buffer.data, &size_in_bytes))
353 return false;
354
355 bool success = video_source_->GetBitstreamUnit(target_mem, size_in_bytes,
356 &bitstream_buffer.size);
357 if (!success)
358 return false;
359
360 // Dispatch the bitstream unit to the decoder.
361 success = video_decoder_->Decode(
362 bitstream_buffer,
363 cb_factory_.NewCallback(
364 &VideoDecoderSession::OnBitstreamBufferProcessed,
365 bitstream_buffer_id));
366 // Finally unmap the buffer for this round.
367 buffer_if_->Unmap(bitstream_buffer.data);
368 return success;
369 }
370
// Single funnel point for session state transitions (eases future logging /
// validation of transitions).
void VideoDecoderSession::ChangeState(State to_state) {
  state_ = to_state;
}
374
// Returns a per-session monotonically increasing id for bitstream buffers.
int32_t VideoDecoderSession::GetUniqueId() {
  // Not exactly unique in the current form (wraps on overflow) but close
  // enough for this use case.
  return next_id_++;
}
379
// Pass-through vertex shader: forwards position and texture coordinates
// untransformed to the fragment stage.
static const char kVertexShader[] =
    "varying vec2 interp_tc;\n"
    "\n"
    "attribute vec4 in_pos;\n"
    "attribute vec2 in_tc;\n"
    "\n"
    "void main() {\n"
    "  interp_tc = in_tc;\n"
    "  gl_Position = in_pos;\n"
    "}\n";

// Color shader for EGLImage: samples the decoded picture texture directly.
static const char kFragmentShaderEgl[] =
    "varying vec2 interp_tc;\n"
    "\n"
    "uniform sampler2D tex;\n"
    "\n"
    "void main() {\n"
    "  gl_FragColor = texture2D(tex, interp_tc);\n"
    "}\n";

// Buffer size for shader compile/link error logs.
static const unsigned int kShaderErrorSize = 4096;
404
405 GLES2Display::GLES2Display(pp::Instance* instance, PP_Size size)
406 : pp::Graphics3DClient_Dev(instance),
407 instance_(instance),
408 surface_size_(size),
409 next_id_(1) {}
410
411 GLES2Display::~GLES2Display() {}
412
// Graphics3DClient_Dev callback; context loss is fatal for this example.
void GLES2Display::Graphics3DContextLost() {
  assert(!"GLES2: Unexpectedly lost graphics context");
}
416
417 bool GLES2Display::Initialize() {
418 if (!InitGL(surface_size_.width, surface_size_.height))
419 return false;
420 ProgramShaders();
421 return true;
422 }
423
424 bool GLES2Display::ProvideGLESPictureBuffers(
425 uint32_t req_num_of_bufs,
426 PP_Size dimensions,
427 std::vector<PP_GLESBuffer_Dev>& gles_buffers) {
428 GLuint texture;
429 for (uint32_t i = 0; i < req_num_of_bufs; i++) {
430 PP_GLESBuffer_Dev picture_buffer;
431 // Generate texture and bind (effectively allocate) it.
432 gles2_if_->GenTextures(context_->pp_resource(), 1, &texture);
433 gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture);
434 picture_buffer.context = 0; // TODO(vmr): Get proper context id.
435 picture_buffer.texture_id = texture;
436 picture_buffer.info.id = GetUniqueId();
437 picture_buffer.info.size.width = surface_size_.width;
438 picture_buffer.info.size.height = surface_size_.height;
439 // Add to output vector and store the values into the map for GLES buffers.
440 gles_buffers.push_back(picture_buffer);
441 gles_buffers_[picture_buffer.info.id] = picture_buffer;
442 AssertNoGLError();
443 }
444 return true;
445 }
446
447 bool GLES2Display::DismissPictureBuffer(int32_t picture_buffer_id) {
448 gles2_if_->DeleteTextures(context_->pp_resource(), 1,
449 &gles_buffers_[picture_buffer_id].texture_id);
450 gles_buffers_.erase(picture_buffer_id);
451 return true;
452 }
453
// Draws the decoded |picture|'s texture to the color buffer and swaps
// surfaces. |completion_callback| runs when the swap completes. Returns
// false if the swap could not be issued.
bool GLES2Display::DrawPicture(const PP_Picture_Dev& picture,
                               pp::CompletionCallback completion_callback) {
  // Decoder has finished decoding picture into the texture, we'll have to just
  // draw the texture to the color buffer and swap the surfaces.
  // Clear the color buffer.
  gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT |
                   GL_DEPTH_BUFFER_BIT);
  // Load the texture into texture unit 0.
  gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
  gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D,
                         gles_buffers_[picture.picture_buffer_id].texture_id);
  // Draw the full-screen quad set up in ProgramShaders().
  gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
  // Force the execution of pending commands.
  // TODO(vmr): Do we have to do this? Can we rely command buffer to execute the
  // commands without Finish call?
  gles2_if_->Finish(context_->pp_resource());
  AssertNoGLError();

  int32_t error = surface_->SwapBuffers(completion_callback);
  if (error != PP_OK)
    return false;

  AssertNoGLError();
  return true;
}
480
// Debug helper: asserts that the GL context has no pending error.
// Note that GetError() also CLEARS the error flag as a side effect.
void GLES2Display::AssertNoGLError() {
  assert(!gles2_if_->GetError(context_->pp_resource()));
}
484
485 bool GLES2Display::InitGL(int width, int height) {
486 assert(width && height);
487 gles2_if_ = static_cast<const struct PPB_OpenGLES2_Dev*>(
488 pp::Module::Get()->GetBrowserInterface(PPB_OPENGLES2_DEV_INTERFACE));
489 // Firstly, we need OpenGL ES context associated with the display our plugin
490 // is rendering to.
491 if (context_) delete(context_);
492 context_ = new pp::Context3D_Dev(*instance_, 0, pp::Context3D_Dev(), NULL);
493 assert(!context_->is_null());
494 // Then we need surface bound to our fresh context. We'll be actually drawing
495 // on this surface and swapping that surface to refresh the displayable data
496 // of the plugin.
497 int32_t surface_attributes[] = {
498 PP_GRAPHICS3DATTRIB_WIDTH, surface_size_.width,
499 PP_GRAPHICS3DATTRIB_HEIGHT, surface_size_.height,
500 PP_GRAPHICS3DATTRIB_NONE
501 };
502 if (surface_) delete(surface_);
503 surface_ = new pp::Surface3D_Dev(*instance_, 0, surface_attributes);
504 assert(!surface_->is_null());
505 int32_t bind_error = context_->BindSurfaces(*surface_, *surface_);
506 if (!bind_error) {
507 assert(bind_error);
508 }
509 AssertNoGLError();
510
511 bool success = instance_->BindGraphics(*surface_);
512 if (!success) {
513 assert(success);
514 }
515 // Clear the color buffer with opaque white for starters.
516 gles2_if_->ClearColor(context_->pp_resource(), 1.0, 1.0, 1.0, 0.0);
517 gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
518 // Set the viewport to match the whole GL window.
519 gles2_if_->Viewport(context_->pp_resource(), 0, 0, surface_size_.width,
520 surface_size_.height);
521 AssertNoGLError();
522 return true;
523 }
524
525 void GLES2Display::CreateShader(GLuint program, GLenum type,
526 const char* source,
527 int size) {
528 GLuint shader = gles2_if_->CreateShader(context_->pp_resource(), type);
529 gles2_if_->ShaderSource(
530 context_->pp_resource(), shader, 1, &source, &size);
531 gles2_if_->CompileShader(context_->pp_resource(), shader);
532
533 int result = GL_FALSE;
534 gles2_if_->GetShaderiv(
535 context_->pp_resource(), shader, GL_COMPILE_STATUS, &result);
536 if (!result) {
537 char log[kShaderErrorSize];
538 int len = 0;
539 gles2_if_->GetShaderInfoLog(context_->pp_resource(), shader,
540 kShaderErrorSize - 1, &len, log);
541 log[len] = 0;
542 assert(result);
543 }
544 gles2_if_->AttachShader(context_->pp_resource(), program, shader);
545 gles2_if_->DeleteShader(context_->pp_resource(), shader);
546 }
547
// Links program_ and makes it the active program; asserts on link errors.
// NOTE(review): the parameter shadows the member of the same name and the
// only call site passes that very member — consider dropping the parameter
// (requires a header change).
void GLES2Display::LinkProgram(const PPB_OpenGLES2_Dev* gles2_if_ ) {
  gles2_if_->LinkProgram(context_->pp_resource(), program_);
  int result = GL_FALSE;
  gles2_if_->GetProgramiv(context_->pp_resource(), program_, GL_LINK_STATUS,
                          &result);
  if (!result) {
    // Capture the linker log for inspection in a debugger before asserting.
    char log[kShaderErrorSize];
    int len = 0;
    gles2_if_->GetProgramInfoLog(context_->pp_resource(), program_,
                                 kShaderErrorSize - 1, &len, log);
    log[len] = 0;
    assert(result);
  }
  gles2_if_->UseProgram(context_->pp_resource(), program_);
}
563
564 void GLES2Display::ProgramShaders() {
565 // Vertices for a full screen quad.
566 static const float kVertices[] = {
567 -1.f, 1.f,
568 -1.f, -1.f,
569 1.f, 1.f,
570 1.f, -1.f,
571 };
572
573 // Texture Coordinates mapping the entire texture for EGL image.
574 static const float kTextureCoordsEgl[] = {
575 0, 1,
576 0, 0,
577 1, 1,
578 1, 0,
579 };
580 program_ = gles2_if_->CreateProgram(context_->pp_resource());
581
582 // Create shader for EGL image
583 CreateShader(program_, GL_VERTEX_SHADER,
584 kVertexShader, sizeof(kVertexShader));
585 CreateShader(program_, GL_FRAGMENT_SHADER,
586 kFragmentShaderEgl, sizeof(kFragmentShaderEgl));
587 LinkProgram(gles2_if_);
588
589 AssertNoGLError();
590 // Bind parameters.
591 gles2_if_->Uniform1i(context_->pp_resource(), gles2_if_->
592 GetUniformLocation(context_->pp_resource(), program_,
593 "tex"), 0);
594 gles2_if_->GenBuffers(context_->pp_resource(), 1, &vertex_buffer_);
595 gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER,
596 vertex_buffer_);
597 gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER,
598 8 * sizeof(kVertices[0]), kVertices, GL_STATIC_DRAW);
599
600 AssertNoGLError();
601 int pos_location = gles2_if_->GetAttribLocation(context_->pp_resource(),
602 program_, "in_pos");
603 gles2_if_->EnableVertexAttribArray(context_->pp_resource(), pos_location);
604 gles2_if_->VertexAttribPointer(context_->pp_resource(), pos_location, 2,
605 GL_FLOAT, GL_FALSE, 0, 0);
606
607 AssertNoGLError();
608 gles2_if_->GenBuffers(context_->pp_resource(), 1, &fragment_buffer_);
609 gles2_if_->BindBuffer(context_->pp_resource(), GL_ARRAY_BUFFER,
610 fragment_buffer_);
611 gles2_if_->BufferData(context_->pp_resource(), GL_ARRAY_BUFFER,
612 8 * sizeof(kTextureCoordsEgl[0]),
613 kTextureCoordsEgl, GL_STATIC_DRAW);
614 AssertNoGLError();
615 int tc_location = gles2_if_->GetAttribLocation(context_->pp_resource(),
616 program_, "in_tc");
617 gles2_if_->EnableVertexAttribArray(context_->pp_resource(), tc_location);
618 gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2,
619 GL_FLOAT, GL_FALSE, 0, kTextureCoordsEgl);
620 gles2_if_->VertexAttribPointer(context_->pp_resource(), tc_location, 2,
621 GL_FLOAT, GL_FALSE, 0, 0);
622 AssertNoGLError();
623 }
624
// Returns a per-display monotonically increasing id for picture buffers.
int32_t GLES2Display::GetUniqueId() {
  // Not exactly unique in the current form (wraps on overflow) but close
  // enough for this use case.
  return next_id_++;
}
629
OLDNEW
« no previous file with comments | « ppapi/examples/video_decoder/video_decoder_session.h ('k') | ppapi/ppapi_tests.gypi » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698