Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(217)

Side by Side Diff: ppapi/proxy/ppapi_command_buffer_proxy.cc

Issue 215803002: Remove CommandBuffer::GetTransferBuffer. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: rebase Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « ppapi/proxy/ppapi_command_buffer_proxy.h ('k') | ppapi/proxy/ppapi_messages.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "ppapi/proxy/ppapi_command_buffer_proxy.h" 5 #include "ppapi/proxy/ppapi_command_buffer_proxy.h"
6 6
7 #include "ppapi/proxy/ppapi_messages.h" 7 #include "ppapi/proxy/ppapi_messages.h"
8 #include "ppapi/proxy/proxy_channel.h" 8 #include "ppapi/proxy/proxy_channel.h"
9 #include "ppapi/shared_impl/api_id.h" 9 #include "ppapi/shared_impl/api_id.h"
10 #include "ppapi/shared_impl/host_resource.h" 10 #include "ppapi/shared_impl/host_resource.h"
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
99 UpdateState(state, success); 99 UpdateState(state, success);
100 } 100 }
101 101
102 void PpapiCommandBufferProxy::SetGetBuffer(int32 transfer_buffer_id) { 102 void PpapiCommandBufferProxy::SetGetBuffer(int32 transfer_buffer_id) {
103 if (last_state_.error == gpu::error::kNoError) { 103 if (last_state_.error == gpu::error::kNoError) {
104 Send(new PpapiHostMsg_PPBGraphics3D_SetGetBuffer( 104 Send(new PpapiHostMsg_PPBGraphics3D_SetGetBuffer(
105 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, transfer_buffer_id)); 105 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, transfer_buffer_id));
106 } 106 }
107 } 107 }
108 108
109 void PpapiCommandBufferProxy::SetGetOffset(int32 get_offset) {
110 // Not implemented in proxy.
111 NOTREACHED();
112 }
113
114 scoped_refptr<gpu::Buffer> PpapiCommandBufferProxy::CreateTransferBuffer( 109 scoped_refptr<gpu::Buffer> PpapiCommandBufferProxy::CreateTransferBuffer(
115 size_t size, 110 size_t size,
116 int32* id) { 111 int32* id) {
117 *id = -1; 112 *id = -1;
118 113
119 if (last_state_.error != gpu::error::kNoError) 114 if (last_state_.error != gpu::error::kNoError)
120 return NULL; 115 return NULL;
121 116
122 if (!Send(new PpapiHostMsg_PPBGraphics3D_CreateTransferBuffer(
123 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, size, id))) {
124 return NULL;
125 }
126
127 if ((*id) <= 0)
128 return NULL;
129
130 return GetTransferBuffer(*id);
131 }
132
133 void PpapiCommandBufferProxy::DestroyTransferBuffer(int32 id) {
134 if (last_state_.error != gpu::error::kNoError)
135 return;
136
137 // Remove the transfer buffer from the client-side cache.
138 TransferBufferMap::iterator it = transfer_buffers_.find(id);
139
140 // Remove reference to buffer, allowing the shared memory object to be
141 // deleted, closing the handle in the process.
142 if (it != transfer_buffers_.end())
143 transfer_buffers_.erase(it);
144
145 Send(new PpapiHostMsg_PPBGraphics3D_DestroyTransferBuffer(
146 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, id));
147 }
148
149 void PpapiCommandBufferProxy::Echo(const base::Closure& callback) {
150 NOTREACHED();
151 }
152
153 uint32 PpapiCommandBufferProxy::CreateStreamTexture(uint32 texture_id) {
154 NOTREACHED();
155 return 0;
156 }
157
158 scoped_refptr<gpu::Buffer> PpapiCommandBufferProxy::GetTransferBuffer(
159 int32 id) {
160 if (last_state_.error != gpu::error::kNoError)
161 return NULL;
162
163 // Check local cache to see if there is already a client side shared memory
164 // object for this id.
165 TransferBufferMap::iterator it = transfer_buffers_.find(id);
166 if (it != transfer_buffers_.end()) {
167 return it->second;
168 }
169
170 // Assuming we are in the renderer process, the service is responsible for 117 // Assuming we are in the renderer process, the service is responsible for
171 // duplicating the handle. This might not be true for NaCl. 118 // duplicating the handle. This might not be true for NaCl.
172 ppapi::proxy::SerializedHandle handle( 119 ppapi::proxy::SerializedHandle handle(
173 ppapi::proxy::SerializedHandle::SHARED_MEMORY); 120 ppapi::proxy::SerializedHandle::SHARED_MEMORY);
174 if (!Send(new PpapiHostMsg_PPBGraphics3D_GetTransferBuffer( 121 if (!Send(new PpapiHostMsg_PPBGraphics3D_CreateTransferBuffer(
175 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, id, &handle))) { 122 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, size, id, &handle))) {
176 return NULL; 123 return NULL;
177 } 124 }
178 if (!handle.is_shmem()) 125
126 if (*id <= 0 || !handle.is_shmem())
179 return NULL; 127 return NULL;
180 128
181 // Cache the transfer buffer shared memory object client side.
182 scoped_ptr<base::SharedMemory> shared_memory( 129 scoped_ptr<base::SharedMemory> shared_memory(
183 new base::SharedMemory(handle.shmem(), false)); 130 new base::SharedMemory(handle.shmem(), false));
184 131
185 // Map the shared memory on demand. 132 // Map the shared memory on demand.
186 if (!shared_memory->memory()) { 133 if (!shared_memory->memory()) {
187 if (!shared_memory->Map(handle.size())) { 134 if (!shared_memory->Map(handle.size())) {
188 return NULL; 135 return NULL;
189 } 136 }
190 } 137 }
191 138
192 scoped_refptr<gpu::Buffer> buffer = 139 scoped_refptr<gpu::Buffer> buffer =
193 new gpu::Buffer(shared_memory.Pass(), handle.size()); 140 new gpu::Buffer(shared_memory.Pass(), handle.size());
194 transfer_buffers_[id] = buffer;
195 return buffer; 141 return buffer;
196 } 142 }
197 143
198 void PpapiCommandBufferProxy::SetToken(int32 token) { 144 void PpapiCommandBufferProxy::DestroyTransferBuffer(int32 id) {
145 if (last_state_.error != gpu::error::kNoError)
146 return;
147
148 Send(new PpapiHostMsg_PPBGraphics3D_DestroyTransferBuffer(
149 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, id));
150 }
151
152 void PpapiCommandBufferProxy::Echo(const base::Closure& callback) {
199 NOTREACHED(); 153 NOTREACHED();
200 } 154 }
201 155
202 void PpapiCommandBufferProxy::SetParseError(gpu::error::Error error) { 156 uint32 PpapiCommandBufferProxy::CreateStreamTexture(uint32 texture_id) {
203 NOTREACHED(); 157 NOTREACHED();
204 } 158 return 0;
205
206 void PpapiCommandBufferProxy::SetContextLostReason(
207 gpu::error::ContextLostReason reason) {
208 NOTREACHED();
209 } 159 }
210 160
211 uint32 PpapiCommandBufferProxy::InsertSyncPoint() { 161 uint32 PpapiCommandBufferProxy::InsertSyncPoint() {
212 uint32 sync_point = 0; 162 uint32 sync_point = 0;
213 if (last_state_.error == gpu::error::kNoError) { 163 if (last_state_.error == gpu::error::kNoError) {
214 Send(new PpapiHostMsg_PPBGraphics3D_InsertSyncPoint( 164 Send(new PpapiHostMsg_PPBGraphics3D_InsertSyncPoint(
215 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, &sync_point)); 165 ppapi::API_ID_PPB_GRAPHICS_3D, resource_, &sync_point));
216 } 166 }
217 return sync_point; 167 return sync_point;
218 } 168 }
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
275 last_state_ = state; 225 last_state_ = state;
276 } 226 }
277 } else { 227 } else {
278 last_state_.error = gpu::error::kLostContext; 228 last_state_.error = gpu::error::kLostContext;
279 ++last_state_.generation; 229 ++last_state_.generation;
280 } 230 }
281 } 231 }
282 232
283 } // namespace proxy 233 } // namespace proxy
284 } // namespace ppapi 234 } // namespace ppapi
OLDNEW
« no previous file with comments | « ppapi/proxy/ppapi_command_buffer_proxy.h ('k') | ppapi/proxy/ppapi_messages.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698