Chromium Code Reviews

Index: gpu/command_buffer/service/gpu_preferences.h
diff --git a/gpu/command_buffer/service/gpu_preferences.h b/gpu/command_buffer/service/gpu_preferences.h
index 0c6a883979d0c3e7b5cb51f663789e9754fa85b4..60b7702ac3ab4d81e8374ae5dff68e1eb82f5055 100644
--- a/gpu/command_buffer/service/gpu_preferences.h
+++ b/gpu/command_buffer/service/gpu_preferences.h
@@ -66,10 +66,10 @@ struct GPU_EXPORT GpuPreferences {
   bool enforce_gl_minimums = false;
   // Sets the total amount of memory that may be allocated for GPU resources
-  size_t force_gpu_mem_available = 0;
+  unsigned int force_gpu_mem_available = 0;
palmer
2016/03/07 19:39:56
Use explicitly-sized integer types: https://www.chr
Peng
2016/03/07 19:56:20
Done.
   // Sets the maximum size of the in-memory gpu program cache, in kb
-  size_t gpu_program_cache_size = kDefaultMaxProgramCacheMemoryBytes;
+  unsigned int gpu_program_cache_size = kDefaultMaxProgramCacheMemoryBytes;
   // Disables the GPU shader on disk cache.
   bool disable_gpu_shader_disk_cache = false;
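For context on the review comment above: the truncated link in palmer's comment presumably points at Chromium's guidance on integer types, which favors explicitly-sized (fixed-width) types. The patch as shown switches the fields to unsigned int, whose width is implementation-defined. The following is a minimal, purely illustrative sketch of what these fields could look like with fixed-width <cstdint> types; the enclosing namespace, the simplified struct, and the constant's value are assumptions made for the example, not the real gpu/ code.

#include <cstdint>

namespace gpu_sketch {

// Hypothetical stand-in for the real constant declared elsewhere in gpu/;
// the value here is made up for the example.
constexpr uint32_t kDefaultMaxProgramCacheMemoryBytes = 2 * 1024 * 1024;

struct GpuPreferences {
  bool enforce_gl_minimums = false;

  // Sets the total amount of memory that may be allocated for GPU resources.
  // uint32_t has an explicit 32-bit width on every platform, unlike size_t
  // or unsigned int.
  uint32_t force_gpu_mem_available = 0;

  // Sets the maximum size of the in-memory gpu program cache.
  uint32_t gpu_program_cache_size = kDefaultMaxProgramCacheMemoryBytes;

  // Disables the GPU shader on disk cache.
  bool disable_gpu_shader_disk_cache = false;
};

}  // namespace gpu_sketch

The practical difference is that a fixed-width type documents the intended value range at the declaration and keeps the field's size identical across 32-bit and 64-bit builds, which matters for preferences that may be copied across process boundaries; size_t and unsigned int give no such guarantee.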