Chromium Code Reviews

Diff: ui/base/resource/data_pack.cc

Issue 1969313005: [headless] Embed pak file into binary. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fixes according to comments. Created 4 years, 7 months ago
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ui/base/resource/data_pack.h"

#include <errno.h>
#include <utility>

#include "base/files/file_util.h"
(...skipping 46 matching lines...)
  WRONG_ENCODING,
  INIT_FAILED_FROM_FILE,

  LOAD_ERRORS_COUNT,
};

}  // namespace

namespace ui {

+class DataPack::DataSource {
+ public:
+  virtual ~DataSource() {};
sky 2016/05/19 20:25:54 no ;
altimin 2016/05/19 23:04:47 Done.
+
+  virtual size_t Length() const = 0;
+  virtual const uint8_t* Data() const = 0;
+};
+
+namespace {
+
+class MemoryMappedDataSource : public DataPack::DataSource {
+ public:
+  MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap)
sky 2016/05/19 20:25:54 explicit
altimin 2016/05/19 23:04:47 Done.
+      : mmap_(std::move(mmap)) {}
+
+  ~MemoryMappedDataSource() override{};
sky 2016/05/19 20:25:54 no ;, and space after override
altimin 2016/05/19 23:04:47 Done.
+
+  size_t Length() const override { return mmap_->length(); }
+
+  const uint8_t* Data() const override { return mmap_->data(); }
+
+ private:
+  std::unique_ptr<base::MemoryMappedFile> mmap_;
+
+  DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource);
+};
+
+class BufferDataSource : public DataPack::DataSource {
sky 2016/05/19 20:25:54 StringPieceDataSource
altimin 2016/05/19 23:04:47 And again, I believe that essence of this thing is
sky 2016/05/20 15:49:04 IMO BufferDataSource is vague. It is immediately a
+ public:
+  BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {}
sky 2016/05/19 20:25:54 explicit
altimin 2016/05/19 23:04:47 Done.
+
+  ~BufferDataSource() override{};
sky 2016/05/19 20:25:54 nit: no ; and space
altimin 2016/05/19 23:04:47 Done.
+
+  size_t Length() const override { return buffer_.length(); }
+
+  const uint8_t* Data() const override {
+    return reinterpret_cast<const uint8_t*>(buffer_.data());
sky 2016/05/19 20:25:54 Why do you need the reinterpret_cast here?
altimin 2016/05/19 23:04:47 Because base::StringPiece has int8_t (aka char) in
+  }
+
+ private:
+  base::StringPiece buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(BufferDataSource);
+};
+
+}  // namespace
+
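On the reinterpret_cast question above: base::StringPiece::data() returns const char*, and a direct static_cast between const char* and const uint8_t* does not compile because the pointer types are unrelated, so reinterpret_cast is the intended tool here. A minimal, self-contained illustration of the same conversion (it uses std::string instead of base::StringPiece purely to keep the example dependency-free):

    #include <stdint.h>

    #include <string>

    // Same idea as BufferDataSource::Data(): view the character buffer as raw
    // bytes without copying it.
    const uint8_t* AsBytes(const std::string& s) {
      return reinterpret_cast<const uint8_t*>(s.data());
    }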
DataPack::DataPack(ui::ScaleFactor scale_factor)
    : resource_count_(0),
      text_encoding_type_(BINARY),
      scale_factor_(scale_factor),
      has_only_material_design_assets_(false) {
}

DataPack::~DataPack() {
}

bool DataPack::LoadFromPath(const base::FilePath& path) {
-  mmap_.reset(new base::MemoryMappedFile);
-  if (!mmap_->Initialize(path)) {
+  std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
+  if (!mmap->Initialize(path)) {
    DLOG(ERROR) << "Failed to mmap datapack";
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
    return false;
  }
+  data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
  return LoadImpl();
}

bool DataPack::LoadFromFile(base::File file) {
  return LoadFromFileRegion(std::move(file),
                            base::MemoryMappedFile::Region::kWholeFile);
}

bool DataPack::LoadFromFileRegion(
    base::File file,
    const base::MemoryMappedFile::Region& region) {
-  mmap_.reset(new base::MemoryMappedFile);
-  if (!mmap_->Initialize(std::move(file), region)) {
+  std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
+  if (!mmap->Initialize(std::move(file), region)) {
    DLOG(ERROR) << "Failed to mmap datapack";
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
    return false;
  }
+  data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
+  return LoadImpl();
+}
+
+bool DataPack::LoadFromBuffer(base::StringPiece buffer) {
+  data_source_.reset(new BufferDataSource(buffer));
  return LoadImpl();
}

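The new LoadFromBuffer() entry point is what lets the headless build serve resources from a pak embedded in the binary instead of a file on disk. A rough caller-side sketch, assuming the build generates an array holding the pak contents (kHeadlessResourcePak and kHeadlessResourcePakSize are placeholder names, not part of this CL):

    #include <stddef.h>

    #include <memory>

    #include "base/strings/string_piece.h"
    #include "ui/base/resource/data_pack.h"

    // Assumed to be generated at build time from the .pak file.
    extern const char kHeadlessResourcePak[];
    extern const size_t kHeadlessResourcePakSize;

    std::unique_ptr<ui::DataPack> LoadEmbeddedPak() {
      std::unique_ptr<ui::DataPack> pack(
          new ui::DataPack(ui::SCALE_FACTOR_100P));
      if (!pack->LoadFromBuffer(base::StringPiece(kHeadlessResourcePak,
                                                  kHeadlessResourcePakSize))) {
        return nullptr;
      }
      return pack;
    }

Because BufferDataSource only stores the StringPiece, the embedded array has to outlive the DataPack; no copy of the resource data is made.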
bool DataPack::LoadImpl() {
  // Sanity check the header of the file.
-  if (kHeaderLength > mmap_->length()) {
+  if (kHeaderLength > data_source_->Length()) {
    DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    data_source_.reset();
    return false;
  }

  // Parse the header of the file.
  // First uint32_t: version; second: resource count;
-  const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data());
+  const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_source_->Data());
  uint32_t version = ptr[0];
  if (version != kFileFormatVersion) {
    LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
               << kFileFormatVersion;
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    data_source_.reset();
    return false;
  }
  resource_count_ = ptr[1];

  // third: text encoding.
  const uint8_t* ptr_encoding = reinterpret_cast<const uint8_t*>(ptr + 2);
  text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding);
  if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 &&
      text_encoding_type_ != BINARY) {
    LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_
               << ", expected between " << BINARY << " and " << UTF16;
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    data_source_.reset();
    return false;
  }

  // Sanity check the file.
  // 1) Check we have enough entries. There's an extra entry after the last item
  // which gives the length of the last item.
  if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) >
-      mmap_->length()) {
+      data_source_->Length()) {
    LOG(ERROR) << "Data pack file corruption: too short for number of "
                  "entries specified.";
    UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED,
                              LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    data_source_.reset();
    return false;
  }
  // 2) Verify the entries are within the appropriate bounds. There's an extra
  // entry after the last item which gives us the length of the last item.
  for (size_t i = 0; i < resource_count_ + 1; ++i) {
    const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
-    if (entry->file_offset > mmap_->length()) {
+        data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+    if (entry->file_offset > data_source_->Length()) {
      LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
                 << "Was the file corrupted?";
      UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND,
                                LOAD_ERRORS_COUNT);
-      mmap_.reset();
+      data_source_.reset();
      return false;
    }
  }

  return true;
}

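For reference, the on-disk layout that LoadImpl() validates can be sketched as a packed header followed by an entry table; the struct below is only an illustration inferred from the parsing code above, not the actual declaration in data_pack.cc:

    #include <stdint.h>

    #pragma pack(push, 1)
    struct PakHeader {          // hypothetical mirror of what LoadImpl() reads
      uint32_t version;         // must equal kFileFormatVersion
      uint32_t resource_count;  // number of resources in the pack
      uint8_t text_encoding;    // BINARY, UTF8 or UTF16
    };
    #pragma pack(pop)

    static_assert(sizeof(PakHeader) == 9, "2 * uint32_t + 1 byte");

    // The header is followed by (resource_count + 1) DataPackEntry records and
    // then the raw resource bytes. The extra trailing entry lets the length of
    // the last resource be computed from two consecutive file offsets, which
    // is what GetStringPiece() below relies on.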
bool DataPack::HasResource(uint16_t resource_id) const {
-  return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
-                   sizeof(DataPackEntry), DataPackEntry::CompareById);
+  return !!bsearch(&resource_id, data_source_->Data() + kHeaderLength,
+                   resource_count_, sizeof(DataPackEntry),
+                   DataPackEntry::CompareById);
}

bool DataPack::GetStringPiece(uint16_t resource_id,
                              base::StringPiece* data) const {
  // It won't be hard to make this endian-agnostic, but it's not worth
  // bothering to do right now.
#if defined(__BYTE_ORDER)
  // Linux check
  static_assert(__BYTE_ORDER == __LITTLE_ENDIAN,
                "datapack assumes little endian");
#elif defined(__BIG_ENDIAN__)
  // Mac check
#error DataPack assumes little endian
#endif

-  const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(
-      bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
-              sizeof(DataPackEntry), DataPackEntry::CompareById));
+  const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch(
+      &resource_id, data_source_->Data() + kHeaderLength, resource_count_,
+      sizeof(DataPackEntry), DataPackEntry::CompareById));
  if (!target) {
    return false;
  }

  const DataPackEntry* next_entry = target + 1;
  // If the next entry points beyond the end of the file this data pack's entry
  // table is corrupt. Log an error and return false. See
  // http://crbug.com/371301.
-  if (next_entry->file_offset > mmap_->length()) {
-    size_t entry_index = target -
-        reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength);
+  if (next_entry->file_offset > data_source_->Length()) {
+    size_t entry_index = target - reinterpret_cast<const DataPackEntry*>(
+                                      data_source_->Data() + kHeaderLength);
    LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end "
               << "of file. This should have been caught when loading. Was the "
               << "file modified?";
    return false;
  }

  size_t length = next_entry->file_offset - target->file_offset;
-  data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
-            length);
+  data->set(
+      reinterpret_cast<const char*>(data_source_->Data() + target->file_offset),
+      length);
  return true;
}

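A small, hypothetical example of reading a resource through GetStringPiece() (the id value is made up; real callers use ids from generated grit headers):

    #include "base/logging.h"
    #include "base/strings/string_piece.h"
    #include "ui/base/resource/data_pack.h"

    // |pack| must already have been loaded via one of the Load* methods above.
    bool LogResourceSize(const ui::DataPack& pack) {
      base::StringPiece contents;
      if (!pack.GetStringPiece(12345, &contents))
        return false;
      // |contents| points directly into the pack's backing store (mmap or
      // embedded buffer), so it must not outlive |pack|.
      VLOG(1) << "resource is " << contents.length() << " bytes";
      return true;
    }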
base::RefCountedStaticMemory* DataPack::GetStaticMemory(
    uint16_t resource_id) const {
  base::StringPiece piece;
  if (!GetStringPiece(resource_id, &piece))
    return NULL;

  return new base::RefCountedStaticMemory(piece.data(), piece.length());
}

ResourceHandle::TextEncodingType DataPack::GetTextEncodingType() const {
  return text_encoding_type_;
}

ui::ScaleFactor DataPack::GetScaleFactor() const {
  return scale_factor_;
}

bool DataPack::HasOnlyMaterialDesignAssets() const {
  return has_only_material_design_assets_;
}

#if DCHECK_IS_ON()
void DataPack::CheckForDuplicateResources(
    const ScopedVector<ResourceHandle>& packs) {
  for (size_t i = 0; i < resource_count_ + 1; ++i) {
    const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+        data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
    const uint16_t resource_id = entry->resource_id;
    const float resource_scale = GetScaleForScaleFactor(scale_factor_);
    for (const ResourceHandle* handle : packs) {
      if (HasOnlyMaterialDesignAssets() !=
          handle->HasOnlyMaterialDesignAssets()) {
        continue;
      }
      if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale)
        continue;
      DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource "
(...skipping 89 matching lines...)
      return false;
    }
  }

  base::CloseFile(file);

  return true;
}

}  // namespace ui