Chromium Code Reviews

Unified Diff: ui/base/resource/data_pack.cc

Issue 1969313005: [headless] Embed pak file into binary. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: updated years in copyright (created 3 years, 10 months ago)
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "ui/base/resource/data_pack.h"

 #include <errno.h>
 #include <utility>

 #include "base/files/file_util.h"
 #include "base/files/memory_mapped_file.h"
 #include "base/logging.h"
+#include "base/memory/ptr_util.h"
 #include "base/memory/ref_counted_memory.h"
 #include "base/metrics/histogram_macros.h"
 #include "base/strings/string_piece.h"

 // For details of the file layout, see
 // http://dev.chromium.org/developers/design-documents/linuxresourcesandlocalizedstrings

 namespace {

 static const uint32_t kFileFormatVersion = 4;
(...skipping 38 matching lines...)
 };

 void LogDataPackError(LoadErrors error) {
   UMA_HISTOGRAM_ENUMERATION("DataPack.Load", error, LOAD_ERRORS_COUNT);
 }

 }  // namespace

 namespace ui {

+// Abstraction of a data source (memory mapped file or in-memory buffer).
+class DataPack::DataSource {
+ public:
+  virtual ~DataSource() {}
+
+  virtual size_t GetLength() const = 0;
+  virtual const uint8_t* GetData() const = 0;
+};
+
+class DataPack::MemoryMappedDataSource : public DataPack::DataSource {
+ public:
+  explicit MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap)
+      : mmap_(std::move(mmap)) {}
+
+  ~MemoryMappedDataSource() override {}
+
+  // DataPack::DataSource:
+  size_t GetLength() const override { return mmap_->length(); }
+
+  const uint8_t* GetData() const override { return mmap_->data(); }
+
+ private:
+  std::unique_ptr<base::MemoryMappedFile> mmap_;
+
+  DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource);
+};
+
+class DataPack::BufferDataSource : public DataPack::DataSource {
+ public:
+  explicit BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {}
+
+  ~BufferDataSource() override {}
+
+  // DataPack::DataSource:
+  size_t GetLength() const override { return buffer_.length(); }
+
+  const uint8_t* GetData() const override {
+    return reinterpret_cast<const uint8_t*>(buffer_.data());
+  }
+
+ private:
+  base::StringPiece buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(BufferDataSource);
+};
+
 DataPack::DataPack(ui::ScaleFactor scale_factor)
     : resource_count_(0),
       text_encoding_type_(BINARY),
       scale_factor_(scale_factor) {
 }

 DataPack::~DataPack() {
 }

 bool DataPack::LoadFromPath(const base::FilePath& path) {
-  mmap_.reset(new base::MemoryMappedFile);
-  if (!mmap_->Initialize(path)) {
+  std::unique_ptr<base::MemoryMappedFile> mmap =
+      base::MakeUnique<base::MemoryMappedFile>();
+  if (!mmap->Initialize(path)) {
     DLOG(ERROR) << "Failed to mmap datapack";
     LogDataPackError(INIT_FAILED);
-    mmap_.reset();
+    mmap.reset();
     return false;
   }
-  return LoadImpl();
+  return LoadImpl(base::MakeUnique<MemoryMappedDataSource>(std::move(mmap)));
 }

 bool DataPack::LoadFromFile(base::File file) {
   return LoadFromFileRegion(std::move(file),
                             base::MemoryMappedFile::Region::kWholeFile);
 }

 bool DataPack::LoadFromFileRegion(
     base::File file,
     const base::MemoryMappedFile::Region& region) {
-  mmap_.reset(new base::MemoryMappedFile);
-  if (!mmap_->Initialize(std::move(file), region)) {
+  std::unique_ptr<base::MemoryMappedFile> mmap =
+      base::MakeUnique<base::MemoryMappedFile>();
+  if (!mmap->Initialize(std::move(file), region)) {
     DLOG(ERROR) << "Failed to mmap datapack";
     LogDataPackError(INIT_FAILED_FROM_FILE);
-    mmap_.reset();
+    mmap.reset();
     return false;
   }
-  return LoadImpl();
+  return LoadImpl(base::MakeUnique<MemoryMappedDataSource>(std::move(mmap)));
 }

-bool DataPack::LoadImpl() {
+bool DataPack::LoadFromBuffer(base::StringPiece buffer) {
+  return LoadImpl(base::MakeUnique<BufferDataSource>(buffer));
+}
+
+bool DataPack::LoadImpl(std::unique_ptr<DataPack::DataSource> data_source) {
   // Sanity check the header of the file.
-  if (kHeaderLength > mmap_->length()) {
+  if (kHeaderLength > data_source->GetLength()) {
     DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
     LogDataPackError(HEADER_TRUNCATED);
-    mmap_.reset();
     return false;
   }

   // Parse the header of the file.
   // First uint32_t: version; second: resource count;
-  const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data());
+  const uint32_t* ptr =
+      reinterpret_cast<const uint32_t*>(data_source->GetData());
   uint32_t version = ptr[0];
   if (version != kFileFormatVersion) {
     LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
                << kFileFormatVersion;
     LogDataPackError(BAD_VERSION);
-    mmap_.reset();
     return false;
   }
   resource_count_ = ptr[1];

   // third: text encoding.
   const uint8_t* ptr_encoding = reinterpret_cast<const uint8_t*>(ptr + 2);
   text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding);
   if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 &&
       text_encoding_type_ != BINARY) {
     LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_
                << ", expected between " << BINARY << " and " << UTF16;
     LogDataPackError(WRONG_ENCODING);
-    mmap_.reset();
     return false;
   }

   // Sanity check the file.
   // 1) Check we have enough entries. There's an extra entry after the last item
   // which gives the length of the last item.
   if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) >
-      mmap_->length()) {
+      data_source->GetLength()) {
     LOG(ERROR) << "Data pack file corruption: too short for number of "
                   "entries specified.";
     LogDataPackError(INDEX_TRUNCATED);
-    mmap_.reset();
     return false;
   }
   // 2) Verify the entries are within the appropriate bounds. There's an extra
   // entry after the last item which gives us the length of the last item.
   for (size_t i = 0; i < resource_count_ + 1; ++i) {
     const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
-    if (entry->file_offset > mmap_->length()) {
+        data_source->GetData() + kHeaderLength + (i * sizeof(DataPackEntry)));
+    if (entry->file_offset > data_source->GetLength()) {
       LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
                  << "Was the file corrupted?";
       LogDataPackError(ENTRY_NOT_FOUND);
-      mmap_.reset();
       return false;
     }
   }

+  data_source_ = std::move(data_source);
+
   return true;
 }

 bool DataPack::HasResource(uint16_t resource_id) const {
-  return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
-                   sizeof(DataPackEntry), DataPackEntry::CompareById);
+  return !!bsearch(&resource_id, data_source_->GetData() + kHeaderLength,
+                   resource_count_, sizeof(DataPackEntry),
+                   DataPackEntry::CompareById);
 }

 bool DataPack::GetStringPiece(uint16_t resource_id,
                               base::StringPiece* data) const {
   // It won't be hard to make this endian-agnostic, but it's not worth
   // bothering to do right now.
 #if defined(__BYTE_ORDER)
   // Linux check
   static_assert(__BYTE_ORDER == __LITTLE_ENDIAN,
                 "datapack assumes little endian");
 #elif defined(__BIG_ENDIAN__)
   // Mac check
 #error DataPack assumes little endian
 #endif

-  const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(
-      bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
-              sizeof(DataPackEntry), DataPackEntry::CompareById));
+  const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch(
+      &resource_id, data_source_->GetData() + kHeaderLength, resource_count_,
+      sizeof(DataPackEntry), DataPackEntry::CompareById));
   if (!target) {
     return false;
   }

   const DataPackEntry* next_entry = target + 1;
   // If the next entry points beyond the end of the file this data pack's entry
   // table is corrupt. Log an error and return false. See
   // http://crbug.com/371301.
-  if (next_entry->file_offset > mmap_->length()) {
-    size_t entry_index = target -
-        reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength);
+  if (next_entry->file_offset > data_source_->GetLength()) {
+    size_t entry_index = target - reinterpret_cast<const DataPackEntry*>(
+                                      data_source_->GetData() + kHeaderLength);
     LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end "
                << "of file. This should have been caught when loading. Was the "
                << "file modified?";
     return false;
   }

   size_t length = next_entry->file_offset - target->file_offset;
-  data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
+  data->set(reinterpret_cast<const char*>(data_source_->GetData() +
+                                          target->file_offset),
             length);
   return true;
 }

 base::RefCountedStaticMemory* DataPack::GetStaticMemory(
     uint16_t resource_id) const {
   base::StringPiece piece;
   if (!GetStringPiece(resource_id, &piece))
     return NULL;

   return new base::RefCountedStaticMemory(piece.data(), piece.length());
 }

 ResourceHandle::TextEncodingType DataPack::GetTextEncodingType() const {
   return text_encoding_type_;
 }

 ui::ScaleFactor DataPack::GetScaleFactor() const {
   return scale_factor_;
 }

 #if DCHECK_IS_ON()
 void DataPack::CheckForDuplicateResources(
     const ScopedVector<ResourceHandle>& packs) {
   for (size_t i = 0; i < resource_count_ + 1; ++i) {
     const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+        data_source_->GetData() + kHeaderLength + (i * sizeof(DataPackEntry)));
     const uint16_t resource_id = entry->resource_id;
     const float resource_scale = GetScaleForScaleFactor(scale_factor_);
     for (const ResourceHandle* handle : packs) {
       if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale)
         continue;
       DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource "
                                                 << resource_id << " with scale "
                                                 << resource_scale;
     }
   }
(...skipping 85 matching lines...)
       return false;
     }
   }

   base::CloseFile(file);

   return true;
 }

 }  // namespace ui
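
Usage note: the new LoadFromBuffer() entry point is what lets an embedder such as headless ship the .pak contents inside the binary instead of reading them from disk. A minimal sketch of a caller, under the assumption that a build step has converted the pak file into a C array; the names kHeadlessResourcePak and kHeadlessResourcePakSize are hypothetical and not part of this change:

#include <memory>
#include <stddef.h>

#include "base/memory/ptr_util.h"
#include "base/strings/string_piece.h"
#include "ui/base/layout.h"
#include "ui/base/resource/data_pack.h"

// Hypothetical symbols for a .pak file embedded as a C array at build time.
extern const char kHeadlessResourcePak[];
extern const size_t kHeadlessResourcePakSize;

std::unique_ptr<ui::DataPack> LoadEmbeddedPak() {
  auto pack = base::MakeUnique<ui::DataPack>(ui::SCALE_FACTOR_100P);
  // The StringPiece does not own the bytes; that is fine here because the
  // embedded array has static storage duration and outlives the DataPack.
  if (!pack->LoadFromBuffer(
          base::StringPiece(kHeadlessResourcePak, kHeadlessResourcePakSize))) {
    return nullptr;
  }
  return pack;
}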
