Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "ui/base/resource/data_pack.h" | 5 #include "ui/base/resource/data_pack.h" |
| 6 | 6 |
| 7 #include <errno.h> | 7 #include <errno.h> |
| 8 #include <utility> | 8 #include <utility> |
| 9 | 9 |
| 10 #include "base/files/file_util.h" | 10 #include "base/files/file_util.h" |
| (...skipping 50 matching lines...) | |
| 61 }; | 61 }; |
| 62 | 62 |
| 63 void LogDataPackError(LoadErrors error) { | 63 void LogDataPackError(LoadErrors error) { |
| 64 UMA_HISTOGRAM_ENUMERATION("DataPack.Load", error, LOAD_ERRORS_COUNT); | 64 UMA_HISTOGRAM_ENUMERATION("DataPack.Load", error, LOAD_ERRORS_COUNT); |
| 65 } | 65 } |
| 66 | 66 |
| 67 } // namespace | 67 } // namespace |
| 68 | 68 |
| 69 namespace ui { | 69 namespace ui { |
| 70 | 70 |
| 71 class DataPack::DataSource { | |
|
sky
2017/02/08 22:22:58
Add description.
altimin
2017/02/09 00:13:51
Done.
| |
| 72 public: | |
| 73 virtual ~DataSource() {} | |
| 74 | |
| 75 virtual size_t Length() const = 0; | |
|
sky
2017/02/08 22:22:58
Please use GetLength and GetData.
altimin
2017/02/09 00:13:51
Done.
| |
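
A minimal sketch of how the abstract data-source interface might read once both of the comments above are addressed (a class description plus the GetLength()/GetData() renames). The wording of the comments below is illustrative only, not quoted from the landed CL.

```cpp
#include <stddef.h>
#include <stdint.h>

// Abstracts the storage backing a data pack so DataPack can read from either
// a memory-mapped file or an in-memory buffer through one interface.
class DataSource {
 public:
  virtual ~DataSource() {}

  // Number of bytes in the backing store.
  virtual size_t GetLength() const = 0;

  // Pointer to the first byte of the backing store.
  virtual const uint8_t* GetData() const = 0;
};
```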
| 76 virtual const uint8_t* Data() const = 0; | |
| 77 }; | |
| 78 | |
| 79 namespace { | |
| 80 | |
| 81 class MemoryMappedDataSource : public DataPack::DataSource { | |
| 82 public: | |
| 83 explicit MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap) | |
| 84 : mmap_(std::move(mmap)) {} | |
| 85 | |
| 86 ~MemoryMappedDataSource() override {} | |
| 87 | |
| 88 size_t Length() const override { return mmap_->length(); } | |
|
sky
2017/02/08 22:22:58
Generally we prefix overrides with the class the overrides come from.
altimin
2017/02/09 00:13:51
Done.
| |
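
The convention sky points to is grouping overridden methods under a comment naming the interface they implement. A sketch of how MemoryMappedDataSource might look with that grouping comment (method names assume the GetLength()/GetData() renames requested earlier; includes are the ones already present in data_pack.cc):

```cpp
class MemoryMappedDataSource : public DataPack::DataSource {
 public:
  explicit MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap)
      : mmap_(std::move(mmap)) {}
  ~MemoryMappedDataSource() override {}

  // DataPack::DataSource:
  size_t GetLength() const override { return mmap_->length(); }
  const uint8_t* GetData() const override { return mmap_->data(); }

 private:
  std::unique_ptr<base::MemoryMappedFile> mmap_;

  DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource);
};
```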
| 89 | |
| 90 const uint8_t* Data() const override { return mmap_->data(); } | |
| 91 | |
| 92 private: | |
| 93 std::unique_ptr<base::MemoryMappedFile> mmap_; | |
| 94 | |
| 95 DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource); | |
| 96 }; | |
| 97 | |
| 98 class BufferDataSource : public DataPack::DataSource { | |
| 99 public: | |
| 100 explicit BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {} | |
| 101 | |
| 102 ~BufferDataSource() override {} | |
| 103 | |
| 104 size_t Length() const override { return buffer_.length(); } | |
|
sky
2017/02/08 22:22:58
Same comment about adding where overrides come from.
altimin
2017/02/09 00:13:52
Done.
| |
| 105 | |
| 106 const uint8_t* Data() const override { | |
| 107 return reinterpret_cast<const uint8_t*>(buffer_.data()); | |
|
sky
2017/02/08 22:22:58
Can't this be a static_cast?
altimin
2017/02/09 00:13:52
It's impossible to cast from const int8_t* to const uint8_t* with a static_cast.
| |
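
The point of the reply is that static_cast does not convert between pointers to unrelated object types, so going from the char-based pointer returned by StringPiece::data() to const uint8_t* needs reinterpret_cast (or a detour through void*). A small standalone illustration:

```cpp
#include <stdint.h>

const uint8_t* AsBytes(const char* p) {
  // return static_cast<const uint8_t*>(p);   // error: invalid static_cast
  return reinterpret_cast<const uint8_t*>(p);  // OK

  // Two-step form that static_cast does permit:
  // return static_cast<const uint8_t*>(static_cast<const void*>(p));
}
```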
| 108 } | |
| 109 | |
| 110 private: | |
| 111 base::StringPiece buffer_; | |
| 112 | |
| 113 DISALLOW_COPY_AND_ASSIGN(BufferDataSource); | |
| 114 }; | |
| 115 | |
| 116 } // namespace | |
| 117 | |
| 71 DataPack::DataPack(ui::ScaleFactor scale_factor) | 118 DataPack::DataPack(ui::ScaleFactor scale_factor) |
| 72 : resource_count_(0), | 119 : resource_count_(0), |
| 73 text_encoding_type_(BINARY), | 120 text_encoding_type_(BINARY), |
| 74 scale_factor_(scale_factor) { | 121 scale_factor_(scale_factor) { |
| 75 } | 122 } |
| 76 | 123 |
| 77 DataPack::~DataPack() { | 124 DataPack::~DataPack() { |
| 78 } | 125 } |
| 79 | 126 |
| 80 bool DataPack::LoadFromPath(const base::FilePath& path) { | 127 bool DataPack::LoadFromPath(const base::FilePath& path) { |
| 81 mmap_.reset(new base::MemoryMappedFile); | 128 std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile); |
|
sky
2017/02/08 22:22:58
MakeUnique where using unique_ptr.
altimin
2017/02/09 00:13:52
Done.
| |
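
A sketch of how LoadFromPath might read after switching to base::MakeUnique (the pre-std::make_unique helper from base/memory/ptr_util.h used in Chromium at the time). This is an assumption about the follow-up patch set, not a verbatim copy of the landed change:

```cpp
bool DataPack::LoadFromPath(const base::FilePath& path) {
  std::unique_ptr<base::MemoryMappedFile> mmap =
      base::MakeUnique<base::MemoryMappedFile>();
  if (!mmap->Initialize(path)) {
    DLOG(ERROR) << "Failed to mmap datapack";
    LogDataPackError(INIT_FAILED);
    return false;  // |mmap| is local, so nothing to reset.
  }
  data_source_ = base::MakeUnique<MemoryMappedDataSource>(std::move(mmap));
  return LoadImpl();
}
```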
| 82 if (!mmap_->Initialize(path)) { | 129 if (!mmap->Initialize(path)) { |
| 83 DLOG(ERROR) << "Failed to mmap datapack"; | 130 DLOG(ERROR) << "Failed to mmap datapack"; |
| 84 LogDataPackError(INIT_FAILED); | 131 LogDataPackError(INIT_FAILED); |
| 85 mmap_.reset(); | 132 mmap.reset(); |
| 86 return false; | 133 return false; |
| 87 } | 134 } |
| 135 data_source_.reset(new MemoryMappedDataSource(std::move(mmap))); | |
| 88 return LoadImpl(); | 136 return LoadImpl(); |
| 89 } | 137 } |
| 90 | 138 |
| 91 bool DataPack::LoadFromFile(base::File file) { | 139 bool DataPack::LoadFromFile(base::File file) { |
| 92 return LoadFromFileRegion(std::move(file), | 140 return LoadFromFileRegion(std::move(file), |
| 93 base::MemoryMappedFile::Region::kWholeFile); | 141 base::MemoryMappedFile::Region::kWholeFile); |
| 94 } | 142 } |
| 95 | 143 |
| 96 bool DataPack::LoadFromFileRegion( | 144 bool DataPack::LoadFromFileRegion( |
| 97 base::File file, | 145 base::File file, |
| 98 const base::MemoryMappedFile::Region& region) { | 146 const base::MemoryMappedFile::Region& region) { |
| 99 mmap_.reset(new base::MemoryMappedFile); | 147 std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile); |
| 100 if (!mmap_->Initialize(std::move(file), region)) { | 148 if (!mmap->Initialize(std::move(file), region)) { |
| 101 DLOG(ERROR) << "Failed to mmap datapack"; | 149 DLOG(ERROR) << "Failed to mmap datapack"; |
| 102 LogDataPackError(INIT_FAILED_FROM_FILE); | 150 LogDataPackError(INIT_FAILED_FROM_FILE); |
| 103 mmap_.reset(); | 151 mmap.reset(); |
| 104 return false; | 152 return false; |
| 105 } | 153 } |
| 154 data_source_.reset(new MemoryMappedDataSource(std::move(mmap))); | |
| 155 return LoadImpl(); | |
| 156 } | |
| 157 | |
| 158 bool DataPack::LoadFromBuffer(base::StringPiece buffer) { | |
| 159 data_source_.reset(new BufferDataSource(buffer)); | |
| 106 return LoadImpl(); | 160 return LoadImpl(); |
| 107 } | 161 } |
| 108 | 162 |
| 109 bool DataPack::LoadImpl() { | 163 bool DataPack::LoadImpl() { |
| 110 // Sanity check the header of the file. | 164 // Sanity check the header of the file. |
| 111 if (kHeaderLength > mmap_->length()) { | 165 if (kHeaderLength > data_source_->Length()) { |
| 112 DLOG(ERROR) << "Data pack file corruption: incomplete file header."; | 166 DLOG(ERROR) << "Data pack file corruption: incomplete file header."; |
| 113 LogDataPackError(HEADER_TRUNCATED); | 167 LogDataPackError(HEADER_TRUNCATED); |
| 114 mmap_.reset(); | 168 data_source_.reset(); |
|
sky
2017/02/08 22:22:58
I think this code would be less fragile if you passed the data source into LoadImpl.
altimin
2017/02/09 00:13:52
Great suggestion, thanks!
Done.
| |
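
The shape this thread points at is having LoadImpl take ownership of the candidate source and only publish it to the member on success, so no error path has to remember to reset data_source_. A sketch under that assumption (the exact signature is illustrative, and GetLength() assumes the rename above):

```cpp
bool DataPack::LoadImpl(std::unique_ptr<DataSource> data_source) {
  // Sanity check the header of the file.
  if (kHeaderLength > data_source->GetLength()) {
    DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
    LogDataPackError(HEADER_TRUNCATED);
    return false;  // |data_source| is simply dropped; no member to reset.
  }
  // ... remaining header, encoding, and entry-table validation ...

  data_source_ = std::move(data_source);  // Publish only after validation.
  return true;
}
```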
| 115 return false; | 169 return false; |
| 116 } | 170 } |
| 117 | 171 |
| 118 // Parse the header of the file. | 172 // Parse the header of the file. |
| 119 // First uint32_t: version; second: resource count; | 173 // First uint32_t: version; second: resource count; |
| 120 const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data()); | 174 const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_source_->Data()); |
| 121 uint32_t version = ptr[0]; | 175 uint32_t version = ptr[0]; |
| 122 if (version != kFileFormatVersion) { | 176 if (version != kFileFormatVersion) { |
| 123 LOG(ERROR) << "Bad data pack version: got " << version << ", expected " | 177 LOG(ERROR) << "Bad data pack version: got " << version << ", expected " |
| 124 << kFileFormatVersion; | 178 << kFileFormatVersion; |
| 125 LogDataPackError(BAD_VERSION); | 179 LogDataPackError(BAD_VERSION); |
| 126 mmap_.reset(); | 180 data_source_.reset(); |
| 127 return false; | 181 return false; |
| 128 } | 182 } |
| 129 resource_count_ = ptr[1]; | 183 resource_count_ = ptr[1]; |
| 130 | 184 |
| 131 // third: text encoding. | 185 // third: text encoding. |
| 132 const uint8_t* ptr_encoding = reinterpret_cast<const uint8_t*>(ptr + 2); | 186 const uint8_t* ptr_encoding = reinterpret_cast<const uint8_t*>(ptr + 2); |
| 133 text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding); | 187 text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding); |
| 134 if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 && | 188 if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 && |
| 135 text_encoding_type_ != BINARY) { | 189 text_encoding_type_ != BINARY) { |
| 136 LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_ | 190 LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_ |
| 137 << ", expected between " << BINARY << " and " << UTF16; | 191 << ", expected between " << BINARY << " and " << UTF16; |
| 138 LogDataPackError(WRONG_ENCODING); | 192 LogDataPackError(WRONG_ENCODING); |
| 139 mmap_.reset(); | 193 data_source_.reset(); |
| 140 return false; | 194 return false; |
| 141 } | 195 } |
| 142 | 196 |
| 143 // Sanity check the file. | 197 // Sanity check the file. |
| 144 // 1) Check we have enough entries. There's an extra entry after the last item | 198 // 1) Check we have enough entries. There's an extra entry after the last item |
| 145 // which gives the length of the last item. | 199 // which gives the length of the last item. |
| 146 if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) > | 200 if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) > |
| 147 mmap_->length()) { | 201 data_source_->Length()) { |
| 148 LOG(ERROR) << "Data pack file corruption: too short for number of " | 202 LOG(ERROR) << "Data pack file corruption: too short for number of " |
| 149 "entries specified."; | 203 "entries specified."; |
| 150 LogDataPackError(INDEX_TRUNCATED); | 204 LogDataPackError(INDEX_TRUNCATED); |
| 151 mmap_.reset(); | 205 data_source_.reset(); |
| 152 return false; | 206 return false; |
| 153 } | 207 } |
| 154 // 2) Verify the entries are within the appropriate bounds. There's an extra | 208 // 2) Verify the entries are within the appropriate bounds. There's an extra |
| 155 // entry after the last item which gives us the length of the last item. | 209 // entry after the last item which gives us the length of the last item. |
| 156 for (size_t i = 0; i < resource_count_ + 1; ++i) { | 210 for (size_t i = 0; i < resource_count_ + 1; ++i) { |
| 157 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( | 211 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( |
| 158 mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); | 212 data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
| 159 if (entry->file_offset > mmap_->length()) { | 213 if (entry->file_offset > data_source_->Length()) { |
| 160 LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. " | 214 LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. " |
| 161 << "Was the file corrupted?"; | 215 << "Was the file corrupted?"; |
| 162 LogDataPackError(ENTRY_NOT_FOUND); | 216 LogDataPackError(ENTRY_NOT_FOUND); |
| 163 mmap_.reset(); | 217 data_source_.reset(); |
| 164 return false; | 218 return false; |
| 165 } | 219 } |
| 166 } | 220 } |
| 167 | 221 |
| 168 return true; | 222 return true; |
| 169 } | 223 } |
| 170 | 224 |
| 171 bool DataPack::HasResource(uint16_t resource_id) const { | 225 bool DataPack::HasResource(uint16_t resource_id) const { |
| 172 return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_, | 226 return !!bsearch(&resource_id, data_source_->Data() + kHeaderLength, |
| 173 sizeof(DataPackEntry), DataPackEntry::CompareById); | 227 resource_count_, sizeof(DataPackEntry), |
| 228 DataPackEntry::CompareById); | |
| 174 } | 229 } |
| 175 | 230 |
| 176 bool DataPack::GetStringPiece(uint16_t resource_id, | 231 bool DataPack::GetStringPiece(uint16_t resource_id, |
| 177 base::StringPiece* data) const { | 232 base::StringPiece* data) const { |
| 178 // It won't be hard to make this endian-agnostic, but it's not worth | 233 // It won't be hard to make this endian-agnostic, but it's not worth |
| 179 // bothering to do right now. | 234 // bothering to do right now. |
| 180 #if defined(__BYTE_ORDER) | 235 #if defined(__BYTE_ORDER) |
| 181 // Linux check | 236 // Linux check |
| 182 static_assert(__BYTE_ORDER == __LITTLE_ENDIAN, | 237 static_assert(__BYTE_ORDER == __LITTLE_ENDIAN, |
| 183 "datapack assumes little endian"); | 238 "datapack assumes little endian"); |
| 184 #elif defined(__BIG_ENDIAN__) | 239 #elif defined(__BIG_ENDIAN__) |
| 185 // Mac check | 240 // Mac check |
| 186 #error DataPack assumes little endian | 241 #error DataPack assumes little endian |
| 187 #endif | 242 #endif |
| 188 | 243 |
| 189 const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>( | 244 const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch( |
| 190 bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_, | 245 &resource_id, data_source_->Data() + kHeaderLength, resource_count_, |
| 191 sizeof(DataPackEntry), DataPackEntry::CompareById)); | 246 sizeof(DataPackEntry), DataPackEntry::CompareById)); |
| 192 if (!target) { | 247 if (!target) { |
| 193 return false; | 248 return false; |
| 194 } | 249 } |
| 195 | 250 |
| 196 const DataPackEntry* next_entry = target + 1; | 251 const DataPackEntry* next_entry = target + 1; |
| 197 // If the next entry points beyond the end of the file this data pack's entry | 252 // If the next entry points beyond the end of the file this data pack's entry |
| 198 // table is corrupt. Log an error and return false. See | 253 // table is corrupt. Log an error and return false. See |
| 199 // http://crbug.com/371301. | 254 // http://crbug.com/371301. |
| 200 if (next_entry->file_offset > mmap_->length()) { | 255 if (next_entry->file_offset > data_source_->Length()) { |
| 201 size_t entry_index = target - | 256 size_t entry_index = target - reinterpret_cast<const DataPackEntry*>( |
| 202 reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength); | 257 data_source_->Data() + kHeaderLength); |
| 203 LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end " | 258 LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end " |
| 204 << "of file. This should have been caught when loading. Was the " | 259 << "of file. This should have been caught when loading. Was the " |
| 205 << "file modified?"; | 260 << "file modified?"; |
| 206 return false; | 261 return false; |
| 207 } | 262 } |
| 208 | 263 |
| 209 size_t length = next_entry->file_offset - target->file_offset; | 264 size_t length = next_entry->file_offset - target->file_offset; |
| 210 data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset), | 265 data->set( |
| 211 length); | 266 reinterpret_cast<const char*>(data_source_->Data() + target->file_offset), |
|
sky
2017/02/08 22:22:58
Can this be a static_cast?
altimin
2017/02/09 00:13:51
See above.
| |
| 267 length); | |
| 212 return true; | 268 return true; |
| 213 } | 269 } |
| 214 | 270 |
| 215 base::RefCountedStaticMemory* DataPack::GetStaticMemory( | 271 base::RefCountedStaticMemory* DataPack::GetStaticMemory( |
| 216 uint16_t resource_id) const { | 272 uint16_t resource_id) const { |
| 217 base::StringPiece piece; | 273 base::StringPiece piece; |
| 218 if (!GetStringPiece(resource_id, &piece)) | 274 if (!GetStringPiece(resource_id, &piece)) |
| 219 return NULL; | 275 return NULL; |
| 220 | 276 |
| 221 return new base::RefCountedStaticMemory(piece.data(), piece.length()); | 277 return new base::RefCountedStaticMemory(piece.data(), piece.length()); |
| 222 } | 278 } |
| 223 | 279 |
| 224 ResourceHandle::TextEncodingType DataPack::GetTextEncodingType() const { | 280 ResourceHandle::TextEncodingType DataPack::GetTextEncodingType() const { |
| 225 return text_encoding_type_; | 281 return text_encoding_type_; |
| 226 } | 282 } |
| 227 | 283 |
| 228 ui::ScaleFactor DataPack::GetScaleFactor() const { | 284 ui::ScaleFactor DataPack::GetScaleFactor() const { |
| 229 return scale_factor_; | 285 return scale_factor_; |
| 230 } | 286 } |
| 231 | 287 |
| 232 #if DCHECK_IS_ON() | 288 #if DCHECK_IS_ON() |
| 233 void DataPack::CheckForDuplicateResources( | 289 void DataPack::CheckForDuplicateResources( |
| 234 const ScopedVector<ResourceHandle>& packs) { | 290 const ScopedVector<ResourceHandle>& packs) { |
| 235 for (size_t i = 0; i < resource_count_ + 1; ++i) { | 291 for (size_t i = 0; i < resource_count_ + 1; ++i) { |
| 236 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( | 292 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( |
| 237 mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); | 293 data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
| 238 const uint16_t resource_id = entry->resource_id; | 294 const uint16_t resource_id = entry->resource_id; |
| 239 const float resource_scale = GetScaleForScaleFactor(scale_factor_); | 295 const float resource_scale = GetScaleForScaleFactor(scale_factor_); |
| 240 for (const ResourceHandle* handle : packs) { | 296 for (const ResourceHandle* handle : packs) { |
| 241 if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale) | 297 if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale) |
| 242 continue; | 298 continue; |
| 243 DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource " | 299 DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource " |
| 244 << resource_id << " with scale " | 300 << resource_id << " with scale " |
| 245 << resource_scale; | 301 << resource_scale; |
| 246 } | 302 } |
| 247 } | 303 } |
| (...skipping 85 matching lines...) | |
| 333 return false; | 389 return false; |
| 334 } | 390 } |
| 335 } | 391 } |
| 336 | 392 |
| 337 base::CloseFile(file); | 393 base::CloseFile(file); |
| 338 | 394 |
| 339 return true; | 395 return true; |
| 340 } | 396 } |
| 341 | 397 |
| 342 } // namespace ui | 398 } // namespace ui |
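
For context, the new LoadFromBuffer() entry point added by this CL lets a DataPack be backed by an in-memory .pak blob rather than a file. A hedged usage sketch; the helper name and the idea of holding the bytes in a std::string are assumptions for illustration:

```cpp
#include <string>

#include "base/strings/string_piece.h"
#include "ui/base/resource/data_pack.h"

bool LoadPackFromMemory(const std::string& pak_contents, ui::DataPack* pack) {
  // BufferDataSource stores only a StringPiece, so |pak_contents| must
  // outlive |pack|.
  return pack->LoadFromBuffer(base::StringPiece(pak_contents));
}

// Usage (sketch):
//   ui::DataPack pack(ui::SCALE_FACTOR_100P);
//   if (LoadPackFromMemory(contents, &pack)) {
//     base::StringPiece data;
//     pack.GetStringPiece(kSomeResourceId, &data);
//   }
```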