Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "ui/base/resource/data_pack.h" | 5 #include "ui/base/resource/data_pack.h" |
| 6 | 6 |
| 7 #include <errno.h> | 7 #include <errno.h> |
| 8 #include <set> | 8 #include <set> |
| 9 #include <utility> | 9 #include <utility> |
| 10 | 10 |
| 11 #include "base/command_line.h" | 11 #include "base/command_line.h" |
| 12 #include "base/files/file_util.h" | 12 #include "base/files/file_util.h" |
| 13 #include "base/files/memory_mapped_file.h" | 13 #include "base/files/memory_mapped_file.h" |
| 14 #include "base/logging.h" | 14 #include "base/logging.h" |
| 15 #include "base/memory/ptr_util.h" | 15 #include "base/memory/ptr_util.h" |
| 16 #include "base/memory/ref_counted_memory.h" | 16 #include "base/memory/ref_counted_memory.h" |
| 17 #include "base/metrics/histogram_macros.h" | 17 #include "base/metrics/histogram_macros.h" |
| 18 #include "base/stl_util.h" | 18 #include "base/stl_util.h" |
| 19 #include "base/strings/string_piece.h" | 19 #include "base/strings/string_piece.h" |
| 20 #include "base/synchronization/lock.h" | 20 #include "base/synchronization/lock.h" |
| 21 | 21 |
| 22 // For details of the file layout, see | 22 // For details of the file layout, see |
| 23 // http://dev.chromium.org/developers/design-documents/linuxresourcesandlocalizedstrings | 23 // http://dev.chromium.org/developers/design-documents/linuxresourcesandlocalizedstrings |
| 24 | 24 |
| 25 namespace { | 25 namespace { |
| 26 | 26 |
| 27 static const uint32_t kFileFormatVersion = 4; | 27 static const uint32_t kFileFormatVersionWithoutAliases = 4; |
| 28 // Length of file header: version, entry count and text encoding type. | 28 static const uint32_t kFileFormatVersionWithAliases = 5; |
| 29 static const size_t kHeaderLength = 2 * sizeof(uint32_t) + sizeof(uint8_t); | 29 static const size_t kHeaderLengthV4 = 2 * sizeof(uint32_t) + sizeof(uint8_t); |
| 30 | 30 static const size_t kHeaderLengthV5 = 4 * sizeof(uint16_t); |
> flackr 2017/07/07 18:54:12: This seems to be masquerading the actual data it r…
> agrieve 2017/07/07 20:47:08: I think it's actually simpler to just say encoding…
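For orientation, here is a sketch of the two header layouts as implied by the constants above and the parsing in LoadImpl further down. The version 5 field widths are an interpretation of this CL's code rather than an authoritative format spec, and little-endian byte order is assumed, as the rest of the file already requires.

```cpp
// Illustrative layout sketch only; inferred from LoadImpl(), not a spec.
//
// Version 4 header (kHeaderLengthV4 == 9 bytes):
//   uint32_t version;        // == 4
//   uint32_t resource_count;
//   uint8_t  encoding;       // TextEncodingType
//
// Version 5 header (kHeaderLengthV5 == 8 bytes):
//   uint16_t version;        // == 5 (LoadImpl inspects only the low byte)
//   uint8_t  encoding;       // TextEncodingType
//   uint8_t  unused;         // presumably padding
//   uint16_t resource_count;
//   uint16_t alias_count;
```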
| 31 #pragma pack(push, 2) | |
| 32 struct DataPackEntry { | |
| 33 uint16_t resource_id; | |
| 34 uint32_t file_offset; | |
| 35 | |
| 36 static int CompareById(const void* void_key, const void* void_entry) { | |
| 37 uint16_t key = *reinterpret_cast<const uint16_t*>(void_key); | |
| 38 const DataPackEntry* entry = | |
| 39 reinterpret_cast<const DataPackEntry*>(void_entry); | |
| 40 if (key < entry->resource_id) { | |
| 41 return -1; | |
| 42 } else if (key > entry->resource_id) { | |
| 43 return 1; | |
| 44 } else { | |
| 45 return 0; | |
| 46 } | |
| 47 } | |
| 48 }; | |
| 49 #pragma pack(pop) | |
| 50 | |
| 51 static_assert(sizeof(DataPackEntry) == 6, "size of entry must be six"); | |
| 52 | 31 |
| 53 // We're crashing when trying to load a pak file on Windows. Add some error | 32 // We're crashing when trying to load a pak file on Windows. Add some error |
| 54 // codes for logging. | 33 // codes for logging. |
| 55 // http://crbug.com/58056 | 34 // http://crbug.com/58056 |
| 56 enum LoadErrors { | 35 enum LoadErrors { |
| 57 INIT_FAILED = 1, | 36 INIT_FAILED = 1, |
| 58 BAD_VERSION, | 37 BAD_VERSION, |
| 59 INDEX_TRUNCATED, | 38 INDEX_TRUNCATED, |
| 60 ENTRY_NOT_FOUND, | 39 ENTRY_NOT_FOUND, |
| 61 HEADER_TRUNCATED, | 40 HEADER_TRUNCATED, |
| (...skipping 33 matching lines...) | |
| 95 if (!base::ContainsKey(*resource_ids_logged, resource_id)) { | 74 if (!base::ContainsKey(*resource_ids_logged, resource_id)) { |
| 96 printf("Resource=%d\n", resource_id); | 75 printf("Resource=%d\n", resource_id); |
| 97 resource_ids_logged->insert(resource_id); | 76 resource_ids_logged->insert(resource_id); |
| 98 } | 77 } |
| 99 } | 78 } |
| 100 | 79 |
| 101 } // namespace | 80 } // namespace |
| 102 | 81 |
| 103 namespace ui { | 82 namespace ui { |
| 104 | 83 |
| 84 #pragma pack(push, 2) | |
| 85 struct DataPack::Entry { | |
| 86 uint16_t resource_id; | |
| 87 uint32_t file_offset; | |
| 88 | |
| 89 static int CompareById(const void* void_key, const void* void_entry) { | |
| 90 uint16_t key = *reinterpret_cast<const uint16_t*>(void_key); | |
| 91 const Entry* entry = reinterpret_cast<const Entry*>(void_entry); | |
| 92 return key - entry->resource_id; | |
| 93 } | |
| 94 }; | |
| 95 | |
| 96 struct DataPack::Alias { | |
| 97 uint16_t resource_id; | |
| 98 uint16_t entry_index; | |
| 99 | |
| 100 static int CompareById(const void* void_key, const void* void_entry) { | |
| 101 uint16_t key = *reinterpret_cast<const uint16_t*>(void_key); | |
| 102 const Alias* entry = reinterpret_cast<const Alias*>(void_entry); | |
| 103 return key - entry->resource_id; | |
| 104 } | |
| 105 }; | |
| 106 #pragma pack(pop) | |
| 107 | |
| 105 // Abstraction of a data source (memory mapped file or in-memory buffer). | 108 // Abstraction of a data source (memory mapped file or in-memory buffer). |
| 106 class DataPack::DataSource { | 109 class DataPack::DataSource { |
| 107 public: | 110 public: |
| 108 virtual ~DataSource() {} | 111 virtual ~DataSource() {} |
| 109 | 112 |
| 110 virtual size_t GetLength() const = 0; | 113 virtual size_t GetLength() const = 0; |
| 111 virtual const uint8_t* GetData() const = 0; | 114 virtual const uint8_t* GetData() const = 0; |
| 112 }; | 115 }; |
| 113 | 116 |
| 114 class DataPack::MemoryMappedDataSource : public DataPack::DataSource { | 117 class DataPack::MemoryMappedDataSource : public DataPack::DataSource { |
| (...skipping 27 matching lines...) | |
| 142 return reinterpret_cast<const uint8_t*>(buffer_.data()); | 145 return reinterpret_cast<const uint8_t*>(buffer_.data()); |
| 143 } | 146 } |
| 144 | 147 |
| 145 private: | 148 private: |
| 146 base::StringPiece buffer_; | 149 base::StringPiece buffer_; |
| 147 | 150 |
| 148 DISALLOW_COPY_AND_ASSIGN(BufferDataSource); | 151 DISALLOW_COPY_AND_ASSIGN(BufferDataSource); |
| 149 }; | 152 }; |
| 150 | 153 |
| 151 DataPack::DataPack(ui::ScaleFactor scale_factor) | 154 DataPack::DataPack(ui::ScaleFactor scale_factor) |
| 152 : resource_count_(0), | 155 : resource_table_(nullptr), |
| 156 resource_count_(0), | |
| 157 alias_table_(nullptr), | |
| 158 alias_count_(0), | |
| 153 text_encoding_type_(BINARY), | 159 text_encoding_type_(BINARY), |
| 154 scale_factor_(scale_factor) { | 160 scale_factor_(scale_factor) { |
| 161 // Static assert must be within a DataPack member to appease visibility rules. | |
| 162 static_assert(sizeof(Entry) == 6, "size of Entry must be 6"); | |
| 163 static_assert(sizeof(Alias) == 4, "size of Alias must be 4"); | |
| 155 } | 164 } |
| 156 | 165 |
| 157 DataPack::~DataPack() { | 166 DataPack::~DataPack() { |
| 158 } | 167 } |
| 159 | 168 |
| 160 bool DataPack::LoadFromPath(const base::FilePath& path) { | 169 bool DataPack::LoadFromPath(const base::FilePath& path) { |
| 161 std::unique_ptr<base::MemoryMappedFile> mmap = | 170 std::unique_ptr<base::MemoryMappedFile> mmap = |
| 162 base::MakeUnique<base::MemoryMappedFile>(); | 171 base::MakeUnique<base::MemoryMappedFile>(); |
| 163 if (!mmap->Initialize(path)) { | 172 if (!mmap->Initialize(path)) { |
| 164 DLOG(ERROR) << "Failed to mmap datapack"; | 173 DLOG(ERROR) << "Failed to mmap datapack"; |
| (...skipping 21 matching lines...) | |
| 186 return false; | 195 return false; |
| 187 } | 196 } |
| 188 return LoadImpl(base::MakeUnique<MemoryMappedDataSource>(std::move(mmap))); | 197 return LoadImpl(base::MakeUnique<MemoryMappedDataSource>(std::move(mmap))); |
| 189 } | 198 } |
| 190 | 199 |
| 191 bool DataPack::LoadFromBuffer(base::StringPiece buffer) { | 200 bool DataPack::LoadFromBuffer(base::StringPiece buffer) { |
| 192 return LoadImpl(base::MakeUnique<BufferDataSource>(buffer)); | 201 return LoadImpl(base::MakeUnique<BufferDataSource>(buffer)); |
| 193 } | 202 } |
| 194 | 203 |
| 195 bool DataPack::LoadImpl(std::unique_ptr<DataPack::DataSource> data_source) { | 204 bool DataPack::LoadImpl(std::unique_ptr<DataPack::DataSource> data_source) { |
| 196 // Sanity check the header of the file. | 205 // Sanity check the header of the file. There will be at least one entry in |
| 197 if (kHeaderLength > data_source->GetLength()) { | 206 // the resource table, so pick the larger of the two versions. |
| 207 if (data_source->GetLength() < kHeaderLengthV4) { | |
> flackr 2017/07/07 18:54:12: This means we don't get HEADER_TRUNCATED for an 8…
> agrieve 2017/07/07 20:47:09: I think the main point of this check is just to en…
> flackr 2017/07/18 20:21:23: It does mean that on a truncated v4 file with just…
> agrieve 2017/07/19 02:34:04: Done.
| 198 DLOG(ERROR) << "Data pack file corruption: incomplete file header."; | 208 DLOG(ERROR) << "Data pack file corruption: incomplete file header."; |
| 199 LogDataPackError(HEADER_TRUNCATED); | 209 LogDataPackError(HEADER_TRUNCATED); |
| 200 return false; | 210 return false; |
| 201 } | 211 } |
| 202 | 212 |
| 203 // Parse the header of the file. | 213 // Parse the header of the file. |
| 204 // First uint32_t: version; second: resource count; | 214 const uint8_t* data = data_source->GetData(); |
| 205 const uint32_t* ptr = | 215 int version = data[0]; |
> flackr 2017/07/07 18:54:12: I think we should keep the version length the same…
> agrieve 2017/07/07 20:47:08: I'm not sure what your concern is here. The code a…
> flackr 2017/07/10 14:07:49: I'm worried it's bad practice to shorten the versi…
> agrieve 2017/07/18 19:30:29: Yeah, I suppose a few bytes per-file isn't worth c…
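The thread above debates narrowing the version field. Reading only data[0] still distinguishes the two formats because the pack is little-endian and all version values so far fit in one byte; a minimal sketch of that reasoning, assuming the layouts parsed below:

```cpp
// Why data[0] is enough to tell the formats apart (little-endian assumed):
//   v4 stores version as uint32_t 4  ->  bytes 04 00 00 00  ->  data[0] == 4
//   v5 stores version as uint16_t 5  ->  bytes 05 00        ->  data[0] == 5
```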
| 206 reinterpret_cast<const uint32_t*>(data_source->GetData()); | 216 size_t header_length; |
| 207 uint32_t version = ptr[0]; | 217 if (version == kFileFormatVersionWithoutAliases) { |
| 208 if (version != kFileFormatVersion) { | 218 resource_count_ = reinterpret_cast<const uint32_t*>(data)[1]; |
| 219 alias_count_ = 0; | |
| 220 text_encoding_type_ = static_cast<TextEncodingType>(data[8]); | |
| 221 header_length = kHeaderLengthV4; | |
| 222 } else if (version == kFileFormatVersionWithAliases) { | |
| 223 // Version 5 added the alias table and changed the header format. | |
| 224 text_encoding_type_ = static_cast<TextEncodingType>(data[2]); | |
| 225 resource_count_ = reinterpret_cast<const uint16_t*>(data)[2]; | |
| 226 alias_count_ = reinterpret_cast<const uint16_t*>(data)[3]; | |
| 227 header_length = kHeaderLengthV5; | |
| 228 } else { | |
| 209 LOG(ERROR) << "Bad data pack version: got " << version << ", expected " | 229 LOG(ERROR) << "Bad data pack version: got " << version << ", expected " |
| 210 << kFileFormatVersion; | 230 << kFileFormatVersionWithoutAliases << " or " |
| 231 << kFileFormatVersionWithAliases; | |
| 211 LogDataPackError(BAD_VERSION); | 232 LogDataPackError(BAD_VERSION); |
| 212 return false; | 233 return false; |
| 213 } | 234 } |
| 214 resource_count_ = ptr[1]; | |
| 215 | 235 |
| 216 // third: text encoding. | |
| 217 const uint8_t* ptr_encoding = reinterpret_cast<const uint8_t*>(ptr + 2); | |
| 218 text_encoding_type_ = static_cast<TextEncodingType>(*ptr_encoding); | |
| 219 if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 && | 236 if (text_encoding_type_ != UTF8 && text_encoding_type_ != UTF16 && |
| 220 text_encoding_type_ != BINARY) { | 237 text_encoding_type_ != BINARY) { |
| 221 LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_ | 238 LOG(ERROR) << "Bad data pack text encoding: got " << text_encoding_type_ |
| 222 << ", expected between " << BINARY << " and " << UTF16; | 239 << ", expected between " << BINARY << " and " << UTF16; |
| 223 LogDataPackError(WRONG_ENCODING); | 240 LogDataPackError(WRONG_ENCODING); |
| 224 return false; | 241 return false; |
| 225 } | 242 } |
| 226 | 243 |
| 227 // Sanity check the file. | 244 // Sanity check the file. |
| 228 // 1) Check we have enough entries. There's an extra entry after the last item | 245 // 1) Check we have enough entries. There's an extra entry after the last item |
| 229 // which gives the length of the last item. | 246 // which gives the length of the last item. |
| 230 if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) > | 247 size_t resource_table_size = (resource_count_ + 1) * sizeof(Entry); |
| 248 size_t alias_table_size = alias_count_ * sizeof(Alias); | |
| 249 if (header_length + resource_table_size + alias_table_size > | |
| 231 data_source->GetLength()) { | 250 data_source->GetLength()) { |
| 232 LOG(ERROR) << "Data pack file corruption: too short for number of " | 251 LOG(ERROR) << "Data pack file corruption: " |
| 233 "entries specified."; | 252 << "too short for number of entries."; |
| 234 LogDataPackError(INDEX_TRUNCATED); | 253 LogDataPackError(INDEX_TRUNCATED); |
| 235 return false; | 254 return false; |
| 236 } | 255 } |
| 256 | |
| 257 resource_table_ = reinterpret_cast<const Entry*>(&data[header_length]); | |
| 258 alias_table_ = reinterpret_cast<const Alias*>( | |
| 259 &data[header_length + resource_table_size]); | |
| 260 | |
| 237 // 2) Verify the entries are within the appropriate bounds. There's an extra | 261 // 2) Verify the entries are within the appropriate bounds. There's an extra |
| 238 // entry after the last item which gives us the length of the last item. | 262 // entry after the last item which gives us the length of the last item. |
> flackr 2017/07/07 18:54:12: Also verify the indices of the alias table are wit…
> agrieve 2017/07/07 20:47:08: Done.
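The "Done." above suggests the alias-index bounds check landed in a later patch set; it is not visible in this excerpt. A minimal sketch of what such a check could look like, in the style of the surrounding validation code (the error code used here is illustrative, not necessarily what the CL chose):

```cpp
// Sketch only, assumed shape of the fix: every alias must refer to a valid
// slot in the resource table.
for (size_t i = 0; i < alias_count_; ++i) {
  if (alias_table_[i].entry_index >= resource_count_) {
    LOG(ERROR) << "Data pack file corruption: "
               << "Alias #" << i << " points outside the resource table.";
    LogDataPackError(ENTRY_NOT_FOUND);  // Illustrative error code choice.
    return false;
  }
}
```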
| 239 for (size_t i = 0; i < resource_count_ + 1; ++i) { | 263 for (size_t i = 0; i < resource_count_ + 1; ++i) { |
| 240 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( | 264 if (resource_table_[i].file_offset > data_source->GetLength()) { |
| 241 data_source->GetData() + kHeaderLength + (i * sizeof(DataPackEntry))); | 265 LOG(ERROR) << "Data pack file corruption: " |
| 242 if (entry->file_offset > data_source->GetLength()) { | 266 << "Entry #" << i << " past end of file."; |
| 243 LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. " | |
| 244 << "Was the file corrupted?"; | |
| 245 LogDataPackError(ENTRY_NOT_FOUND); | 267 LogDataPackError(ENTRY_NOT_FOUND); |
| 246 return false; | 268 return false; |
| 247 } | 269 } |
| 248 } | 270 } |
| 249 | 271 |
| 250 data_source_ = std::move(data_source); | 272 data_source_ = std::move(data_source); |
| 251 | |
| 252 return true; | 273 return true; |
| 253 } | 274 } |
| 254 | 275 |
| 276 const DataPack::Entry* DataPack::LookupEntryById(uint16_t resource_id) const { | |
| 277 const Entry* ret = reinterpret_cast<const Entry*>( | |
> flackr 2017/07/07 18:54:12: nit: Add a comment explaining that we search the r…
> agrieve 2017/07/07 20:47:08: Done.
| 278 bsearch(&resource_id, resource_table_, resource_count_, sizeof(Entry), | |
| 279 Entry::CompareById)); | |
| 280 if (ret == nullptr) { | |
| 281 const Alias* alias = reinterpret_cast<const Alias*>( | |
| 282 bsearch(&resource_id, alias_table_, alias_count_, sizeof(Alias), | |
| 283 Alias::CompareById)); | |
| 284 if (alias != nullptr) { | |
| 285 ret = &resource_table_[alias->entry_index]; | |
| 286 } | |
| 287 } | |
| 288 return ret; | |
| 289 } | |
| 290 | |
| 255 bool DataPack::HasResource(uint16_t resource_id) const { | 291 bool DataPack::HasResource(uint16_t resource_id) const { |
| 256 return !!bsearch(&resource_id, data_source_->GetData() + kHeaderLength, | 292 return !!LookupEntryById(resource_id); |
| 257 resource_count_, sizeof(DataPackEntry), | |
| 258 DataPackEntry::CompareById); | |
| 259 } | 293 } |
| 260 | 294 |
| 261 bool DataPack::GetStringPiece(uint16_t resource_id, | 295 bool DataPack::GetStringPiece(uint16_t resource_id, |
| 262 base::StringPiece* data) const { | 296 base::StringPiece* data) const { |
| 263 // It won't be hard to make this endian-agnostic, but it's not worth | 297 // It won't be hard to make this endian-agnostic, but it's not worth |
| 264 // bothering to do right now. | 298 // bothering to do right now. |
| 265 #if defined(__BYTE_ORDER) | 299 #if defined(__BYTE_ORDER) |
| 266 // Linux check | 300 // Linux check |
| 267 static_assert(__BYTE_ORDER == __LITTLE_ENDIAN, | 301 static_assert(__BYTE_ORDER == __LITTLE_ENDIAN, |
| 268 "datapack assumes little endian"); | 302 "datapack assumes little endian"); |
| 269 #elif defined(__BIG_ENDIAN__) | 303 #elif defined(__BIG_ENDIAN__) |
| 270 // Mac check | 304 // Mac check |
| 271 #error DataPack assumes little endian | 305 #error DataPack assumes little endian |
| 272 #endif | 306 #endif |
| 273 | 307 |
| 274 const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch( | 308 const Entry* target = LookupEntryById(resource_id); |
| 275 &resource_id, data_source_->GetData() + kHeaderLength, resource_count_, | 309 if (!target) |
| 276 sizeof(DataPackEntry), DataPackEntry::CompareById)); | |
| 277 if (!target) { | |
| 278 return false; | 310 return false; |
| 279 } | |
| 280 | 311 |
| 281 const DataPackEntry* next_entry = target + 1; | 312 const Entry* next_entry = target + 1; |
| 282 // If the next entry points beyond the end of the file this data pack's entry | 313 // If the next entry points beyond the end of the file this data pack's entry |
| 283 // table is corrupt. Log an error and return false. See | 314 // table is corrupt. Log an error and return false. See |
| 284 // http://crbug.com/371301. | 315 // http://crbug.com/371301. |
| 285 if (next_entry->file_offset > data_source_->GetLength()) { | 316 size_t entry_offset = |
| 286 size_t entry_index = target - reinterpret_cast<const DataPackEntry*>( | 317 reinterpret_cast<const uint8_t*>(next_entry) - data_source_->GetData(); |
| 287 data_source_->GetData() + kHeaderLength); | 318 size_t pak_size = data_source_->GetLength(); |
| 319 if (entry_offset > pak_size || next_entry->file_offset > pak_size) { | |
| 320 size_t entry_index = target - resource_table_; | |
| 288 LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end " | 321 LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end " |
| 289 << "of file. This should have been caught when loading. Was the " | 322 << "of file. This should have been caught when loading. Was the " |
| 290 << "file modified?"; | 323 << "file modified?"; |
| 291 return false; | 324 return false; |
| 292 } | 325 } |
| 293 | 326 |
| 294 MaybePrintResourceId(resource_id); | 327 MaybePrintResourceId(resource_id); |
| 295 size_t length = next_entry->file_offset - target->file_offset; | 328 size_t length = next_entry->file_offset - target->file_offset; |
| 296 data->set(reinterpret_cast<const char*>(data_source_->GetData() + | 329 data->set(reinterpret_cast<const char*>(data_source_->GetData() + |
| 297 target->file_offset), | 330 target->file_offset), |
| (...skipping 15 matching lines...) | |
| 313 } | 346 } |
| 314 | 347 |
| 315 ui::ScaleFactor DataPack::GetScaleFactor() const { | 348 ui::ScaleFactor DataPack::GetScaleFactor() const { |
| 316 return scale_factor_; | 349 return scale_factor_; |
| 317 } | 350 } |
| 318 | 351 |
| 319 #if DCHECK_IS_ON() | 352 #if DCHECK_IS_ON() |
| 320 void DataPack::CheckForDuplicateResources( | 353 void DataPack::CheckForDuplicateResources( |
| 321 const std::vector<std::unique_ptr<ResourceHandle>>& packs) { | 354 const std::vector<std::unique_ptr<ResourceHandle>>& packs) { |
| 322 for (size_t i = 0; i < resource_count_ + 1; ++i) { | 355 for (size_t i = 0; i < resource_count_ + 1; ++i) { |
| 323 const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( | 356 const uint16_t resource_id = resource_table_[i].resource_id; |
| 324 data_source_->GetData() + kHeaderLength + (i * sizeof(DataPackEntry))); | |
| 325 const uint16_t resource_id = entry->resource_id; | |
| 326 const float resource_scale = GetScaleForScaleFactor(scale_factor_); | 357 const float resource_scale = GetScaleForScaleFactor(scale_factor_); |
| 327 for (const auto& handle : packs) { | 358 for (const auto& handle : packs) { |
| 328 if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale) | 359 if (GetScaleForScaleFactor(handle->GetScaleFactor()) != resource_scale) |
| 329 continue; | 360 continue; |
| 330 DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource " | 361 DCHECK(!handle->HasResource(resource_id)) << "Duplicate resource " |
| 331 << resource_id << " with scale " | 362 << resource_id << " with scale " |
| 332 << resource_scale; | 363 << resource_scale; |
| 333 } | 364 } |
| 334 } | 365 } |
| 335 } | 366 } |
| 336 #endif // DCHECK_IS_ON() | 367 #endif // DCHECK_IS_ON() |
| 337 | 368 |
| 338 // static | 369 // static |
| 339 bool DataPack::WritePack(const base::FilePath& path, | 370 bool DataPack::WritePack(const base::FilePath& path, |
| 340 const std::map<uint16_t, base::StringPiece>& resources, | 371 const std::map<uint16_t, base::StringPiece>& resources, |
| 341 TextEncodingType textEncodingType) { | 372 TextEncodingType textEncodingType) { |
| 342 FILE* file = base::OpenFile(path, "wb"); | 373 FILE* file = base::OpenFile(path, "wb"); |
| 343 if (!file) | 374 if (!file) |
| 344 return false; | 375 return false; |
| 345 | 376 |
| 346 if (fwrite(&kFileFormatVersion, sizeof(kFileFormatVersion), 1, file) != 1) { | 377 // TODO(agrieve): Is there any benefit to writing a v5 file (with aliases)? |
> flackr 2017/07/07 18:54:12: Oh this is interesting. Maybe we should change thi…
> agrieve 2017/07/07 20:47:09: Sounds good. Just heading out now, but will addres…
> agrieve 2017/07/18 19:30:29: Done.
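If WritePack were ever taught to emit the version 5 format mentioned in the TODO above, the header write might look roughly like the sketch below. This is speculative, mirrors the layout LoadImpl parses in this CL, and writes an empty alias table; it is not part of the change.

```cpp
// Hypothetical v5 header write (little-endian assumed, as elsewhere in the file).
// Four uint16_t slots: version, encoding (low byte), resource count, alias count.
uint16_t header[4];
header[0] = kFileFormatVersionWithAliases;
header[1] = static_cast<uint16_t>(textEncodingType);  // Only the low byte is read back.
header[2] = static_cast<uint16_t>(resources.size());
header[3] = 0;  // This writer computes no aliases.
if (fwrite(header, sizeof(header), 1, file) != 1) {
  LOG(ERROR) << "Failed to write file header";
  base::CloseFile(file);
  return false;
}
```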
| 378 if (fwrite(&kFileFormatVersionWithoutAliases, | |
| 379 sizeof(kFileFormatVersionWithoutAliases), 1, file) != 1) { | |
| 347 LOG(ERROR) << "Failed to write file version"; | 380 LOG(ERROR) << "Failed to write file version"; |
| 348 base::CloseFile(file); | 381 base::CloseFile(file); |
| 349 return false; | 382 return false; |
| 350 } | 383 } |
| 351 | 384 |
| 352 // Note: the python version of this function explicitly sorted keys, but | 385 // Note: the python version of this function explicitly sorted keys, but |
| 353 // std::map is a sorted associative container, we shouldn't have to do that. | 386 // std::map is a sorted associative container, we shouldn't have to do that. |
| 354 uint32_t entry_count = resources.size(); | 387 uint32_t entry_count = resources.size(); |
| 355 if (fwrite(&entry_count, sizeof(entry_count), 1, file) != 1) { | 388 if (fwrite(&entry_count, sizeof(entry_count), 1, file) != 1) { |
| 356 LOG(ERROR) << "Failed to write entry count"; | 389 LOG(ERROR) << "Failed to write entry count"; |
| (...skipping 11 matching lines...) | |
| 368 | 401 |
| 369 uint8_t write_buffer = static_cast<uint8_t>(textEncodingType); | 402 uint8_t write_buffer = static_cast<uint8_t>(textEncodingType); |
| 370 if (fwrite(&write_buffer, sizeof(uint8_t), 1, file) != 1) { | 403 if (fwrite(&write_buffer, sizeof(uint8_t), 1, file) != 1) { |
| 371 LOG(ERROR) << "Failed to write file text resources encoding"; | 404 LOG(ERROR) << "Failed to write file text resources encoding"; |
| 372 base::CloseFile(file); | 405 base::CloseFile(file); |
| 373 return false; | 406 return false; |
| 374 } | 407 } |
| 375 | 408 |
| 376 // Each entry is a uint16_t + a uint32_t. We have an extra entry after the | 409 // Each entry is a uint16_t + a uint32_t. We have an extra entry after the |
| 377 // last item so we can compute the size of the list item. | 410 // last item so we can compute the size of the list item. |
| 378 uint32_t index_length = (entry_count + 1) * sizeof(DataPackEntry); | 411 uint32_t index_length = (entry_count + 1) * sizeof(Entry); |
| 379 uint32_t data_offset = kHeaderLength + index_length; | 412 uint32_t data_offset = kHeaderLengthV4 + index_length; |
| 380 for (std::map<uint16_t, base::StringPiece>::const_iterator it = | 413 for (std::map<uint16_t, base::StringPiece>::const_iterator it = |
| 381 resources.begin(); | 414 resources.begin(); |
| 382 it != resources.end(); ++it) { | 415 it != resources.end(); ++it) { |
| 383 uint16_t resource_id = it->first; | 416 uint16_t resource_id = it->first; |
| 384 if (fwrite(&resource_id, sizeof(resource_id), 1, file) != 1) { | 417 if (fwrite(&resource_id, sizeof(resource_id), 1, file) != 1) { |
| 385 LOG(ERROR) << "Failed to write id for " << resource_id; | 418 LOG(ERROR) << "Failed to write id for " << resource_id; |
| 386 base::CloseFile(file); | 419 base::CloseFile(file); |
| 387 return false; | 420 return false; |
| 388 } | 421 } |
| 389 | 422 |
| (...skipping 30 matching lines...) | |
| 420 return false; | 453 return false; |
| 421 } | 454 } |
| 422 } | 455 } |
| 423 | 456 |
| 424 base::CloseFile(file); | 457 base::CloseFile(file); |
| 425 | 458 |
| 426 return true; | 459 return true; |
| 427 } | 460 } |
| 428 | 461 |
| 429 } // namespace ui | 462 } // namespace ui |
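For readers unfamiliar with the class, a hypothetical caller of the API touched by this CL (not part of the change itself). The scale factor and resource id are placeholders, and the bytes are copied because GetStringPiece() returns a view into the pack's backing store, which goes away with the DataPack.

```cpp
#include <string>

#include "base/files/file_path.h"
#include "base/strings/string_piece.h"
#include "ui/base/resource/data_pack.h"

// Hypothetical helper: load a .pak file and copy one resource out of it.
bool ReadPakResource(const base::FilePath& pak_path,
                     uint16_t resource_id,
                     std::string* out) {
  ui::DataPack pack(ui::SCALE_FACTOR_100P);  // Placeholder scale factor.
  if (!pack.LoadFromPath(pak_path))
    return false;  // mmap failed or the header/index checks rejected the file.
  base::StringPiece piece;
  if (!pack.GetStringPiece(resource_id, &piece))
    return false;  // Not found in the resource table or (v5) alias table.
  // Copy out: |piece| points into |pack|'s memory-mapped data.
  out->assign(piece.data(), piece.size());
  return true;
}
```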