Index: ui/base/resource/data_pack.cc |
diff --git a/ui/base/resource/data_pack.cc b/ui/base/resource/data_pack.cc |
index 245c1b7c84206779e16c43cfb34c8135c5646971..e014fce78a8e256f90141b8cc44edc262de8dcee 100644 |
--- a/ui/base/resource/data_pack.cc |
+++ b/ui/base/resource/data_pack.cc |
@@ -60,6 +60,37 @@ enum LoadErrors { |
LOAD_ERRORS_COUNT, |
}; |
+// DataSource backed by a memory-mapped file. Owns the mapping for its |
+// lifetime and exposes the mapped bytes to DataPack. |
+class MemoryMappedDataSource : public ui::DataSource { |
+ public: |
+  explicit MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap) |
+      : mmap_(std::move(mmap)) {} |
+ |
+  ~MemoryMappedDataSource() override {} |
+ |
+  // ui::DataSource: |
+  size_t length() const override { return mmap_->length(); } |
+ |
+  const uint8_t* data() const override { return mmap_->data(); } |
+ |
+ private: |
+  std::unique_ptr<base::MemoryMappedFile> mmap_; |
+ |
+  DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource); |
+}; |
+ |
+// DataSource over a caller-owned buffer. Stores only a StringPiece view, |
+// so the underlying buffer must outlive this object. |
+class BufferDataSource : public ui::DataSource { |
+ public: |
+  explicit BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {} |
+ |
+  ~BufferDataSource() override {} |
+ |
+  // ui::DataSource: |
+  size_t length() const override { return buffer_.length(); } |
+ |
+  const uint8_t* data() const override { |
+    return reinterpret_cast<const uint8_t*>(buffer_.data()); |
+  } |
+ |
+ private: |
+  base::StringPiece buffer_; |
+ |
+  DISALLOW_COPY_AND_ASSIGN(BufferDataSource); |
+}; |
sadrul
2016/05/17 15:07:50
ditto
altimin
2016/05/17 15:40:07
Done.
|
+ |
} // namespace |
namespace ui { |
@@ -75,14 +106,14 @@ DataPack::~DataPack() { |
} |
bool DataPack::LoadFromPath(const base::FilePath& path) { |
- mmap_.reset(new base::MemoryMappedFile); |
- if (!mmap_->Initialize(path)) { |
+ std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile); |
+ if (!mmap->Initialize(path)) { |
DLOG(ERROR) << "Failed to mmap datapack"; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
return false; |
} |
+ data_source_.reset(new MemoryMappedDataSource(std::move(mmap))); |
return LoadImpl(); |
} |
@@ -94,37 +125,44 @@ bool DataPack::LoadFromFile(base::File file) { |
bool DataPack::LoadFromFileRegion( |
base::File file, |
const base::MemoryMappedFile::Region& region) { |
- mmap_.reset(new base::MemoryMappedFile); |
- if (!mmap_->Initialize(std::move(file), region)) { |
+ std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile); |
+ if (!mmap->Initialize(std::move(file), region)) { |
DLOG(ERROR) << "Failed to mmap datapack"; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
return false; |
} |
+ data_source_.reset(new MemoryMappedDataSource(std::move(mmap))); |
+ return LoadImpl(); |
+} |
+ |
+bool DataPack::LoadFromBuffer(base::StringPiece buffer) { |
+ data_source_.reset(new BufferDataSource(buffer)); |
return LoadImpl(); |
} |
bool DataPack::LoadImpl() { |
// Sanity check the header of the file. |
- if (kHeaderLength > mmap_->length()) { |
+ if (kHeaderLength > data_source_->length()) { |
DLOG(ERROR) << "Data pack file corruption: incomplete file header."; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
+ // Clear memory mapped file even when we're loading data from a buffer, |
+ // in order to simplify code. |
sadrul
2016/05/17 15:07:50
This comment feels out of place? When loading from a buffer there is no
memory-mapped file to clear, so the comment doesn't apply — just drop it.
altimin
2016/05/17 15:40:07
Done.
|
+ data_source_.reset(); |
return false; |
} |
// Parse the header of the file. |
// First uint32_t: version; second: resource count; |
- const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data()); |
+ const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_source_->data()); |
uint32_t version = ptr[0]; |
if (version != kFileFormatVersion) { |
LOG(ERROR) << "Bad data pack version: got " << version << ", expected " |
<< kFileFormatVersion; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
+ data_source_.reset(); |
return false; |
} |
resource_count_ = ptr[1]; |
@@ -138,7 +176,7 @@ bool DataPack::LoadImpl() { |
<< ", expected between " << BINARY << " and " << UTF16; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
+ data_source_.reset(); |
return false; |
} |
@@ -146,25 +184,25 @@ bool DataPack::LoadImpl() { |
// 1) Check we have enough entries. There's an extra entry after the last item |
// which gives the length of the last item. |
if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) > |
- mmap_->length()) { |
+ data_source_->length()) { |
LOG(ERROR) << "Data pack file corruption: too short for number of " |
"entries specified."; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
+ data_source_.reset(); |
return false; |
} |
// 2) Verify the entries are within the appropriate bounds. There's an extra |
// entry after the last item which gives us the length of the last item. |
for (size_t i = 0; i < resource_count_ + 1; ++i) { |
const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( |
- mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
- if (entry->file_offset > mmap_->length()) { |
+ data_source_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
+ if (entry->file_offset > data_source_->length()) { |
LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. " |
<< "Was the file corrupted?"; |
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND, |
LOAD_ERRORS_COUNT); |
- mmap_.reset(); |
+ data_source_.reset(); |
return false; |
} |
} |
@@ -173,8 +211,9 @@ bool DataPack::LoadImpl() { |
} |
bool DataPack::HasResource(uint16_t resource_id) const { |
- return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_, |
- sizeof(DataPackEntry), DataPackEntry::CompareById); |
+ return !!bsearch(&resource_id, data_source_->data() + kHeaderLength, |
+ resource_count_, sizeof(DataPackEntry), |
+ DataPackEntry::CompareById); |
} |
bool DataPack::GetStringPiece(uint16_t resource_id, |
@@ -190,9 +229,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id, |
#error DataPack assumes little endian |
#endif |
- const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>( |
- bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_, |
- sizeof(DataPackEntry), DataPackEntry::CompareById)); |
+ const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch( |
+ &resource_id, data_source_->data() + kHeaderLength, resource_count_, |
+ sizeof(DataPackEntry), DataPackEntry::CompareById)); |
if (!target) { |
return false; |
} |
@@ -201,9 +240,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id, |
// If the next entry points beyond the end of the file this data pack's entry |
// table is corrupt. Log an error and return false. See |
// http://crbug.com/371301. |
- if (next_entry->file_offset > mmap_->length()) { |
- size_t entry_index = target - |
- reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength); |
+ if (next_entry->file_offset > data_source_->length()) { |
+ size_t entry_index = target - reinterpret_cast<const DataPackEntry*>( |
+ data_source_->data() + kHeaderLength); |
LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end " |
<< "of file. This should have been caught when loading. Was the " |
<< "file modified?"; |
@@ -211,8 +250,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id, |
} |
size_t length = next_entry->file_offset - target->file_offset; |
- data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset), |
- length); |
+ data->set( |
+ reinterpret_cast<const char*>(data_source_->data() + target->file_offset), |
+ length); |
return true; |
} |
@@ -242,7 +282,7 @@ void DataPack::CheckForDuplicateResources( |
const ScopedVector<ResourceHandle>& packs) { |
for (size_t i = 0; i < resource_count_ + 1; ++i) { |
const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>( |
- mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
+ data_source_->data() + kHeaderLength + (i * sizeof(DataPackEntry))); |
const uint16_t resource_id = entry->resource_id; |
const float resource_scale = GetScaleForScaleFactor(scale_factor_); |
for (const ResourceHandle* handle : packs) { |