Index: ui/base/resource/data_pack.cc
diff --git a/ui/base/resource/data_pack.cc b/ui/base/resource/data_pack.cc
index 245c1b7c84206779e16c43cfb34c8135c5646971..5fed705af789bd3ceac6ed73915adb0836ac1668 100644
--- a/ui/base/resource/data_pack.cc
+++ b/ui/base/resource/data_pack.cc
@@ -80,9 +80,11 @@ bool DataPack::LoadFromPath(const base::FilePath& path) {
     DLOG(ERROR) << "Failed to mmap datapack";
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    Clear();
     return false;
   }
+  length_ = mmap_->length();
+  data_ = mmap_->data();
   return LoadImpl();
 }
 
@@ -99,32 +101,42 @@ bool DataPack::LoadFromFileRegion(
     DLOG(ERROR) << "Failed to mmap datapack";
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    Clear();
     return false;
   }
+  length_ = mmap_->length();
+  data_ = mmap_->data();
+  return LoadImpl();
+}
+
+bool DataPack::LoadFromBuffer(base::StringPiece buffer) {
+  length_ = buffer.length();
+  data_ = reinterpret_cast<const uint8_t*>(buffer.data());
   return LoadImpl();
 }
 
 bool DataPack::LoadImpl() {
   // Sanity check the header of the file.
-  if (kHeaderLength > mmap_->length()) {
+  if (kHeaderLength > length_) {
     DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    // Clear memory mapped file even when we're loading data from a buffer,
+    // in order to simplify code.
+    Clear();
     return false;
   }
 
   // Parse the header of the file.
   // First uint32_t: version; second: resource count;
-  const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data());
+  const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_);
   uint32_t version = ptr[0];
   if (version != kFileFormatVersion) {
     LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
                << kFileFormatVersion;
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    Clear();
     return false;
   }
   resource_count_ = ptr[1];
@@ -138,7 +150,7 @@ bool DataPack::LoadImpl() {
                << ", expected between " << BINARY << " and " << UTF16;
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    Clear();
     return false;
   }
 
@@ -146,25 +158,25 @@
   // 1) Check we have enough entries. There's an extra entry after the last item
   // which gives the length of the last item.
   if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) >
-      mmap_->length()) {
+      length_) {
     LOG(ERROR) << "Data pack file corruption: too short for number of "
                   "entries specified.";
     UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED,
                               LOAD_ERRORS_COUNT);
-    mmap_.reset();
+    Clear();
     return false;
   }
   // 2) Verify the entries are within the appropriate bounds. There's an extra
   // entry after the last item which gives us the length of the last item.
   for (size_t i = 0; i < resource_count_ + 1; ++i) {
     const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
-    if (entry->file_offset > mmap_->length()) {
+        data_ + kHeaderLength + (i * sizeof(DataPackEntry)));
+    if (entry->file_offset > length_) {
       LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
                  << "Was the file corrupted?";
       UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND,
                                 LOAD_ERRORS_COUNT);
-      mmap_.reset();
+      Clear();
       return false;
     }
   }
@@ -172,8 +184,14 @@
   return true;
 }
 
+void DataPack::Clear() {
+  mmap_.reset();
+  length_ = 0;
+  data_ = nullptr;
+}
+
 bool DataPack::HasResource(uint16_t resource_id) const {
-  return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
+  return !!bsearch(&resource_id, data_ + kHeaderLength, resource_count_,
                    sizeof(DataPackEntry), DataPackEntry::CompareById);
 }
 
@@ -191,7 +209,7 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
 #endif
 
   const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(
-      bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
+      bsearch(&resource_id, data_ + kHeaderLength, resource_count_,
               sizeof(DataPackEntry), DataPackEntry::CompareById));
   if (!target) {
     return false;
@@ -201,9 +219,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
   // If the next entry points beyond the end of the file this data pack's entry
   // table is corrupt. Log an error and return false. See
   // http://crbug.com/371301.
-  if (next_entry->file_offset > mmap_->length()) {
+  if (next_entry->file_offset > length_) {
     size_t entry_index = target -
-        reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength);
+        reinterpret_cast<const DataPackEntry*>(data_ + kHeaderLength);
     LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end "
                << "of file. This should have been caught when loading. Was the "
                << "file modified?";
@@ -211,7 +229,7 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
   }
 
   size_t length = next_entry->file_offset - target->file_offset;
-  data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
+  data->set(reinterpret_cast<const char*>(data_ + target->file_offset),
             length);
   return true;
 }
@@ -242,7 +260,7 @@ void DataPack::CheckForDuplicateResources(
     const ScopedVector<ResourceHandle>& packs) {
   for (size_t i = 0; i < resource_count_ + 1; ++i) {
     const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
-        mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+        data_ + kHeaderLength + (i * sizeof(DataPackEntry)));
     const uint16_t resource_id = entry->resource_id;
     const float resource_scale = GetScaleForScaleFactor(scale_factor_);
     for (const ResourceHandle* handle : packs) {
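
The header side of this change is not included in the diff above. For orientation, here is a minimal, hypothetical sketch of the ui/base/resource/data_pack.h declarations the .cc code implies: LoadFromBuffer(), Clear(), and the data_/length_ members are taken from the assignments in the diff, while the includes, access specifiers, and everything else in the class are illustrative guesses, not the actual header.

// Hypothetical sketch only -- not the real data_pack.h change. Member types
// are inferred from the .cc: mmap_->data() yields const uint8_t*, and both
// mmap_->length() and base::StringPiece::length() are size_t.
#include <stddef.h>
#include <stdint.h>
#include <memory>

#include "base/files/memory_mapped_file.h"
#include "base/strings/string_piece.h"

class DataPack {
 public:
  // Loads a pack whose bytes are already in memory. The buffer is not
  // copied, so it must outlive this DataPack.
  bool LoadFromBuffer(base::StringPiece buffer);

 private:
  bool LoadImpl();
  // Resets the memory map (if any) and the data_/length_ view of the pack.
  void Clear();

  std::unique_ptr<base::MemoryMappedFile> mmap_;  // Null when a buffer is used.
  const uint8_t* data_ = nullptr;  // Start of the pack bytes being read.
  size_t length_ = 0;              // Size of the pack bytes.
};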
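And a short caller-side sketch of the new entry point, showing a pack held entirely in memory being read without an mmap. LoadFromBuffer(), HasResource(), and GetStringPiece() appear in the diff; the DataPack constructor taking a ui::ScaleFactor and the ui::SCALE_FACTOR_100P constant are assumptions added for illustration.

// Illustrative only. Reads one resource out of a .pak image that is already
// resident in memory; the constructor argument below is an assumption.
#include <stdint.h>
#include <string>

#include "base/strings/string_piece.h"
#include "ui/base/resource/data_pack.h"

bool ReadResourceFromMemoryPack(const std::string& pak_bytes,
                                uint16_t resource_id,
                                base::StringPiece* out) {
  ui::DataPack pack(ui::SCALE_FACTOR_100P);
  // LoadFromBuffer() does not copy, so |pak_bytes| must outlive |pack|.
  if (!pack.LoadFromBuffer(base::StringPiece(pak_bytes)))
    return false;
  if (!pack.HasResource(resource_id))
    return false;
  return pack.GetStringPiece(resource_id, out);
}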
|