| Index: ui/base/resource/data_pack.cc
|
| diff --git a/ui/base/resource/data_pack.cc b/ui/base/resource/data_pack.cc
|
| index 245c1b7c84206779e16c43cfb34c8135c5646971..10088e8898f2168cf93928077f97f32b3a9fa1e7 100644
|
| --- a/ui/base/resource/data_pack.cc
|
| +++ b/ui/base/resource/data_pack.cc
|
| @@ -60,6 +60,41 @@ enum LoadErrors {
|
| LOAD_ERRORS_COUNT,
|
| };
|
|
|
| +class MemoryMappedDataSource : public ui::DataSource {
|
| + public:
|
| + explicit MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap)
|
| + : mmap_(std::move(mmap)) {}
|
| +
|
| + ~MemoryMappedDataSource() override {}
|
| +
|
| + size_t Length() const override { return mmap_->length(); }
|
| +
|
| + const uint8_t* Data() const override { return mmap_->data(); }
|
| +
|
| + private:
|
| + std::unique_ptr<base::MemoryMappedFile> mmap_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource);
|
| +};
|
| +
|
| +class BufferDataSource : public ui::DataSource {
|
| + public:
|
| + explicit BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {}
|
| +
|
| + ~BufferDataSource() override {}
|
| +
|
| + size_t Length() const override { return buffer_.length(); }
|
| +
|
| + const uint8_t* Data() const override {
|
| + return reinterpret_cast<const uint8_t*>(buffer_.data());
|
| + }
|
| +
|
| + private:
|
| + base::StringPiece buffer_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(BufferDataSource);
|
| +};
|
| +
|
| } // namespace
|
|
|
| namespace ui {
|
| @@ -75,14 +110,14 @@ DataPack::~DataPack() {
|
| }
|
|
|
| bool DataPack::LoadFromPath(const base::FilePath& path) {
|
| - mmap_.reset(new base::MemoryMappedFile);
|
| - if (!mmap_->Initialize(path)) {
|
| + std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
|
| + if (!mmap->Initialize(path)) {
|
| DLOG(ERROR) << "Failed to mmap datapack";
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| return false;
|
| }
|
| + data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
|
| return LoadImpl();
|
| }
|
|
|
| @@ -94,37 +129,42 @@ bool DataPack::LoadFromFile(base::File file) {
|
| bool DataPack::LoadFromFileRegion(
|
| base::File file,
|
| const base::MemoryMappedFile::Region& region) {
|
| - mmap_.reset(new base::MemoryMappedFile);
|
| - if (!mmap_->Initialize(std::move(file), region)) {
|
| + std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
|
| + if (!mmap->Initialize(std::move(file), region)) {
|
| DLOG(ERROR) << "Failed to mmap datapack";
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| return false;
|
| }
|
| + data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
|
| + return LoadImpl();
|
| +}
|
| +
|
| +bool DataPack::LoadFromBuffer(base::StringPiece buffer) {
|
| + data_source_.reset(new BufferDataSource(buffer));
|
| return LoadImpl();
|
| }
|
|
|
| bool DataPack::LoadImpl() {
|
| // Sanity check the header of the file.
|
| - if (kHeaderLength > mmap_->length()) {
|
| + if (kHeaderLength > data_source_->Length()) {
|
| DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| + data_source_.reset();
|
| return false;
|
| }
|
|
|
| // Parse the header of the file.
|
| // First uint32_t: version; second: resource count;
|
| - const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data());
|
| + const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_source_->Data());
|
| uint32_t version = ptr[0];
|
| if (version != kFileFormatVersion) {
|
| LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
|
| << kFileFormatVersion;
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| + data_source_.reset();
|
| return false;
|
| }
|
| resource_count_ = ptr[1];
|
| @@ -138,7 +178,7 @@ bool DataPack::LoadImpl() {
|
| << ", expected between " << BINARY << " and " << UTF16;
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| + data_source_.reset();
|
| return false;
|
| }
|
|
|
| @@ -146,25 +186,25 @@ bool DataPack::LoadImpl() {
|
| // 1) Check we have enough entries. There's an extra entry after the last item
|
| // which gives the length of the last item.
|
| if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) >
|
| - mmap_->length()) {
|
| + data_source_->Length()) {
|
| LOG(ERROR) << "Data pack file corruption: too short for number of "
|
| "entries specified.";
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| + data_source_.reset();
|
| return false;
|
| }
|
| // 2) Verify the entries are within the appropriate bounds. There's an extra
|
| // entry after the last item which gives us the length of the last item.
|
| for (size_t i = 0; i < resource_count_ + 1; ++i) {
|
| const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
|
| - mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
|
| - if (entry->file_offset > mmap_->length()) {
|
| + data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
|
| + if (entry->file_offset > data_source_->Length()) {
|
| LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
|
| << "Was the file corrupted?";
|
| UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND,
|
| LOAD_ERRORS_COUNT);
|
| - mmap_.reset();
|
| + data_source_.reset();
|
| return false;
|
| }
|
| }
|
| @@ -173,8 +213,9 @@ bool DataPack::LoadImpl() {
|
| }
|
|
|
| bool DataPack::HasResource(uint16_t resource_id) const {
|
| - return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
|
| - sizeof(DataPackEntry), DataPackEntry::CompareById);
|
| + return !!bsearch(&resource_id, data_source_->Data() + kHeaderLength,
|
| + resource_count_, sizeof(DataPackEntry),
|
| + DataPackEntry::CompareById);
|
| }
|
|
|
| bool DataPack::GetStringPiece(uint16_t resource_id,
|
| @@ -190,9 +231,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
|
| #error DataPack assumes little endian
|
| #endif
|
|
|
| - const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(
|
| - bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
|
| - sizeof(DataPackEntry), DataPackEntry::CompareById));
|
| + const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch(
|
| + &resource_id, data_source_->Data() + kHeaderLength, resource_count_,
|
| + sizeof(DataPackEntry), DataPackEntry::CompareById));
|
| if (!target) {
|
| return false;
|
| }
|
| @@ -201,9 +242,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
|
| // If the next entry points beyond the end of the file this data pack's entry
|
| // table is corrupt. Log an error and return false. See
|
| // http://crbug.com/371301.
|
| - if (next_entry->file_offset > mmap_->length()) {
|
| - size_t entry_index = target -
|
| - reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength);
|
| + if (next_entry->file_offset > data_source_->Length()) {
|
| + size_t entry_index = target - reinterpret_cast<const DataPackEntry*>(
|
| + data_source_->Data() + kHeaderLength);
|
| LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end "
|
| << "of file. This should have been caught when loading. Was the "
|
| << "file modified?";
|
| @@ -211,8 +252,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
|
| }
|
|
|
| size_t length = next_entry->file_offset - target->file_offset;
|
| - data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
|
| - length);
|
| + data->set(
|
| + reinterpret_cast<const char*>(data_source_->Data() + target->file_offset),
|
| + length);
|
| return true;
|
| }
|
|
|
| @@ -242,7 +284,7 @@ void DataPack::CheckForDuplicateResources(
|
| const ScopedVector<ResourceHandle>& packs) {
|
| for (size_t i = 0; i < resource_count_ + 1; ++i) {
|
| const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
|
| - mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
|
| + data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
|
| const uint16_t resource_id = entry->resource_id;
|
| const float resource_scale = GetScaleForScaleFactor(scale_factor_);
|
| for (const ResourceHandle* handle : packs) {
|
|
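| Note: the ui::DataSource interface that MemoryMappedDataSource and
| BufferDataSource implement is not part of this hunk; its declaration, and the
| matching member change in data_pack.h where data_source_ replaces mmap_, live
| on the header side of the change. A minimal sketch of what that interface
| looks like, inferred only from the Length()/Data() calls above (not copied
| from the actual header diff), might be:
|
|   #include <stddef.h>
|   #include <stdint.h>
|   #include <memory>
|
|   // Sketch only: abstracts the bytes backing a data pack, whether they come
|   // from a memory-mapped file or from a caller-supplied buffer.
|   class DataSource {
|    public:
|     virtual ~DataSource() {}
|
|     // Number of bytes available in the pack.
|     virtual size_t Length() const = 0;
|
|     // Pointer to the first byte; valid for Length() bytes.
|     virtual const uint8_t* Data() const = 0;
|   };
|
|   // Presumed member in DataPack (data_pack.h), replacing
|   // std::unique_ptr<base::MemoryMappedFile> mmap_;
|   std::unique_ptr<DataSource> data_source_;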
|
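| A caller-side sketch of the new LoadFromBuffer() entry point (hypothetical
| helper, not part of this CL): because BufferDataSource only stores a
| StringPiece over the supplied memory, the buffer must stay alive for as long
| as the DataPack, and any StringPiece it hands out, is in use.
|
|   #include <stdint.h>
|
|   #include "base/strings/string_piece.h"
|   #include "ui/base/resource/data_pack.h"
|
|   bool ReadStringResource(const char* bytes, size_t size,
|                           uint16_t resource_id, base::StringPiece* out) {
|     ui::DataPack pack(ui::SCALE_FACTOR_100P);
|     // |bytes| must outlive |pack| and |*out|: BufferDataSource does not copy.
|     if (!pack.LoadFromBuffer(base::StringPiece(bytes, size)))
|       return false;
|     return pack.GetStringPiece(resource_id, out);
|   }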