Chromium Code Reviews

Unified Diff: ui/base/resource/data_pack.cc

Issue 1969313005: [headless] Embed pak file into binary. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fixes according to comments. Created 4 years, 7 months ago
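
For context: this CL adds a DataPack::LoadFromBuffer(base::StringPiece) entry point so an embedder (here, headless) can load a .pak that was linked into the binary instead of mmapping a file from disk. A minimal sketch of a caller, under the assumption that some build step provides the embedded bytes; the symbols kEmbeddedPakContents and kEmbeddedPakSize are illustrative only, not part of this CL:

#include "base/strings/string_piece.h"
#include "ui/base/resource/data_pack.h"

// Hypothetical symbols emitted by whatever build step embeds the .pak file.
extern const char kEmbeddedPakContents[];
extern const size_t kEmbeddedPakSize;

bool LoadEmbeddedPak(ui::DataPack* pack) {
  // LoadFromBuffer does not copy or take ownership of the bytes: the new
  // BufferDataSource stores only a base::StringPiece, so the embedded data
  // must stay alive for the lifetime of the DataPack.
  return pack->LoadFromBuffer(
      base::StringPiece(kEmbeddedPakContents, kEmbeddedPakSize));
}

The non-owning behavior is fine for data baked into the binary, since it lives for the whole process.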
Index: ui/base/resource/data_pack.cc
diff --git a/ui/base/resource/data_pack.cc b/ui/base/resource/data_pack.cc
index 245c1b7c84206779e16c43cfb34c8135c5646971..a49016efd3d0aad8eb8e66b8bdd34d555d020a35 100644
--- a/ui/base/resource/data_pack.cc
+++ b/ui/base/resource/data_pack.cc
@@ -64,6 +64,53 @@ enum LoadErrors {
namespace ui {
+class DataPack::DataSource {
+ public:
+ virtual ~DataSource() {};
sky 2016/05/19 20:25:54 no ;
altimin 2016/05/19 23:04:47 Done.
+
+ virtual size_t Length() const = 0;
+ virtual const uint8_t* Data() const = 0;
+};
+
+namespace {
+
+class MemoryMappedDataSource : public DataPack::DataSource {
+ public:
+ MemoryMappedDataSource(std::unique_ptr<base::MemoryMappedFile> mmap)
sky 2016/05/19 20:25:54 explicit
altimin 2016/05/19 23:04:47 Done.
+ : mmap_(std::move(mmap)) {}
+
+ ~MemoryMappedDataSource() override{};
sky 2016/05/19 20:25:54 no ;, and space after override
altimin 2016/05/19 23:04:47 Done.
+
+ size_t Length() const override { return mmap_->length(); }
+
+ const uint8_t* Data() const override { return mmap_->data(); }
+
+ private:
+ std::unique_ptr<base::MemoryMappedFile> mmap_;
+
+ DISALLOW_COPY_AND_ASSIGN(MemoryMappedDataSource);
+};
+
+class BufferDataSource : public DataPack::DataSource {
sky 2016/05/19 20:25:54 StringPieceDataSource
altimin 2016/05/19 23:04:47 And again, I believe that essence of this thing is
sky 2016/05/20 15:49:04 IMO BufferDataSource is vague. It is immediately a
+ public:
+ BufferDataSource(base::StringPiece buffer) : buffer_(buffer) {}
sky 2016/05/19 20:25:54 explicit
altimin 2016/05/19 23:04:47 Done.
+
+ ~BufferDataSource() override{};
sky 2016/05/19 20:25:54 nit: no ; and space
altimin 2016/05/19 23:04:47 Done.
+
+ size_t Length() const override { return buffer_.length(); }
+
+ const uint8_t* Data() const override {
+ return reinterpret_cast<const uint8_t*>(buffer_.data());
sky 2016/05/19 20:25:54 Why do you need the reinterpret_cast here?
altimin 2016/05/19 23:04:47 Because base::StringPiece has int8_t (aka char) in
+ }
+
+ private:
+ base::StringPiece buffer_;
+
+ DISALLOW_COPY_AND_ASSIGN(BufferDataSource);
+};
+
+} // namespace
+
DataPack::DataPack(ui::ScaleFactor scale_factor)
: resource_count_(0),
text_encoding_type_(BINARY),
@@ -75,14 +122,14 @@ DataPack::~DataPack() {
}
bool DataPack::LoadFromPath(const base::FilePath& path) {
- mmap_.reset(new base::MemoryMappedFile);
- if (!mmap_->Initialize(path)) {
+ std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
+ if (!mmap->Initialize(path)) {
DLOG(ERROR) << "Failed to mmap datapack";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED,
LOAD_ERRORS_COUNT);
- mmap_.reset();
return false;
}
+ data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
return LoadImpl();
}
@@ -94,37 +141,42 @@ bool DataPack::LoadFromFile(base::File file) {
bool DataPack::LoadFromFileRegion(
base::File file,
const base::MemoryMappedFile::Region& region) {
- mmap_.reset(new base::MemoryMappedFile);
- if (!mmap_->Initialize(std::move(file), region)) {
+ std::unique_ptr<base::MemoryMappedFile> mmap(new base::MemoryMappedFile);
+ if (!mmap->Initialize(std::move(file), region)) {
DLOG(ERROR) << "Failed to mmap datapack";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INIT_FAILED_FROM_FILE,
LOAD_ERRORS_COUNT);
- mmap_.reset();
return false;
}
+ data_source_.reset(new MemoryMappedDataSource(std::move(mmap)));
+ return LoadImpl();
+}
+
+bool DataPack::LoadFromBuffer(base::StringPiece buffer) {
+ data_source_.reset(new BufferDataSource(buffer));
return LoadImpl();
}
bool DataPack::LoadImpl() {
// Sanity check the header of the file.
- if (kHeaderLength > mmap_->length()) {
+ if (kHeaderLength > data_source_->Length()) {
DLOG(ERROR) << "Data pack file corruption: incomplete file header.";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", HEADER_TRUNCATED,
LOAD_ERRORS_COUNT);
- mmap_.reset();
+ data_source_.reset();
return false;
}
// Parse the header of the file.
// First uint32_t: version; second: resource count;
- const uint32_t* ptr = reinterpret_cast<const uint32_t*>(mmap_->data());
+ const uint32_t* ptr = reinterpret_cast<const uint32_t*>(data_source_->Data());
uint32_t version = ptr[0];
if (version != kFileFormatVersion) {
LOG(ERROR) << "Bad data pack version: got " << version << ", expected "
<< kFileFormatVersion;
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", BAD_VERSION,
LOAD_ERRORS_COUNT);
- mmap_.reset();
+ data_source_.reset();
return false;
}
resource_count_ = ptr[1];
@@ -138,7 +190,7 @@ bool DataPack::LoadImpl() {
<< ", expected between " << BINARY << " and " << UTF16;
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", WRONG_ENCODING,
LOAD_ERRORS_COUNT);
- mmap_.reset();
+ data_source_.reset();
return false;
}
@@ -146,25 +198,25 @@ bool DataPack::LoadImpl() {
// 1) Check we have enough entries. There's an extra entry after the last item
// which gives the length of the last item.
if (kHeaderLength + (resource_count_ + 1) * sizeof(DataPackEntry) >
- mmap_->length()) {
+ data_source_->Length()) {
LOG(ERROR) << "Data pack file corruption: too short for number of "
"entries specified.";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", INDEX_TRUNCATED,
LOAD_ERRORS_COUNT);
- mmap_.reset();
+ data_source_.reset();
return false;
}
// 2) Verify the entries are within the appropriate bounds. There's an extra
// entry after the last item which gives us the length of the last item.
for (size_t i = 0; i < resource_count_ + 1; ++i) {
const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
- mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
- if (entry->file_offset > mmap_->length()) {
+ data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+ if (entry->file_offset > data_source_->Length()) {
LOG(ERROR) << "Entry #" << i << " in data pack points off end of file. "
<< "Was the file corrupted?";
UMA_HISTOGRAM_ENUMERATION("DataPack.Load", ENTRY_NOT_FOUND,
LOAD_ERRORS_COUNT);
- mmap_.reset();
+ data_source_.reset();
return false;
}
}
@@ -173,8 +225,9 @@ bool DataPack::LoadImpl() {
}
bool DataPack::HasResource(uint16_t resource_id) const {
- return !!bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
- sizeof(DataPackEntry), DataPackEntry::CompareById);
+ return !!bsearch(&resource_id, data_source_->Data() + kHeaderLength,
+ resource_count_, sizeof(DataPackEntry),
+ DataPackEntry::CompareById);
}
bool DataPack::GetStringPiece(uint16_t resource_id,
@@ -190,9 +243,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
#error DataPack assumes little endian
#endif
- const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(
- bsearch(&resource_id, mmap_->data() + kHeaderLength, resource_count_,
- sizeof(DataPackEntry), DataPackEntry::CompareById));
+ const DataPackEntry* target = reinterpret_cast<const DataPackEntry*>(bsearch(
+ &resource_id, data_source_->Data() + kHeaderLength, resource_count_,
+ sizeof(DataPackEntry), DataPackEntry::CompareById));
if (!target) {
return false;
}
@@ -201,9 +254,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
// If the next entry points beyond the end of the file this data pack's entry
// table is corrupt. Log an error and return false. See
// http://crbug.com/371301.
- if (next_entry->file_offset > mmap_->length()) {
- size_t entry_index = target -
- reinterpret_cast<const DataPackEntry*>(mmap_->data() + kHeaderLength);
+ if (next_entry->file_offset > data_source_->Length()) {
+ size_t entry_index = target - reinterpret_cast<const DataPackEntry*>(
+ data_source_->Data() + kHeaderLength);
LOG(ERROR) << "Entry #" << entry_index << " in data pack points off end "
<< "of file. This should have been caught when loading. Was the "
<< "file modified?";
@@ -211,8 +264,9 @@ bool DataPack::GetStringPiece(uint16_t resource_id,
}
size_t length = next_entry->file_offset - target->file_offset;
- data->set(reinterpret_cast<const char*>(mmap_->data() + target->file_offset),
- length);
+ data->set(
+ reinterpret_cast<const char*>(data_source_->Data() + target->file_offset),
+ length);
return true;
}
@@ -242,7 +296,7 @@ void DataPack::CheckForDuplicateResources(
const ScopedVector<ResourceHandle>& packs) {
for (size_t i = 0; i < resource_count_ + 1; ++i) {
const DataPackEntry* entry = reinterpret_cast<const DataPackEntry*>(
- mmap_->data() + kHeaderLength + (i * sizeof(DataPackEntry)));
+ data_source_->Data() + kHeaderLength + (i * sizeof(DataPackEntry)));
const uint16_t resource_id = entry->resource_id;
const float resource_scale = GetScaleForScaleFactor(scale_factor_);
for (const ResourceHandle* handle : packs) {
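
A side note on the reinterpret_cast discussed in the comments above: base::StringPiece::data() returns const char*, while the DataSource interface hands out const uint8_t*, so the pointer type has to be converted at that boundary. An illustrative stand-alone equivalent (not part of the CL):

#include <stdint.h>

#include "base/strings/string_piece.h"

const uint8_t* AsBytes(base::StringPiece piece) {
  // char and uint8_t are distinct types as far as the compiler is concerned,
  // even though they have the same size, so an explicit cast is required.
  return reinterpret_cast<const uint8_t*>(piece.data());
}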
