| Index: third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
|
| diff --git a/third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc b/third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
|
| index c7fd96ac22c2da9b72f4985a42297b0eb878fa62..196b39dd93207e5529a8e5a0701afcb72b658c2d 100644
|
| --- a/third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
|
| +++ b/third_party/protobuf/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
|
| @@ -44,10 +44,9 @@
|
|
|
| #include <google/protobuf/stubs/hash.h>
|
| #include <google/protobuf/compiler/objectivec/objectivec_helpers.h>
|
| -#include <google/protobuf/descriptor.pb.h>
|
| #include <google/protobuf/io/coded_stream.h>
|
| -#include <google/protobuf/io/printer.h>
|
| #include <google/protobuf/io/zero_copy_stream_impl.h>
|
| +#include <google/protobuf/descriptor.pb.h>
|
| #include <google/protobuf/stubs/common.h>
|
| #include <google/protobuf/stubs/strutil.h>
|
|
|
| @@ -82,10 +81,6 @@ const char* const kUpperSegmentsList[] = {"url", "http", "https"};
|
| hash_set<string> kUpperSegments =
|
| MakeWordsMap(kUpperSegmentsList, GOOGLE_ARRAYSIZE(kUpperSegmentsList));
|
|
|
| -bool ascii_isnewline(char c) {
|
| - return c == '\n' || c == '\r';
|
| -}
|
| -
|
| // Internal helper for name handling.
|
| // Do not expose this outside of helpers, stick to having functions for specific
|
| // cases (ClassName(), FieldName()), so there are always consistent suffix rules.
|
| @@ -210,14 +205,10 @@ const char* const kReservedWordList[] = {
|
| hash_set<string> kReservedWords =
|
| MakeWordsMap(kReservedWordList, GOOGLE_ARRAYSIZE(kReservedWordList));
|
|
|
| -string SanitizeNameForObjC(const string& input,
|
| - const string& extension,
|
| - string* out_suffix_added) {
|
| +string SanitizeNameForObjC(const string& input, const string& extension) {
|
| if (kReservedWords.count(input) > 0) {
|
| - if (out_suffix_added) *out_suffix_added = extension;
|
| return input + extension;
|
| }
|
| - if (out_suffix_added) out_suffix_added->clear();
|
| return input;
|
| }
|
|
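| The simplified two-argument SanitizeNameForObjC() above only appends the
| extension when the input collides with an entry in kReservedWords. A minimal
| standalone sketch of that behavior (illustrative only, outside the patch; the
| reserved set here is a hypothetical subset of kReservedWordList):
|
|     #include <set>
|     #include <string>
|
|     // Hypothetical subset; the generator's kReservedWordList is far longer.
|     static const std::set<std::string> kSketchReserved = {"id", "class", "hash"};
|
|     std::string SanitizeSketch(const std::string& input,
|                                const std::string& extension) {
|       // Reserved word: suffix it so the generated ObjC identifier stays legal.
|       if (kSketchReserved.count(input) > 0) {
|         return input + extension;
|       }
|       return input;  // Everything else passes through untouched.
|     }
|
|     // SanitizeSketch("id", "_p")   -> "id_p"
|     // SanitizeSketch("name", "_p") -> "name"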
|
| @@ -266,34 +257,6 @@ bool IsSpecialName(const string& name, const string* special_names,
|
| return false;
|
| }
|
|
|
| -string GetZeroEnumNameForFlagType(const FlagType flag_type) {
|
| - switch(flag_type) {
|
| - case FLAGTYPE_DESCRIPTOR_INITIALIZATION:
|
| - return "GPBDescriptorInitializationFlag_None";
|
| - case FLAGTYPE_EXTENSION:
|
| - return "GPBExtensionNone";
|
| - case FLAGTYPE_FIELD:
|
| - return "GPBFieldNone";
|
| - default:
|
| - GOOGLE_LOG(FATAL) << "Can't get here.";
|
| - return "0";
|
| - }
|
| -}
|
| -
|
| -string GetEnumNameForFlagType(const FlagType flag_type) {
|
| - switch(flag_type) {
|
| - case FLAGTYPE_DESCRIPTOR_INITIALIZATION:
|
| - return "GPBDescriptorInitializationFlags";
|
| - case FLAGTYPE_EXTENSION:
|
| - return "GPBExtensionOptions";
|
| - case FLAGTYPE_FIELD:
|
| - return "GPBFieldFlags";
|
| - default:
|
| - GOOGLE_LOG(FATAL) << "Can't get here.";
|
| - return string();
|
| - }
|
| -}
|
| -
|
| } // namespace
|
|
|
| // Escape C++ trigraphs by escaping question marks to \?
|
| @@ -309,16 +272,6 @@ string StripProto(const string& filename) {
|
| }
|
| }
|
|
|
| -void StringPieceTrimWhitespace(StringPiece* input) {
|
| - while (!input->empty() && ascii_isspace(*input->data())) {
|
| - input->remove_prefix(1);
|
| - }
|
| - while (!input->empty() && ascii_isspace((*input)[input->length() - 1])) {
|
| - input->remove_suffix(1);
|
| - }
|
| -}
|
| -
|
| -
|
| bool IsRetainedName(const string& name) {
|
| // List of prefixes from
|
| // http://developer.apple.com/library/mac/#documentation/Cocoa/Conceptual/MemoryMgmt/Articles/mmRules.html
|
| @@ -340,10 +293,11 @@ string BaseFileName(const FileDescriptor* file) {
|
| return basename;
|
| }
|
|
|
| -string FileClassPrefix(const FileDescriptor* file) {
|
| - // Default is empty string, no need to check has_objc_class_prefix.
|
| - string result = file->options().objc_class_prefix();
|
| - return result;
|
| +string FileName(const FileDescriptor* file) {
|
| + string path = FilePath(file);
|
| + string basename;
|
| + PathSplit(path, NULL, &basename);
|
| + return basename;
|
| }
|
|
|
| string FilePath(const FileDescriptor* file) {
|
| @@ -363,17 +317,10 @@ string FilePath(const FileDescriptor* file) {
|
| return output;
|
| }
|
|
|
| -string FilePathBasename(const FileDescriptor* file) {
|
| - string output;
|
| - string basename;
|
| - string directory;
|
| - PathSplit(file->name(), &directory, &basename);
|
| - basename = StripProto(basename);
|
| -
|
| - // CamelCase to be more ObjC friendly.
|
| - output = UnderscoresToCamelCase(basename, true);
|
| -
|
| - return output;
|
| +string FileClassPrefix(const FileDescriptor* file) {
|
| + // Default is empty string, no need to check has_objc_class_prefix.
|
| + string result = file->options().objc_class_prefix();
|
| + return result;
|
| }
|
|
|
| string FileClassName(const FileDescriptor* file) {
|
| @@ -382,7 +329,7 @@ string FileClassName(const FileDescriptor* file) {
|
| name += "Root";
|
| // There aren't really any reserved words that end in "Root", but playing
|
| // it safe and checking.
|
| - return SanitizeNameForObjC(name, "_RootClass", NULL);
|
| + return SanitizeNameForObjC(name, "_RootClass");
|
| }
|
|
|
| string ClassNameWorker(const Descriptor* descriptor) {
|
| @@ -404,15 +351,11 @@ string ClassNameWorker(const EnumDescriptor* descriptor) {
|
| }
|
|
|
| string ClassName(const Descriptor* descriptor) {
|
| - return ClassName(descriptor, NULL);
|
| -}
|
| -
|
| -string ClassName(const Descriptor* descriptor, string* out_suffix_added) {
|
| // 1. Message names are used as is (style calls for CamelCase, trust it).
|
| // 2. Check for reserved word at the very end and then suffix things.
|
| string prefix = FileClassPrefix(descriptor->file());
|
| string name = ClassNameWorker(descriptor);
|
| - return SanitizeNameForObjC(prefix + name, "_Class", out_suffix_added);
|
| + return SanitizeNameForObjC(prefix + name, "_Class");
|
| }
|
|
|
| string EnumName(const EnumDescriptor* descriptor) {
|
| @@ -426,7 +369,7 @@ string EnumName(const EnumDescriptor* descriptor) {
|
| // yields Fixed_Class, Fixed_Size.
|
| string name = FileClassPrefix(descriptor->file());
|
| name += ClassNameWorker(descriptor);
|
| - return SanitizeNameForObjC(name, "_Enum", NULL);
|
| + return SanitizeNameForObjC(name, "_Enum");
|
| }
|
|
|
| string EnumValueName(const EnumValueDescriptor* descriptor) {
|
| @@ -441,7 +384,7 @@ string EnumValueName(const EnumValueDescriptor* descriptor) {
|
| const string& name = class_name + "_" + value_str;
|
| // There aren't really any reserved words with an underscore and a leading
|
| // capital letter, but playing it safe and checking.
|
| - return SanitizeNameForObjC(name, "_Value", NULL);
|
| + return SanitizeNameForObjC(name, "_Value");
|
| }
|
|
|
| string EnumValueShortName(const EnumValueDescriptor* descriptor) {
|
| @@ -478,7 +421,7 @@ string UnCamelCaseEnumShortName(const string& name) {
|
| string ExtensionMethodName(const FieldDescriptor* descriptor) {
|
| const string& name = NameFromFieldDescriptor(descriptor);
|
| const string& result = UnderscoresToCamelCase(name, false);
|
| - return SanitizeNameForObjC(result, "_Extension", NULL);
|
| + return SanitizeNameForObjC(result, "_Extension");
|
| }
|
|
|
| string FieldName(const FieldDescriptor* field) {
|
| @@ -493,7 +436,7 @@ string FieldName(const FieldDescriptor* field) {
|
| result += "_p";
|
| }
|
| }
|
| - return SanitizeNameForObjC(result, "_p", NULL);
|
| + return SanitizeNameForObjC(result, "_p");
|
| }
|
|
|
| string FieldNameCapitalized(const FieldDescriptor* field) {
|
| @@ -853,26 +796,21 @@ bool HasNonZeroDefaultValue(const FieldDescriptor* field) {
|
| return false;
|
| }
|
|
|
| -string BuildFlagsString(const FlagType flag_type,
|
| - const vector<string>& strings) {
|
| +string BuildFlagsString(const vector<string>& strings) {
|
| if (strings.size() == 0) {
|
| - return GetZeroEnumNameForFlagType(flag_type);
|
| - } else if (strings.size() == 1) {
|
| - return strings[0];
|
| + return "0";
|
| }
|
| - string string("(" + GetEnumNameForFlagType(flag_type) + ")(");
|
| + string string;
|
| for (size_t i = 0; i != strings.size(); ++i) {
|
| if (i > 0) {
|
| string.append(" | ");
|
| }
|
| string.append(strings[i]);
|
| }
|
| - string.append(")");
|
| return string;
|
| }
|
|
|
| -string BuildCommentsString(const SourceLocation& location,
|
| - bool prefer_single_line) {
|
| +string BuildCommentsString(const SourceLocation& location) {
|
| const string& comments = location.leading_comments.empty()
|
| ? location.trailing_comments
|
| : location.leading_comments;
|
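| The reworked BuildFlagsString() above now simply joins its inputs with " | "
| and returns "0" for an empty list, dropping the typed-enum cast and the
| single-element shortcut (whose result is unchanged). Illustrative calls,
| outside the patch (the flag names are examples):
|
|     // BuildFlagsString({})                        -> "0"
|     // BuildFlagsString({"GPBFieldRequired"})      -> "GPBFieldRequired"
|     // BuildFlagsString({"GPBFieldRequired",
|     //                   "GPBFieldHasDefaultValue"})
|     //     -> "GPBFieldRequired | GPBFieldHasDefaultValue"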
| @@ -881,82 +819,77 @@ string BuildCommentsString(const SourceLocation& location,
|
| while (!lines.empty() && lines.back().empty()) {
|
| lines.pop_back();
|
| }
|
| - // If there are no comments, just return an empty string.
|
| - if (lines.size() == 0) {
|
| - return "";
|
| + string prefix("///");
|
| + string suffix("\n");
|
| + string final_comments;
|
| + for (int i = 0; i < lines.size(); i++) {
|
| + // HeaderDoc uses '\' and '@' for markers; escape them.
|
| + const string line = StringReplace(lines[i], "\\", "\\\\", true);
|
| + final_comments +=
|
| + prefix + StringReplace(line, "@", "\\@", true) + suffix;
|
| }
|
| + return final_comments;
|
| +}
|
|
|
| - string prefix;
|
| - string suffix;
|
| - string final_comments;
|
| - string epilogue;
|
| +namespace {
|
|
|
| - bool add_leading_space = false;
|
| +// Internal helper class that parses the expected package to prefix mappings
|
| +// file.
|
| +class Parser {
|
| + public:
|
| + Parser(map<string, string>* inout_package_to_prefix_map)
|
| + : prefix_map_(inout_package_to_prefix_map), line_(0) {}
|
|
|
| - if (prefer_single_line && lines.size() == 1) {
|
| - prefix = "/** ";
|
| - suffix = " */\n";
|
| - } else {
|
| - prefix = "* ";
|
| - suffix = "\n";
|
| - final_comments += "/**\n";
|
| - epilogue = " **/\n";
|
| - add_leading_space = true;
|
| - }
|
| + // Parses a chunk of input, returning success/failure.
|
| + bool ParseChunk(StringPiece chunk);
|
|
|
| - for (int i = 0; i < lines.size(); i++) {
|
| - string line = StripPrefixString(lines[i], " ");
|
| - // HeaderDoc and appledoc use '\' and '@' for markers; escape them.
|
| - line = StringReplace(line, "\\", "\\\\", true);
|
| - line = StringReplace(line, "@", "\\@", true);
|
| - // Decouple / from * to not have inline comments inside comments.
|
| - line = StringReplace(line, "/*", "/\\*", true);
|
| - line = StringReplace(line, "*/", "*\\/", true);
|
| - line = prefix + line;
|
| - StripWhitespace(&line);
|
| - // If not a one line, need to add the first space before *, as
|
| - // StripWhitespace would have removed it.
|
| - line = (add_leading_space ? " " : "") + line;
|
| - final_comments += line + suffix;
|
| - }
|
| - final_comments += epilogue;
|
| - return final_comments;
|
| -}
|
| + // Should be called to finish parsing (after all input has been provided via
|
| + // ParseChunk()). Returns success/failure.
|
| + bool Finish();
|
| +
|
| + int last_line() const { return line_; }
|
| + string error_str() const { return error_str_; }
|
| +
|
| + private:
|
| + bool ParseLoop();
|
| +
|
| + map<string, string>* prefix_map_;
|
| + int line_;
|
| + string error_str_;
|
| + StringPiece p_;
|
| + string leftover_;
|
| +};
|
|
|
| -// Making these a generator option for folks that don't use CocoaPods, but do
|
| -// want to put the library in a framework is an interesting question. The
|
| -// problem is it means changing sources shipped with the library to actually
|
| -// use a different value; so it isn't as simple as a option.
|
| -const char* const ProtobufLibraryFrameworkName = "Protobuf";
|
| -
|
| -string ProtobufFrameworkImportSymbol(const string& framework_name) {
|
| - // GPB_USE_[framework_name]_FRAMEWORK_IMPORTS
|
| - string result = string("GPB_USE_");
|
| - result += ToUpper(framework_name);
|
| - result += "_FRAMEWORK_IMPORTS";
|
| +bool Parser::ParseChunk(StringPiece chunk) {
|
| + if (!leftover_.empty()) {
|
| + chunk.AppendToString(&leftover_);
|
| + p_ = StringPiece(leftover_);
|
| + } else {
|
| + p_ = chunk;
|
| + }
|
| + bool result = ParseLoop();
|
| + if (p_.empty()) {
|
| + leftover_.clear();
|
| + } else {
|
| + leftover_ = p_.ToString();
|
| + }
|
| return result;
|
| }
|
|
|
| -bool IsProtobufLibraryBundledProtoFile(const FileDescriptor* file) {
|
| - // We don't check the name prefix or proto package because some files
|
| - // (descriptor.proto), aren't shipped generated by the library, so this
|
| - // seems to be the safest way to only catch the ones shipped.
|
| - const string name = file->name();
|
| - if (name == "google/protobuf/any.proto" ||
|
| - name == "google/protobuf/api.proto" ||
|
| - name == "google/protobuf/duration.proto" ||
|
| - name == "google/protobuf/empty.proto" ||
|
| - name == "google/protobuf/field_mask.proto" ||
|
| - name == "google/protobuf/source_context.proto" ||
|
| - name == "google/protobuf/struct.proto" ||
|
| - name == "google/protobuf/timestamp.proto" ||
|
| - name == "google/protobuf/type.proto" ||
|
| - name == "google/protobuf/wrappers.proto") {
|
| +bool Parser::Finish() {
|
| + if (leftover_.empty()) {
|
| return true;
|
| }
|
| - return false;
|
| + // Force a newline onto the end to finish parsing.
|
| + p_ = StringPiece(leftover_ + "\n");
|
| + if (!ParseLoop()) {
|
| + return false;
|
| + }
|
| + return p_.empty(); // Everything used?
|
| }
|
|
|
| +static bool ascii_isnewline(char c) { return c == '\n' || c == '\r'; }
|
| +
|
| bool ReadLine(StringPiece* input, StringPiece* line) {
|
| for (int len = 0; len < input->size(); ++len) {
|
| if (ascii_isnewline((*input)[len])) {
|
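| The single-form BuildCommentsString() in the hunk above emits one "///" line
| per comment line, escaping the HeaderDoc markers (a backslash is doubled and
| '@' becomes '\@'); leading spaces are no longer stripped. An illustrative
| input/output pair (hypothetical comment text, outside the patch):
|
|     // proto comment line:   " The @c user_id field."
|     // generated doc line:   "/// The \@c user_id field."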
| @@ -969,6 +902,15 @@ bool ReadLine(StringPiece* input, StringPiece* line) {
|
| return false; // Ran out of input with no newline.
|
| }
|
|
|
| +void TrimWhitespace(StringPiece* input) {
|
| + while (!input->empty() && ascii_isspace(*input->data())) {
|
| + input->remove_prefix(1);
|
| + }
|
| + while (!input->empty() && ascii_isspace((*input)[input->length() - 1])) {
|
| + input->remove_suffix(1);
|
| + }
|
| +}
|
| +
|
| void RemoveComment(StringPiece* input) {
|
| int offset = input->find('#');
|
| if (offset != StringPiece::npos) {
|
| @@ -976,35 +918,29 @@ void RemoveComment(StringPiece* input) {
|
| }
|
| }
|
|
|
| -namespace {
|
| -
|
| -class ExpectedPrefixesCollector : public LineConsumer {
|
| - public:
|
| - ExpectedPrefixesCollector(map<string, string>* inout_package_to_prefix_map)
|
| - : prefix_map_(inout_package_to_prefix_map) {}
|
| -
|
| - virtual bool ConsumeLine(const StringPiece& line, string* out_error);
|
| -
|
| - private:
|
| - map<string, string>* prefix_map_;
|
| -};
|
| -
|
| -bool ExpectedPrefixesCollector::ConsumeLine(
|
| - const StringPiece& line, string* out_error) {
|
| - int offset = line.find('=');
|
| - if (offset == StringPiece::npos) {
|
| - *out_error =
|
| - string("Expected prefixes file line without equal sign: '") +
|
| - line.ToString() + "'.";
|
| - return false;
|
| +bool Parser::ParseLoop() {
|
| + StringPiece line;
|
| + while (ReadLine(&p_, &line)) {
|
| + ++line_;
|
| + RemoveComment(&line);
|
| + TrimWhitespace(&line);
|
| + if (line.size() == 0) {
|
| + continue; // Blank line.
|
| + }
|
| + int offset = line.find('=');
|
| + if (offset == StringPiece::npos) {
|
| + error_str_ =
|
| + string("Line without equal sign: '") + line.ToString() + "'.";
|
| + return false;
|
| + }
|
| + StringPiece package(line, 0, offset);
|
| + StringPiece prefix(line, offset + 1, line.length() - offset - 1);
|
| + TrimWhitespace(&package);
|
| + TrimWhitespace(&prefix);
|
| + // Don't really worry about error checking the package/prefix for
|
| + // being valid. Assume the file is validated when it is created/edited.
|
| + (*prefix_map_)[package.ToString()] = prefix.ToString();
|
| }
|
| - StringPiece package(line, 0, offset);
|
| - StringPiece prefix(line, offset + 1, line.length() - offset - 1);
|
| - StringPieceTrimWhitespace(&package);
|
| - StringPieceTrimWhitespace(&prefix);
|
| - // Don't really worry about error checking the package/prefix for
|
| - // being valid. Assume the file is validated when it is created/edited.
|
| - (*prefix_map_)[package.ToString()] = prefix.ToString();
|
| return true;
|
| }
|
|
|
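| Parser::ParseLoop() above accepts a simple line-oriented format: '#' starts a
| comment, blank lines are skipped, and every remaining line must be
| "package = prefix" with surrounding whitespace trimmed. An illustrative
| expected_prefixes file (example entries, outside the patch):
|
|     # Comments run to the end of the line.
|     google.protobuf   = GPB
|     mycompany.photos  = MCP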
| @@ -1015,25 +951,60 @@ bool LoadExpectedPackagePrefixes(const Options &generation_options,
|
| return true;
|
| }
|
|
|
| - ExpectedPrefixesCollector collector(prefix_map);
|
| - return ParseSimpleFile(
|
| - generation_options.expected_prefixes_path, &collector, out_error);
|
| + int fd;
|
| + do {
|
| + fd = open(generation_options.expected_prefixes_path.c_str(), O_RDONLY);
|
| + } while (fd < 0 && errno == EINTR);
|
| + if (fd < 0) {
|
| + *out_error =
|
| + string("error: Unable to open \"") +
|
| + generation_options.expected_prefixes_path +
|
| + "\", " + strerror(errno);
|
| + return false;
|
| + }
|
| + io::FileInputStream file_stream(fd);
|
| + file_stream.SetCloseOnDelete(true);
|
| +
|
| + Parser parser(prefix_map);
|
| + const void* buf;
|
| + int buf_len;
|
| + while (file_stream.Next(&buf, &buf_len)) {
|
| + if (buf_len == 0) {
|
| + continue;
|
| + }
|
| +
|
| + if (!parser.ParseChunk(StringPiece(static_cast<const char*>(buf), buf_len))) {
|
| + *out_error =
|
| + string("error: ") + generation_options.expected_prefixes_path +
|
| + " Line " + SimpleItoa(parser.last_line()) + ", " + parser.error_str();
|
| + return false;
|
| + }
|
| + }
|
| + return parser.Finish();
|
| }
|
|
|
| -bool ValidateObjCClassPrefix(
|
| - const FileDescriptor* file,
|
| - const string& expected_prefixes_path,
|
| - const map<string, string>& expected_package_prefixes,
|
| - string* out_error) {
|
| +} // namespace
|
| +
|
| +bool ValidateObjCClassPrefix(const FileDescriptor* file,
|
| + const Options& generation_options,
|
| + string* out_error) {
|
| const string prefix = file->options().objc_class_prefix();
|
| const string package = file->package();
|
|
|
| // NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
|
| // error cases, so it seems to be ok to use as a back door for warnings.
|
|
|
| + // Load any expected package prefixes to validate against those.
|
| + map<string, string> expected_package_prefixes;
|
| + if (!LoadExpectedPackagePrefixes(generation_options,
|
| + &expected_package_prefixes,
|
| + out_error)) {
|
| + return false;
|
| + }
|
| +
|
| // Check: Error - See if there was an expected prefix for the package and
|
| // report if it doesn't match (wrong or missing).
|
| - map<string, string>::const_iterator package_match =
|
| + map<string, string>::iterator package_match =
|
| expected_package_prefixes.find(package);
|
| if (package_match != expected_package_prefixes.end()) {
|
| // There was an entry, and...
|
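| With this change ValidateObjCClassPrefix() loads the expected prefixes itself
| and is invoked per file; the batch ValidateObjCClassPrefixes() wrapper is
| removed further down. A hedged sketch of how a caller might drive the new
| signature (hypothetical caller code, outside the patch):
|
|     // for (int i = 0; i < files.size(); i++) {
|     //   if (!ValidateObjCClassPrefix(files[i], generation_options, &error)) {
|     //     return false;  // error already names the offending file/prefix.
|     //   }
|     // }
|
| Note that this re-reads the expected_prefixes file once per input file, which
| the removed batch wrapper avoided by loading the map a single time.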
| @@ -1054,11 +1025,27 @@ bool ValidateObjCClassPrefix(
|
| }
|
|
|
| // If there was no prefix option, we're done at this point.
|
| - if (prefix.empty()) {
|
| + if (prefix.length() == 0) {
|
| // No prefix, nothing left to check.
|
| return true;
|
| }
|
|
|
| + // Check: Error - Make sure the prefix wasn't expected for a different
|
| + // package (overlap is allowed, but it has to be listed as an expected
|
| + // overlap).
|
| + for (map<string, string>::iterator i = expected_package_prefixes.begin();
|
| + i != expected_package_prefixes.end(); ++i) {
|
| + if (i->second == prefix) {
|
| + *out_error =
|
| + "error: Found 'option objc_class_prefix = \"" + prefix +
|
| + "\";' in '" + file->name() +
|
| + "'; that prefix is already used for 'package " + i->first +
|
| + ";'. It can only be reused by listing it in the expected file (" +
|
| + generation_options.expected_prefixes_path + ").";
|
| + return false; // Only report first usage of the prefix.
|
| + }
|
| + }
|
| +
|
| // Check: Warning - Make sure the prefix is a reasonable value according
|
| // to Apple's rules (the checks above implicitly whitelist anything that
|
| // doesn't meet these rules).
|
| @@ -1080,56 +1067,6 @@ bool ValidateObjCClassPrefix(
|
| cerr.flush();
|
| }
|
|
|
| - // Look for any other package that uses the same prefix.
|
| - string other_package_for_prefix;
|
| - for (map<string, string>::const_iterator i = expected_package_prefixes.begin();
|
| - i != expected_package_prefixes.end(); ++i) {
|
| - if (i->second == prefix) {
|
| - other_package_for_prefix = i->first;
|
| - break;
|
| - }
|
| - }
|
| -
|
| - // Check: Warning - If the file does not have a package, check whether
|
| - // the prefix declared is being used by another package or not.
|
| - if (package.empty()) {
|
| - // The file does not have a package and ...
|
| - if (other_package_for_prefix.empty()) {
|
| - // ... no other package has declared that prefix.
|
| - cerr << endl
|
| - << "protoc:0: warning: File '" << file->name() << "' has no "
|
| - << "package. Consider adding a new package to the proto and adding '"
|
| - << "new.package = " << prefix << "' to the expected prefixes file ("
|
| - << expected_prefixes_path << ")." << endl;
|
| - cerr.flush();
|
| - } else {
|
| - // ... another package has declared the same prefix.
|
| - cerr << endl
|
| - << "protoc:0: warning: File '" << file->name() << "' has no package "
|
| - << "and package '" << other_package_for_prefix << "' already uses '"
|
| - << prefix << "' as its prefix. Consider either adding a new package "
|
| - << "to the proto, or reusing one of the packages already using this "
|
| - << "prefix in the expected prefixes file ("
|
| - << expected_prefixes_path << ")." << endl;
|
| - cerr.flush();
|
| - }
|
| - return true;
|
| - }
|
| -
|
| - // Check: Error - Make sure the prefix wasn't expected for a different
|
| - // package (overlap is allowed, but it has to be listed as an expected
|
| - // overlap).
|
| - if (!other_package_for_prefix.empty()) {
|
| - *out_error =
|
| - "error: Found 'option objc_class_prefix = \"" + prefix +
|
| - "\";' in '" + file->name() +
|
| - "'; that prefix is already used for 'package " +
|
| - other_package_for_prefix + ";'. It can only be reused by listing " +
|
| - "it in the expected file (" +
|
| - expected_prefixes_path + ").";
|
| - return false; // Only report first usage of the prefix.
|
| - }
|
| -
|
| // Check: Warning - If the given package/prefix pair wasn't expected, issue a
|
| // warning suggesting it gets added to the file.
|
| if (!expected_package_prefixes.empty()) {
|
| @@ -1137,43 +1074,13 @@ bool ValidateObjCClassPrefix(
|
| << "protoc:0: warning: Found unexpected 'option objc_class_prefix = \""
|
| << prefix << "\";' in '" << file->name() << "';"
|
| << " consider adding it to the expected prefixes file ("
|
| - << expected_prefixes_path << ")." << endl;
|
| + << generation_options.expected_prefixes_path << ")." << endl;
|
| cerr.flush();
|
| }
|
|
|
| return true;
|
| }
|
|
|
| -} // namespace
|
| -
|
| -bool ValidateObjCClassPrefixes(const vector<const FileDescriptor*>& files,
|
| - const Options& generation_options,
|
| - string* out_error) {
|
| - // Load the expected package prefixes, if available, to validate against.
|
| - map<string, string> expected_package_prefixes;
|
| - if (!LoadExpectedPackagePrefixes(generation_options,
|
| - &expected_package_prefixes,
|
| - out_error)) {
|
| - return false;
|
| - }
|
| -
|
| - for (int i = 0; i < files.size(); i++) {
|
| - bool is_valid =
|
| - ValidateObjCClassPrefix(files[i],
|
| - generation_options.expected_prefixes_path,
|
| - expected_package_prefixes,
|
| - out_error);
|
| - if (!is_valid) {
|
| - return false;
|
| - }
|
| - }
|
| - return true;
|
| -}
|
| -
|
| -TextFormatDecodeData::TextFormatDecodeData() { }
|
| -
|
| -TextFormatDecodeData::~TextFormatDecodeData() { }
|
| -
|
| void TextFormatDecodeData::AddString(int32 key,
|
| const string& input_for_decode,
|
| const string& desired_output) {
|
| @@ -1382,289 +1289,6 @@ string TextFormatDecodeData::DecodeDataForString(const string& input_for_decode,
|
| return builder.Finish() + (char)'\0';
|
| }
|
|
|
| -namespace {
|
| -
|
| -class Parser {
|
| - public:
|
| - Parser(LineConsumer* line_consumer)
|
| - : line_consumer_(line_consumer), line_(0) {}
|
| -
|
| - // Parses a check of input, returning success/failure.
|
| - bool ParseChunk(StringPiece chunk);
|
| -
|
| - // Should be called to finish parsing (after all input has been provided via
|
| - // ParseChunk()). Returns success/failure.
|
| - bool Finish();
|
| -
|
| - int last_line() const { return line_; }
|
| - string error_str() const { return error_str_; }
|
| -
|
| - private:
|
| - bool ParseLoop();
|
| -
|
| - LineConsumer* line_consumer_;
|
| - int line_;
|
| - string error_str_;
|
| - StringPiece p_;
|
| - string leftover_;
|
| -};
|
| -
|
| -bool Parser::ParseChunk(StringPiece chunk) {
|
| - if (!leftover_.empty()) {
|
| - chunk.AppendToString(&leftover_);
|
| - p_ = StringPiece(leftover_);
|
| - } else {
|
| - p_ = chunk;
|
| - }
|
| - bool result = ParseLoop();
|
| - if (p_.empty()) {
|
| - leftover_.clear();
|
| - } else {
|
| - leftover_ = p_.ToString();
|
| - }
|
| - return result;
|
| -}
|
| -
|
| -bool Parser::Finish() {
|
| - if (leftover_.empty()) {
|
| - return true;
|
| - }
|
| - // Force a newline onto the end to finish parsing.
|
| - leftover_ += "\n";
|
| - p_ = StringPiece(leftover_);
|
| - if (!ParseLoop()) {
|
| - return false;
|
| - }
|
| - return p_.empty(); // Everything used?
|
| -}
|
| -
|
| -bool Parser::ParseLoop() {
|
| - StringPiece line;
|
| - while (ReadLine(&p_, &line)) {
|
| - ++line_;
|
| - RemoveComment(&line);
|
| - StringPieceTrimWhitespace(&line);
|
| - if (line.size() == 0) {
|
| - continue; // Blank line.
|
| - }
|
| - if (!line_consumer_->ConsumeLine(line, &error_str_)) {
|
| - return false;
|
| - }
|
| - }
|
| - return true;
|
| -}
|
| -
|
| -} // namespace
|
| -
|
| -LineConsumer::LineConsumer() {}
|
| -
|
| -LineConsumer::~LineConsumer() {}
|
| -
|
| -bool ParseSimpleFile(
|
| - const string& path, LineConsumer* line_consumer, string* out_error) {
|
| - int fd;
|
| - do {
|
| - fd = open(path.c_str(), O_RDONLY);
|
| - } while (fd < 0 && errno == EINTR);
|
| - if (fd < 0) {
|
| - *out_error =
|
| - string("error: Unable to open \"") + path + "\", " + strerror(errno);
|
| - return false;
|
| - }
|
| - io::FileInputStream file_stream(fd);
|
| - file_stream.SetCloseOnDelete(true);
|
| -
|
| - Parser parser(line_consumer);
|
| - const void* buf;
|
| - int buf_len;
|
| - while (file_stream.Next(&buf, &buf_len)) {
|
| - if (buf_len == 0) {
|
| - continue;
|
| - }
|
| -
|
| - if (!parser.ParseChunk(StringPiece(static_cast<const char*>(buf), buf_len))) {
|
| - *out_error =
|
| - string("error: ") + path +
|
| - " Line " + SimpleItoa(parser.last_line()) + ", " + parser.error_str();
|
| - return false;
|
| - }
|
| - }
|
| - return parser.Finish();
|
| -}
|
| -
|
| -ImportWriter::ImportWriter(
|
| - const string& generate_for_named_framework,
|
| - const string& named_framework_to_proto_path_mappings_path)
|
| - : generate_for_named_framework_(generate_for_named_framework),
|
| - named_framework_to_proto_path_mappings_path_(
|
| - named_framework_to_proto_path_mappings_path),
|
| - need_to_parse_mapping_file_(true) {
|
| -}
|
| -
|
| -ImportWriter::~ImportWriter() {}
|
| -
|
| -void ImportWriter::AddFile(const FileDescriptor* file,
|
| - const string& header_extension) {
|
| - const string file_path(FilePath(file));
|
| -
|
| - if (IsProtobufLibraryBundledProtoFile(file)) {
|
| - protobuf_framework_imports_.push_back(
|
| - FilePathBasename(file) + header_extension);
|
| - protobuf_non_framework_imports_.push_back(file_path + header_extension);
|
| - return;
|
| - }
|
| -
|
| - // Lazy parse any mappings.
|
| - if (need_to_parse_mapping_file_) {
|
| - ParseFrameworkMappings();
|
| - }
|
| -
|
| - map<string, string>::iterator proto_lookup =
|
| - proto_file_to_framework_name_.find(file->name());
|
| - if (proto_lookup != proto_file_to_framework_name_.end()) {
|
| - other_framework_imports_.push_back(
|
| - proto_lookup->second + "/" +
|
| - FilePathBasename(file) + header_extension);
|
| - return;
|
| - }
|
| -
|
| - if (!generate_for_named_framework_.empty()) {
|
| - other_framework_imports_.push_back(
|
| - generate_for_named_framework_ + "/" +
|
| - FilePathBasename(file) + header_extension);
|
| - return;
|
| - }
|
| -
|
| - other_imports_.push_back(file_path + header_extension);
|
| -}
|
| -
|
| -void ImportWriter::Print(io::Printer* printer) const {
|
| - assert(protobuf_non_framework_imports_.size() ==
|
| - protobuf_framework_imports_.size());
|
| -
|
| - bool add_blank_line = false;
|
| -
|
| - if (protobuf_framework_imports_.size() > 0) {
|
| - const string framework_name(ProtobufLibraryFrameworkName);
|
| - const string cpp_symbol(ProtobufFrameworkImportSymbol(framework_name));
|
| -
|
| - printer->Print(
|
| - "#if $cpp_symbol$\n",
|
| - "cpp_symbol", cpp_symbol);
|
| - for (vector<string>::const_iterator iter = protobuf_framework_imports_.begin();
|
| - iter != protobuf_framework_imports_.end(); ++iter) {
|
| - printer->Print(
|
| - " #import <$framework_name$/$header$>\n",
|
| - "framework_name", framework_name,
|
| - "header", *iter);
|
| - }
|
| - printer->Print(
|
| - "#else\n");
|
| - for (vector<string>::const_iterator iter = protobuf_non_framework_imports_.begin();
|
| - iter != protobuf_non_framework_imports_.end(); ++iter) {
|
| - printer->Print(
|
| - " #import \"$header$\"\n",
|
| - "header", *iter);
|
| - }
|
| - printer->Print(
|
| - "#endif\n");
|
| -
|
| - add_blank_line = true;
|
| - }
|
| -
|
| - if (other_framework_imports_.size() > 0) {
|
| - if (add_blank_line) {
|
| - printer->Print("\n");
|
| - }
|
| -
|
| - for (vector<string>::const_iterator iter = other_framework_imports_.begin();
|
| - iter != other_framework_imports_.end(); ++iter) {
|
| - printer->Print(
|
| - " #import <$header$>\n",
|
| - "header", *iter);
|
| - }
|
| -
|
| - add_blank_line = true;
|
| - }
|
| -
|
| - if (other_imports_.size() > 0) {
|
| - if (add_blank_line) {
|
| - printer->Print("\n");
|
| - }
|
| -
|
| - for (vector<string>::const_iterator iter = other_imports_.begin();
|
| - iter != other_imports_.end(); ++iter) {
|
| - printer->Print(
|
| - " #import \"$header$\"\n",
|
| - "header", *iter);
|
| - }
|
| - }
|
| -}
|
| -
|
| -void ImportWriter::ParseFrameworkMappings() {
|
| - need_to_parse_mapping_file_ = false;
|
| - if (named_framework_to_proto_path_mappings_path_.empty()) {
|
| - return; // Nothing to do.
|
| - }
|
| -
|
| - ProtoFrameworkCollector collector(&proto_file_to_framework_name_);
|
| - string parse_error;
|
| - if (!ParseSimpleFile(named_framework_to_proto_path_mappings_path_,
|
| - &collector, &parse_error)) {
|
| - cerr << "error parsing " << named_framework_to_proto_path_mappings_path_
|
| - << " : " << parse_error << endl;
|
| - cerr.flush();
|
| - }
|
| -}
|
| -
|
| -bool ImportWriter::ProtoFrameworkCollector::ConsumeLine(
|
| - const StringPiece& line, string* out_error) {
|
| - int offset = line.find(':');
|
| - if (offset == StringPiece::npos) {
|
| - *out_error =
|
| - string("Framework/proto file mapping line without colon sign: '") +
|
| - line.ToString() + "'.";
|
| - return false;
|
| - }
|
| - StringPiece framework_name(line, 0, offset);
|
| - StringPiece proto_file_list(line, offset + 1, line.length() - offset - 1);
|
| - StringPieceTrimWhitespace(&framework_name);
|
| -
|
| - int start = 0;
|
| - while (start < proto_file_list.length()) {
|
| - offset = proto_file_list.find(',', start);
|
| - if (offset == StringPiece::npos) {
|
| - offset = proto_file_list.length();
|
| - }
|
| -
|
| - StringPiece proto_file(proto_file_list, start, offset - start);
|
| - StringPieceTrimWhitespace(&proto_file);
|
| - if (proto_file.size() != 0) {
|
| - map<string, string>::iterator existing_entry =
|
| - map_->find(proto_file.ToString());
|
| - if (existing_entry != map_->end()) {
|
| - cerr << "warning: duplicate proto file reference, replacing framework entry for '"
|
| - << proto_file.ToString() << "' with '" << framework_name.ToString()
|
| - << "' (was '" << existing_entry->second << "')." << endl;
|
| - cerr.flush();
|
| - }
|
| -
|
| - if (proto_file.find(' ') != StringPiece::npos) {
|
| - cerr << "note: framework mapping file had a proto file with a space in, hopefully that isn't a missing comma: '"
|
| - << proto_file.ToString() << "'" << endl;
|
| - cerr.flush();
|
| - }
|
| -
|
| - (*map_)[proto_file.ToString()] = framework_name.ToString();
|
| - }
|
| -
|
| - start = offset + 1;
|
| - }
|
| -
|
| - return true;
|
| -}
|
| -
|
| -
|
| } // namespace objectivec
|
| } // namespace compiler
|
| } // namespace protobuf
|
|
|