| Index: third_party/protobuf/src/google/protobuf/compiler/parser_unittest.cc
|
| ===================================================================
|
| --- third_party/protobuf/src/google/protobuf/compiler/parser_unittest.cc (revision 216642)
|
| +++ third_party/protobuf/src/google/protobuf/compiler/parser_unittest.cc (working copy)
|
| @@ -44,6 +44,7 @@
|
| #include <google/protobuf/wire_format.h>
|
| #include <google/protobuf/text_format.h>
|
| #include <google/protobuf/unittest.pb.h>
|
| +#include <google/protobuf/unittest_custom_options.pb.h>
|
| #include <google/protobuf/stubs/strutil.h>
|
| #include <google/protobuf/stubs/substitute.h>
|
| #include <google/protobuf/stubs/map-util.h>
|
| @@ -509,6 +510,31 @@
|
| "}");
|
| }
|
|
|
| +TEST_F(ParseMessageTest, LargerMaxForMessageSetWireFormatMessages) {
|
| + // Messages using the message_set_wire_format option can accept larger
|
| +  // extension numbers, as the numbers are encoded as int32 field values
|
| + // rather than tags.
|
| + ExpectParsesTo(
|
| + "message TestMessage {\n"
|
| + " extensions 4 to max;\n"
|
| + " option message_set_wire_format = true;\n"
|
| + "}\n",
|
| +
|
| + "message_type {"
|
| + " name: \"TestMessage\""
|
| + " extension_range { start:4 end: 0x7fffffff }"
|
| + " options {\n"
|
| + " uninterpreted_option { \n"
|
| + " name {\n"
|
| + " name_part: \"message_set_wire_format\"\n"
|
| + " is_extension: false\n"
|
| + " }\n"
|
| + " identifier_value: \"true\"\n"
|
| + " }\n"
|
| + " }\n"
|
| + "}");
|
| +}
|
| +
|
| TEST_F(ParseMessageTest, Extensions) {
|
| ExpectParsesTo(
|
| "extend Extendee1 { optional int32 foo = 12; }\n"
|
| @@ -571,6 +597,10 @@
|
| " FOO = 13;\n"
|
| " BAR = -10;\n"
|
| " BAZ = 500;\n"
|
| + " HEX_MAX = 0x7FFFFFFF;\n"
|
| + " HEX_MIN = -0x80000000;\n"
|
| + " INT_MAX = 2147483647;\n"
|
| + " INT_MIN = -2147483648;\n"
|
| "}\n",
|
|
|
| "enum_type {"
|
| @@ -578,6 +608,10 @@
|
| " value { name:\"FOO\" number:13 }"
|
| " value { name:\"BAR\" number:-10 }"
|
| " value { name:\"BAZ\" number:500 }"
|
| + " value { name:\"HEX_MAX\" number:2147483647 }"
|
| + " value { name:\"HEX_MIN\" number:-2147483648 }"
|
| + " value { name:\"INT_MAX\" number:2147483647 }"
|
| + " value { name:\"INT_MIN\" number:-2147483648 }"
|
| "}");
|
| }
|
|
|
| @@ -631,7 +665,7 @@
|
| "}");
|
| }
|
|
|
| -TEST_F(ParseServiceTest, Methods) {
|
| +TEST_F(ParseServiceTest, MethodsAndStreams) {
|
| ExpectParsesTo(
|
| "service TestService {\n"
|
| " rpc Foo(In1) returns (Out1);\n"
|
| @@ -668,6 +702,20 @@
|
| "dependency: \"baz.proto\"");
|
| }
|
|
|
| +TEST_F(ParseMiscTest, ParsePublicImports) {
|
| + ExpectParsesTo(
|
| + "import \"foo.proto\";\n"
|
| + "import public \"bar.proto\";\n"
|
| + "import \"baz.proto\";\n"
|
| + "import public \"qux.proto\";\n",
|
| + "dependency: \"foo.proto\""
|
| + "dependency: \"bar.proto\""
|
| + "dependency: \"baz.proto\""
|
| + "dependency: \"qux.proto\""
|
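| +  // public_dependency holds indexes into the dependency list above:
|
| +  // "bar.proto" is dependency 1 and "qux.proto" is dependency 3.
|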
| + "public_dependency: 1 "
|
| + "public_dependency: 3 ");
|
| +}
|
| +
|
| TEST_F(ParseMiscTest, ParsePackage) {
|
| ExpectParsesTo(
|
| "package foo.bar.baz;\n",
|
| @@ -867,6 +915,20 @@
|
| "6:36: Integer out of range.\n");
|
| }
|
|
|
| +TEST_F(ParseErrorTest, EnumValueOutOfRange) {
|
| + ExpectHasErrors(
|
| + "enum TestEnum {\n"
|
| + "  HEX_TOO_BIG   =  0x80000000;\n"
|
| + "  HEX_TOO_SMALL = -0x80000001;\n"
|
| + "  INT_TOO_BIG   =  2147483648;\n"
|
| + "  INT_TOO_SMALL = -2147483649;\n"
|
| + "}\n",
|
| + "1:19: Integer out of range.\n"
|
| + "2:19: Integer out of range.\n"
|
| + "3:19: Integer out of range.\n"
|
| + "4:19: Integer out of range.\n");
|
| +}
|
| +
|
| TEST_F(ParseErrorTest, DefaultValueMissing) {
|
| ExpectHasErrors(
|
| "message TestMessage {\n"
|
| @@ -976,6 +1038,7 @@
|
| "1:26: Expected message type.\n");
|
| }
|
|
|
| +
|
| TEST_F(ParseErrorTest, EofInMethodOptions) {
|
| ExpectHasErrors(
|
| "service TestService {\n"
|
| @@ -984,6 +1047,7 @@
|
| "1:29: Reached end of input in service definition (missing '}').\n");
|
| }
|
|
|
| +
|
| TEST_F(ParseErrorTest, PrimitiveMethodInput) {
|
| ExpectHasErrors(
|
| "service TestService {\n"
|
| @@ -992,6 +1056,7 @@
|
| "1:10: Expected message type.\n");
|
| }
|
|
|
| +
|
| TEST_F(ParseErrorTest, MethodOptionTypeError) {
|
| // This used to cause an infinite loop.
|
| ExpectHasErrors(
|
| @@ -1002,6 +1067,7 @@
|
| "2:45: Expected \"=\".\n");
|
| }
|
|
|
| +
|
| // -------------------------------------------------------------------
|
| // Import and package errors
|
|
|
| @@ -1157,6 +1223,7 @@
|
| "3:6: \"Bar\" is already defined in \"Foo\".\n");
|
| }
|
|
|
| +
|
| TEST_F(ParserValidationErrorTest, MethodInputTypeError) {
|
| ExpectHasValidationErrors(
|
| "message Baz {}\n"
|
| @@ -1166,6 +1233,7 @@
|
| "2:10: \"Qux\" is not defined.\n");
|
| }
|
|
|
| +
|
| TEST_F(ParserValidationErrorTest, MethodOutputTypeError) {
|
| ExpectHasValidationErrors(
|
| "message Baz {}\n"
|
| @@ -1175,6 +1243,7 @@
|
| "2:23: \"Qux\" is not defined.\n");
|
| }
|
|
|
| +
|
| // ===================================================================
|
| // Test that the output from FileDescriptor::DebugString() (and all other
|
| // descriptor types) is parseable, and results in the same Descriptor
|
| @@ -1236,6 +1305,11 @@
|
| // also need to give it the same name as the original.
|
| parsed.set_name("google/protobuf/unittest.proto");
|
| // We need the imported dependency before we can build our parsed proto
|
| + const FileDescriptor* public_import =
|
| + protobuf_unittest_import::PublicImportMessage::descriptor()->file();
|
| + FileDescriptorProto public_import_proto;
|
| + public_import->CopyTo(&public_import_proto);
|
| + ASSERT_TRUE(pool_.BuildFile(public_import_proto) != NULL);
|
| const FileDescriptor* import =
|
| protobuf_unittest_import::ImportMessage::descriptor()->file();
|
| FileDescriptorProto import_proto;
|
| @@ -1258,6 +1332,45 @@
|
| EXPECT_EQ(expected.DebugString(), parsed.DebugString());
|
| }
|
|
|
| +TEST_F(ParseDecriptorDebugTest, TestCustomOptions) {
|
| + const FileDescriptor* original_file =
|
| + protobuf_unittest::AggregateMessage::descriptor()->file();
|
| + FileDescriptorProto expected;
|
| + original_file->CopyTo(&expected);
|
| +
|
| + string debug_string = original_file->DebugString();
|
| +
|
| + // Parse the debug string
|
| + SetupParser(debug_string.c_str());
|
| + FileDescriptorProto parsed;
|
| + parser_->Parse(input_.get(), &parsed);
|
| + EXPECT_EQ(io::Tokenizer::TYPE_END, input_->current().type);
|
| + ASSERT_EQ("", error_collector_.text_);
|
| +
|
| + // We now have a FileDescriptorProto, but to compare with the expected we
|
| +  // need to link to a FileDescriptor, then output back to a proto. We'll
|
| + // also need to give it the same name as the original.
|
| + parsed.set_name(original_file->name());
|
| +
|
| + // unittest_custom_options.proto depends on descriptor.proto.
|
| + const FileDescriptor* import = FileDescriptorProto::descriptor()->file();
|
| + FileDescriptorProto import_proto;
|
| + import->CopyTo(&import_proto);
|
| + ASSERT_TRUE(pool_.BuildFile(import_proto) != NULL);
|
| + const FileDescriptor* actual = pool_.BuildFile(parsed);
|
| + ASSERT_TRUE(actual != NULL);
|
| + parsed.Clear();
|
| + actual->CopyTo(&parsed);
|
| +
|
| + // The messages might be in different orders, making them hard to compare.
|
| + // So, sort the messages in the descriptor protos (including nested messages,
|
| + // recursively).
|
| + SortMessages(&expected);
|
| + SortMessages(&parsed);
|
| +
|
| + EXPECT_EQ(expected.DebugString(), parsed.DebugString());
|
| +}
|
| +
|
| // ===================================================================
|
| // SourceCodeInfo tests.
|
|
|
| @@ -1353,67 +1466,6 @@
|
| }
|
| }
|
|
|
| -// Split some text on line breaks. The line breaks are retained in the output,
|
| -// so each line (except the last) ends with a '\n', and the lines can be
|
| -// concatenated to produce the original text.
|
| -//
|
| -// I couldn't find the proper string utility function for this. Our
|
| -// split-on-delimiter functions don't include the delimiter in the output.
|
| -void SplitLines(const string& text, vector<string>* lines) {
|
| - string::size_type pos = 0;
|
| -
|
| - while (pos != string::npos) {
|
| - string::size_type last_pos = pos;
|
| - pos = text.find_first_of('\n', pos);
|
| - if (pos != string::npos) ++pos;
|
| - lines->push_back(text.substr(last_pos, pos - last_pos));
|
| - }
|
| -}
|
| -
|
| -// Look for the given tags in the given text and construct a span (as defined
|
| -// by SourceCodeInfo.Location.span) from them. E.g. for text like:
|
| -// /*a*/message /*b*/Foo/*c*/ {}/*d*/
|
| -// There are four tags: "a", "b", "c", and "d". The constructed span starts
|
| -// immediately after the start tag's trailing '/' and ends immediately before
|
| -// the end tags leading '/'.
|
| -void MakeExpectedSpan(const vector<string>& lines,
|
| - const string& start_tag, const string& end_tag,
|
| - RepeatedField<int>* output) {
|
| - string start_comment = "/*" + start_tag + "*/";
|
| - string end_comment = "/*" + end_tag + "*/";
|
| -
|
| - int start_line = -1;
|
| - int start_column = -1;
|
| - for (int i = 0; i < lines.size(); i++) {
|
| - string::size_type pos = lines[i].find(start_comment);
|
| - if (pos != string::npos) {
|
| - start_line = i;
|
| - start_column = pos + start_comment.size();
|
| - break;
|
| - }
|
| - }
|
| - ASSERT_NE(start_line, -1)
|
| - << "Tag \"" << start_comment << "\" not found in text.";
|
| -
|
| - int end_line = -1;
|
| - int end_column = -1;
|
| - for (int i = start_line; i < lines.size(); i++) {
|
| - string::size_type pos = lines[i].find(end_comment);
|
| - if (pos != string::npos) {
|
| - end_line = i;
|
| - end_column = pos;
|
| - break;
|
| - }
|
| - }
|
| - ASSERT_NE(end_line, -1)
|
| - << "Tag \"" << end_comment << "\" not found in text.";
|
| -
|
| - output->Add(start_line);
|
| - output->Add(start_column);
|
| - if (end_line != start_line) output->Add(end_line);
|
| - output->Add(end_column);
|
| -}
|
| -
|
| // Check if two spans are equal.
|
| bool CompareSpans(const RepeatedField<int>& span1,
|
| const RepeatedField<int>& span2) {
|
| @@ -1434,8 +1486,8 @@
|
| // Parse the given text as a .proto file and populate the spans_ map with
|
| // all the source location spans in its SourceCodeInfo table.
|
| bool Parse(const char* text) {
|
| - SetupParser(text);
|
| - SplitLines(text, &lines_);
|
| + ExtractMarkers(text);
|
| + SetupParser(text_without_markers_.c_str());
|
| if (!parser_->Parse(input_.get(), &file_)) {
|
| return false;
|
| }
|
| @@ -1470,19 +1522,38 @@
|
| // part of the FileDescriptorProto. (If unclear, look at the actual tests;
|
| // it should quickly become obvious.)
|
|
|
| - bool HasSpan(const char* start_tag, const char* end_tag,
|
| + bool HasSpan(char start_marker, char end_marker,
|
| const Message& descriptor_proto) {
|
| - return HasSpan(start_tag, end_tag, descriptor_proto, NULL, -1);
|
| + return HasSpanWithComment(
|
| + start_marker, end_marker, descriptor_proto, NULL, -1, NULL, NULL);
|
| }
|
|
|
| - bool HasSpan(const char* start_tag, const char* end_tag,
|
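| +  // Like HasSpan(), but also checks the comments attached to the span.
|
| +  // Passing NULL for an expected comment asserts that the span has no
|
| +  // leading or trailing comment, respectively.
|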
| + bool HasSpanWithComment(char start_marker, char end_marker,
|
| + const Message& descriptor_proto,
|
| + const char* expected_leading_comments,
|
| + const char* expected_trailing_comments) {
|
| + return HasSpanWithComment(
|
| + start_marker, end_marker, descriptor_proto, NULL, -1,
|
| + expected_leading_comments, expected_trailing_comments);
|
| + }
|
| +
|
| + bool HasSpan(char start_marker, char end_marker,
|
| const Message& descriptor_proto, const string& field_name) {
|
| - return HasSpan(start_tag, end_tag, descriptor_proto, field_name, -1);
|
| + return HasSpan(start_marker, end_marker, descriptor_proto, field_name, -1);
|
| }
|
|
|
| - bool HasSpan(const char* start_tag, const char* end_tag,
|
| + bool HasSpan(char start_marker, char end_marker,
|
| const Message& descriptor_proto, const string& field_name,
|
| int index) {
|
| + return HasSpan(start_marker, end_marker, descriptor_proto,
|
| + field_name, index, NULL, NULL);
|
| + }
|
| +
|
| + bool HasSpan(char start_marker, char end_marker,
|
| + const Message& descriptor_proto,
|
| + const string& field_name, int index,
|
| + const char* expected_leading_comments,
|
| + const char* expected_trailing_comments) {
|
| const FieldDescriptor* field =
|
| descriptor_proto.GetDescriptor()->FindFieldByName(field_name);
|
| if (field == NULL) {
|
| @@ -1491,29 +1562,34 @@
|
| return false;
|
| }
|
|
|
| - return HasSpan(start_tag, end_tag, descriptor_proto, field, index);
|
| + return HasSpanWithComment(
|
| + start_marker, end_marker, descriptor_proto, field, index,
|
| + expected_leading_comments, expected_trailing_comments);
|
| }
|
|
|
| bool HasSpan(const Message& descriptor_proto) {
|
| - return HasSpan(NULL, NULL, descriptor_proto, NULL, -1);
|
| + return HasSpanWithComment(
|
| + '\0', '\0', descriptor_proto, NULL, -1, NULL, NULL);
|
| }
|
|
|
| bool HasSpan(const Message& descriptor_proto, const string& field_name) {
|
| - return HasSpan(NULL, NULL, descriptor_proto, field_name, -1);
|
| + return HasSpan('\0', '\0', descriptor_proto, field_name, -1);
|
| }
|
|
|
| bool HasSpan(const Message& descriptor_proto, const string& field_name,
|
| int index) {
|
| - return HasSpan(NULL, NULL, descriptor_proto, field_name, index);
|
| + return HasSpan('\0', '\0', descriptor_proto, field_name, index);
|
| }
|
|
|
| - bool HasSpan(const char* start_tag, const char* end_tag,
|
| - const Message& descriptor_proto, const FieldDescriptor* field,
|
| - int index) {
|
| + bool HasSpanWithComment(char start_marker, char end_marker,
|
| + const Message& descriptor_proto,
|
| + const FieldDescriptor* field, int index,
|
| + const char* expected_leading_comments,
|
| + const char* expected_trailing_comments) {
|
| pair<SpanMap::iterator, SpanMap::iterator> range =
|
| spans_.equal_range(SpanKey(descriptor_proto, field, index));
|
|
|
| - if (start_tag == NULL) {
|
| + if (start_marker == '\0') {
|
| if (range.first == range.second) {
|
| return false;
|
| } else {
|
| @@ -1521,11 +1597,34 @@
|
| return true;
|
| }
|
| } else {
|
| + pair<int, int> start_pos = FindOrDie(markers_, start_marker);
|
| + pair<int, int> end_pos = FindOrDie(markers_, end_marker);
|
| +
|
| RepeatedField<int> expected_span;
|
| - MakeExpectedSpan(lines_, start_tag, end_tag, &expected_span);
|
| + expected_span.Add(start_pos.first);
|
| + expected_span.Add(start_pos.second);
|
| + if (end_pos.first != start_pos.first) {
|
| + expected_span.Add(end_pos.first);
|
| + }
|
| + expected_span.Add(end_pos.second);
|
|
|
| for (SpanMap::iterator iter = range.first; iter != range.second; ++iter) {
|
| if (CompareSpans(expected_span, iter->second->span())) {
|
| + if (expected_leading_comments == NULL) {
|
| + EXPECT_FALSE(iter->second->has_leading_comments());
|
| + } else {
|
| + EXPECT_TRUE(iter->second->has_leading_comments());
|
| + EXPECT_EQ(expected_leading_comments,
|
| + iter->second->leading_comments());
|
| + }
|
| + if (expected_trailing_comments == NULL) {
|
| + EXPECT_FALSE(iter->second->has_trailing_comments());
|
| + } else {
|
| + EXPECT_TRUE(iter->second->has_trailing_comments());
|
| + EXPECT_EQ(expected_trailing_comments,
|
| + iter->second->trailing_comments());
|
| + }
|
| +
|
| spans_.erase(iter);
|
| return true;
|
| }
|
| @@ -1542,10 +1641,11 @@
|
| int index;
|
|
|
| inline SpanKey() {}
|
| - inline SpanKey(const Message& descriptor_proto,
|
| - const FieldDescriptor* field,
|
| - int index)
|
| - : descriptor_proto(&descriptor_proto), field(field), index(index) {}
|
| + inline SpanKey(const Message& descriptor_proto_param,
|
| + const FieldDescriptor* field_param,
|
| + int index_param)
|
| + : descriptor_proto(&descriptor_proto_param), field(field_param),
|
| + index(index_param) {}
|
|
|
| inline bool operator<(const SpanKey& other) const {
|
| if (descriptor_proto < other.descriptor_proto) return true;
|
| @@ -1558,32 +1658,63 @@
|
|
|
| typedef multimap<SpanKey, const SourceCodeInfo::Location*> SpanMap;
|
| SpanMap spans_;
|
| - vector<string> lines_;
|
| + map<char, pair<int, int> > markers_;
|
| + string text_without_markers_;
|
| +
|
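| +  // Extracts the location markers from the text.  A marker is written
|
| +  // "$m$", where m is any single character; its (line, column) position in
|
| +  // the marker-free text is recorded in markers_, and the text with all
|
| +  // markers removed is accumulated in text_without_markers_.  The sequence
|
| +  // "$$" denotes a literal '$' in the output text.
|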
| + void ExtractMarkers(const char* text) {
|
| + markers_.clear();
|
| + text_without_markers_.clear();
|
| + int line = 0;
|
| + int column = 0;
|
| + while (*text != '\0') {
|
| + if (*text == '$') {
|
| + ++text;
|
| + GOOGLE_CHECK_NE('\0', *text);
|
| + if (*text == '$') {
|
| + text_without_markers_ += '$';
|
| + ++column;
|
| + } else {
|
| + markers_[*text] = make_pair(line, column);
|
| + ++text;
|
| + GOOGLE_CHECK_EQ('$', *text);
|
| + }
|
| + } else if (*text == '\n') {
|
| + ++line;
|
| + column = 0;
|
| + text_without_markers_ += *text;
|
| + } else {
|
| + text_without_markers_ += *text;
|
| + ++column;
|
| + }
|
| + ++text;
|
| + }
|
| + }
|
| };
|
|
|
| TEST_F(SourceInfoTest, BasicFileDecls) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/syntax = \"proto2\";\n"
|
| - "package /*b*/foo.bar/*c*/;\n"
|
| - "import /*d*/\"baz.proto\"/*e*/;\n"
|
| - "import /*f*/\"qux.proto\"/*g*/;/*h*/\n"
|
| + "$a$syntax = \"proto2\";\n"
|
| + "package $b$foo.bar$c$;\n"
|
| + "import $d$\"baz.proto\"$e$;\n"
|
| + "import $f$\"qux.proto\"$g$;$h$\n"
|
| + "\n"
|
| "// comment ignored\n"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "h", file_));
|
| - EXPECT_TRUE(HasSpan("b", "c", file_, "package"));
|
| - EXPECT_TRUE(HasSpan("d", "e", file_, "dependency", 0));
|
| - EXPECT_TRUE(HasSpan("f", "g", file_, "dependency", 1));
|
| + EXPECT_TRUE(HasSpan('a', 'h', file_));
|
| + EXPECT_TRUE(HasSpan('b', 'c', file_, "package"));
|
| + EXPECT_TRUE(HasSpan('d', 'e', file_, "dependency", 0));
|
| + EXPECT_TRUE(HasSpan('f', 'g', file_, "dependency", 1));
|
| }
|
|
|
| TEST_F(SourceInfoTest, Messages) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/message /*b*/Foo/*c*/ {}/*d*/\n"
|
| - "/*e*/message /*f*/Bar/*g*/ {}/*h*/\n"));
|
| + "$a$message $b$Foo$c$ {}$d$\n"
|
| + "$e$message $f$Bar$g$ {}$h$\n"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "d", file_.message_type(0)));
|
| - EXPECT_TRUE(HasSpan("b", "c", file_.message_type(0), "name"));
|
| - EXPECT_TRUE(HasSpan("e", "h", file_.message_type(1)));
|
| - EXPECT_TRUE(HasSpan("f", "g", file_.message_type(1), "name"));
|
| + EXPECT_TRUE(HasSpan('a', 'd', file_.message_type(0)));
|
| + EXPECT_TRUE(HasSpan('b', 'c', file_.message_type(0), "name"));
|
| + EXPECT_TRUE(HasSpan('e', 'h', file_.message_type(1)));
|
| + EXPECT_TRUE(HasSpan('f', 'g', file_.message_type(1), "name"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1592,24 +1723,24 @@
|
| TEST_F(SourceInfoTest, Fields) {
|
| EXPECT_TRUE(Parse(
|
| "message Foo {\n"
|
| - " /*a*/optional/*b*/ /*c*/int32/*d*/ /*e*/bar/*f*/ = /*g*/1/*h*/;/*i*/\n"
|
| - " /*j*/repeated/*k*/ /*l*/X.Y/*m*/ /*n*/baz/*o*/ = /*p*/2/*q*/;/*r*/\n"
|
| + " $a$optional$b$ $c$int32$d$ $e$bar$f$ = $g$1$h$;$i$\n"
|
| + " $j$repeated$k$ $l$X.Y$m$ $n$baz$o$ = $p$2$q$;$r$\n"
|
| "}\n"));
|
|
|
| const FieldDescriptorProto& field1 = file_.message_type(0).field(0);
|
| const FieldDescriptorProto& field2 = file_.message_type(0).field(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "i", field1));
|
| - EXPECT_TRUE(HasSpan("a", "b", field1, "label"));
|
| - EXPECT_TRUE(HasSpan("c", "d", field1, "type"));
|
| - EXPECT_TRUE(HasSpan("e", "f", field1, "name"));
|
| - EXPECT_TRUE(HasSpan("g", "h", field1, "number"));
|
| + EXPECT_TRUE(HasSpan('a', 'i', field1));
|
| + EXPECT_TRUE(HasSpan('a', 'b', field1, "label"));
|
| + EXPECT_TRUE(HasSpan('c', 'd', field1, "type"));
|
| + EXPECT_TRUE(HasSpan('e', 'f', field1, "name"));
|
| + EXPECT_TRUE(HasSpan('g', 'h', field1, "number"));
|
|
|
| - EXPECT_TRUE(HasSpan("j", "r", field2));
|
| - EXPECT_TRUE(HasSpan("j", "k", field2, "label"));
|
| - EXPECT_TRUE(HasSpan("l", "m", field2, "type_name"));
|
| - EXPECT_TRUE(HasSpan("n", "o", field2, "name"));
|
| - EXPECT_TRUE(HasSpan("p", "q", field2, "number"));
|
| + EXPECT_TRUE(HasSpan('j', 'r', field2));
|
| + EXPECT_TRUE(HasSpan('j', 'k', field2, "label"));
|
| + EXPECT_TRUE(HasSpan('l', 'm', field2, "type_name"));
|
| + EXPECT_TRUE(HasSpan('n', 'o', field2, "name"));
|
| + EXPECT_TRUE(HasSpan('p', 'q', field2, "number"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1619,31 +1750,31 @@
|
|
|
| TEST_F(SourceInfoTest, Extensions) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/extend /*b*/Foo/*c*/ {\n"
|
| - " /*d*/optional/*e*/ int32 bar = 1;/*f*/\n"
|
| - " /*g*/repeated/*h*/ X.Y baz = 2;/*i*/\n"
|
| - "}/*j*/\n"
|
| - "/*k*/extend /*l*/Bar/*m*/ {\n"
|
| - " /*n*/optional int32 qux = 1;/*o*/\n"
|
| - "}/*p*/\n"));
|
| + "$a$extend $b$Foo$c$ {\n"
|
| + " $d$optional$e$ int32 bar = 1;$f$\n"
|
| + " $g$repeated$h$ X.Y baz = 2;$i$\n"
|
| + "}$j$\n"
|
| + "$k$extend $l$Bar$m$ {\n"
|
| + " $n$optional int32 qux = 1;$o$\n"
|
| + "}$p$\n"));
|
|
|
| const FieldDescriptorProto& field1 = file_.extension(0);
|
| const FieldDescriptorProto& field2 = file_.extension(1);
|
| const FieldDescriptorProto& field3 = file_.extension(2);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "j", file_, "extension"));
|
| - EXPECT_TRUE(HasSpan("k", "p", file_, "extension"));
|
| + EXPECT_TRUE(HasSpan('a', 'j', file_, "extension"));
|
| + EXPECT_TRUE(HasSpan('k', 'p', file_, "extension"));
|
|
|
| - EXPECT_TRUE(HasSpan("d", "f", field1));
|
| - EXPECT_TRUE(HasSpan("d", "e", field1, "label"));
|
| - EXPECT_TRUE(HasSpan("b", "c", field1, "extendee"));
|
| + EXPECT_TRUE(HasSpan('d', 'f', field1));
|
| + EXPECT_TRUE(HasSpan('d', 'e', field1, "label"));
|
| + EXPECT_TRUE(HasSpan('b', 'c', field1, "extendee"));
|
|
|
| - EXPECT_TRUE(HasSpan("g", "i", field2));
|
| - EXPECT_TRUE(HasSpan("g", "h", field2, "label"));
|
| - EXPECT_TRUE(HasSpan("b", "c", field2, "extendee"));
|
| + EXPECT_TRUE(HasSpan('g', 'i', field2));
|
| + EXPECT_TRUE(HasSpan('g', 'h', field2, "label"));
|
| + EXPECT_TRUE(HasSpan('b', 'c', field2, "extendee"));
|
|
|
| - EXPECT_TRUE(HasSpan("n", "o", field3));
|
| - EXPECT_TRUE(HasSpan("l", "m", field3, "extendee"));
|
| + EXPECT_TRUE(HasSpan('n', 'o', field3));
|
| + EXPECT_TRUE(HasSpan('l', 'm', field3, "extendee"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1662,32 +1793,32 @@
|
| TEST_F(SourceInfoTest, NestedExtensions) {
|
| EXPECT_TRUE(Parse(
|
| "message Message {\n"
|
| - " /*a*/extend /*b*/Foo/*c*/ {\n"
|
| - " /*d*/optional/*e*/ int32 bar = 1;/*f*/\n"
|
| - " /*g*/repeated/*h*/ X.Y baz = 2;/*i*/\n"
|
| - " }/*j*/\n"
|
| - " /*k*/extend /*l*/Bar/*m*/ {\n"
|
| - " /*n*/optional int32 qux = 1;/*o*/\n"
|
| - " }/*p*/\n"
|
| + " $a$extend $b$Foo$c$ {\n"
|
| + " $d$optional$e$ int32 bar = 1;$f$\n"
|
| + " $g$repeated$h$ X.Y baz = 2;$i$\n"
|
| + " }$j$\n"
|
| + " $k$extend $l$Bar$m$ {\n"
|
| + " $n$optional int32 qux = 1;$o$\n"
|
| + " }$p$\n"
|
| "}\n"));
|
|
|
| const FieldDescriptorProto& field1 = file_.message_type(0).extension(0);
|
| const FieldDescriptorProto& field2 = file_.message_type(0).extension(1);
|
| const FieldDescriptorProto& field3 = file_.message_type(0).extension(2);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "j", file_.message_type(0), "extension"));
|
| - EXPECT_TRUE(HasSpan("k", "p", file_.message_type(0), "extension"));
|
| + EXPECT_TRUE(HasSpan('a', 'j', file_.message_type(0), "extension"));
|
| + EXPECT_TRUE(HasSpan('k', 'p', file_.message_type(0), "extension"));
|
|
|
| - EXPECT_TRUE(HasSpan("d", "f", field1));
|
| - EXPECT_TRUE(HasSpan("d", "e", field1, "label"));
|
| - EXPECT_TRUE(HasSpan("b", "c", field1, "extendee"));
|
| + EXPECT_TRUE(HasSpan('d', 'f', field1));
|
| + EXPECT_TRUE(HasSpan('d', 'e', field1, "label"));
|
| + EXPECT_TRUE(HasSpan('b', 'c', field1, "extendee"));
|
|
|
| - EXPECT_TRUE(HasSpan("g", "i", field2));
|
| - EXPECT_TRUE(HasSpan("g", "h", field2, "label"));
|
| - EXPECT_TRUE(HasSpan("b", "c", field2, "extendee"));
|
| + EXPECT_TRUE(HasSpan('g', 'i', field2));
|
| + EXPECT_TRUE(HasSpan('g', 'h', field2, "label"));
|
| + EXPECT_TRUE(HasSpan('b', 'c', field2, "extendee"));
|
|
|
| - EXPECT_TRUE(HasSpan("n", "o", field3));
|
| - EXPECT_TRUE(HasSpan("l", "m", field3, "extendee"));
|
| + EXPECT_TRUE(HasSpan('n', 'o', field3));
|
| + EXPECT_TRUE(HasSpan('l', 'm', field3, "extendee"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1708,8 +1839,8 @@
|
| TEST_F(SourceInfoTest, ExtensionRanges) {
|
| EXPECT_TRUE(Parse(
|
| "message Message {\n"
|
| - " /*a*/extensions /*b*/1/*c*/ to /*d*/4/*e*/, /*f*/6/*g*/;/*h*/\n"
|
| - " /*i*/extensions /*j*/8/*k*/ to /*l*/max/*m*/;/*n*/\n"
|
| + " $a$extensions $b$1$c$ to $d$4$e$, $f$6$g$;$h$\n"
|
| + " $i$extensions $j$8$k$ to $l$max$m$;$n$\n"
|
| "}\n"));
|
|
|
| const DescriptorProto::ExtensionRange& range1 =
|
| @@ -1719,20 +1850,20 @@
|
| const DescriptorProto::ExtensionRange& range3 =
|
| file_.message_type(0).extension_range(2);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "h", file_.message_type(0), "extension_range"));
|
| - EXPECT_TRUE(HasSpan("i", "n", file_.message_type(0), "extension_range"));
|
| + EXPECT_TRUE(HasSpan('a', 'h', file_.message_type(0), "extension_range"));
|
| + EXPECT_TRUE(HasSpan('i', 'n', file_.message_type(0), "extension_range"));
|
|
|
| - EXPECT_TRUE(HasSpan("b", "e", range1));
|
| - EXPECT_TRUE(HasSpan("b", "c", range1, "start"));
|
| - EXPECT_TRUE(HasSpan("d", "e", range1, "end"));
|
| + EXPECT_TRUE(HasSpan('b', 'e', range1));
|
| + EXPECT_TRUE(HasSpan('b', 'c', range1, "start"));
|
| + EXPECT_TRUE(HasSpan('d', 'e', range1, "end"));
|
|
|
| - EXPECT_TRUE(HasSpan("f", "g", range2));
|
| - EXPECT_TRUE(HasSpan("f", "g", range2, "start"));
|
| - EXPECT_TRUE(HasSpan("f", "g", range2, "end"));
|
| + EXPECT_TRUE(HasSpan('f', 'g', range2));
|
| + EXPECT_TRUE(HasSpan('f', 'g', range2, "start"));
|
| + EXPECT_TRUE(HasSpan('f', 'g', range2, "end"));
|
|
|
| - EXPECT_TRUE(HasSpan("j", "m", range3));
|
| - EXPECT_TRUE(HasSpan("j", "k", range3, "start"));
|
| - EXPECT_TRUE(HasSpan("l", "m", range3, "end"));
|
| + EXPECT_TRUE(HasSpan('j', 'm', range3));
|
| + EXPECT_TRUE(HasSpan('j', 'k', range3, "start"));
|
| + EXPECT_TRUE(HasSpan('l', 'm', range3, "end"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1743,22 +1874,22 @@
|
| TEST_F(SourceInfoTest, NestedMessages) {
|
| EXPECT_TRUE(Parse(
|
| "message Foo {\n"
|
| - " /*a*/message /*b*/Bar/*c*/ {\n"
|
| - " /*d*/message /*e*/Baz/*f*/ {}/*g*/\n"
|
| - " }/*h*/\n"
|
| - " /*i*/message /*j*/Qux/*k*/ {}/*l*/\n"
|
| + " $a$message $b$Bar$c$ {\n"
|
| + " $d$message $e$Baz$f$ {}$g$\n"
|
| + " }$h$\n"
|
| + " $i$message $j$Qux$k$ {}$l$\n"
|
| "}\n"));
|
|
|
| const DescriptorProto& bar = file_.message_type(0).nested_type(0);
|
| const DescriptorProto& baz = bar.nested_type(0);
|
| const DescriptorProto& qux = file_.message_type(0).nested_type(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "h", bar));
|
| - EXPECT_TRUE(HasSpan("b", "c", bar, "name"));
|
| - EXPECT_TRUE(HasSpan("d", "g", baz));
|
| - EXPECT_TRUE(HasSpan("e", "f", baz, "name"));
|
| - EXPECT_TRUE(HasSpan("i", "l", qux));
|
| - EXPECT_TRUE(HasSpan("j", "k", qux, "name"));
|
| + EXPECT_TRUE(HasSpan('a', 'h', bar));
|
| + EXPECT_TRUE(HasSpan('b', 'c', bar, "name"));
|
| + EXPECT_TRUE(HasSpan('d', 'g', baz));
|
| + EXPECT_TRUE(HasSpan('e', 'f', baz, "name"));
|
| + EXPECT_TRUE(HasSpan('i', 'l', qux));
|
| + EXPECT_TRUE(HasSpan('j', 'k', qux, "name"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1770,9 +1901,9 @@
|
| EXPECT_TRUE(Parse(
|
| "message Foo {\n"
|
| " message Bar {}\n"
|
| - " /*a*/optional/*b*/ /*c*/group/*d*/ /*e*/Baz/*f*/ = /*g*/1/*h*/ {\n"
|
| - " /*i*/message Qux {}/*j*/\n"
|
| - " }/*k*/\n"
|
| + " $a$optional$b$ $c$group$d$ $e$Baz$f$ = $g$1$h$ {\n"
|
| + " $i$message Qux {}$j$\n"
|
| + " }$k$\n"
|
| "}\n"));
|
|
|
| const DescriptorProto& bar = file_.message_type(0).nested_type(0);
|
| @@ -1780,16 +1911,16 @@
|
| const DescriptorProto& qux = baz.nested_type(0);
|
| const FieldDescriptorProto& field = file_.message_type(0).field(0);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "k", field));
|
| - EXPECT_TRUE(HasSpan("a", "b", field, "label"));
|
| - EXPECT_TRUE(HasSpan("c", "d", field, "type"));
|
| - EXPECT_TRUE(HasSpan("e", "f", field, "name"));
|
| - EXPECT_TRUE(HasSpan("e", "f", field, "type_name"));
|
| - EXPECT_TRUE(HasSpan("g", "h", field, "number"));
|
| + EXPECT_TRUE(HasSpan('a', 'k', field));
|
| + EXPECT_TRUE(HasSpan('a', 'b', field, "label"));
|
| + EXPECT_TRUE(HasSpan('c', 'd', field, "type"));
|
| + EXPECT_TRUE(HasSpan('e', 'f', field, "name"));
|
| + EXPECT_TRUE(HasSpan('e', 'f', field, "type_name"));
|
| + EXPECT_TRUE(HasSpan('g', 'h', field, "number"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "k", baz));
|
| - EXPECT_TRUE(HasSpan("e", "f", baz, "name"));
|
| - EXPECT_TRUE(HasSpan("i", "j", qux));
|
| + EXPECT_TRUE(HasSpan('a', 'k', baz));
|
| + EXPECT_TRUE(HasSpan('e', 'f', baz, "name"));
|
| + EXPECT_TRUE(HasSpan('i', 'j', qux));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1802,13 +1933,13 @@
|
|
|
| TEST_F(SourceInfoTest, Enums) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/enum /*b*/Foo/*c*/ {}/*d*/\n"
|
| - "/*e*/enum /*f*/Bar/*g*/ {}/*h*/\n"));
|
| + "$a$enum $b$Foo$c$ {}$d$\n"
|
| + "$e$enum $f$Bar$g$ {}$h$\n"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "d", file_.enum_type(0)));
|
| - EXPECT_TRUE(HasSpan("b", "c", file_.enum_type(0), "name"));
|
| - EXPECT_TRUE(HasSpan("e", "h", file_.enum_type(1)));
|
| - EXPECT_TRUE(HasSpan("f", "g", file_.enum_type(1), "name"));
|
| + EXPECT_TRUE(HasSpan('a', 'd', file_.enum_type(0)));
|
| + EXPECT_TRUE(HasSpan('b', 'c', file_.enum_type(0), "name"));
|
| + EXPECT_TRUE(HasSpan('e', 'h', file_.enum_type(1)));
|
| + EXPECT_TRUE(HasSpan('f', 'g', file_.enum_type(1), "name"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1817,19 +1948,19 @@
|
| TEST_F(SourceInfoTest, EnumValues) {
|
| EXPECT_TRUE(Parse(
|
| "enum Foo {\n"
|
| - " /*a*/BAR/*b*/ = /*c*/1/*d*/;/*e*/\n"
|
| - " /*f*/BAZ/*g*/ = /*h*/2/*i*/;/*j*/\n"
|
| + " $a$BAR$b$ = $c$1$d$;$e$\n"
|
| + " $f$BAZ$g$ = $h$2$i$;$j$\n"
|
| "}"));
|
|
|
| const EnumValueDescriptorProto& bar = file_.enum_type(0).value(0);
|
| const EnumValueDescriptorProto& baz = file_.enum_type(0).value(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "e", bar));
|
| - EXPECT_TRUE(HasSpan("a", "b", bar, "name"));
|
| - EXPECT_TRUE(HasSpan("c", "d", bar, "number"));
|
| - EXPECT_TRUE(HasSpan("f", "j", baz));
|
| - EXPECT_TRUE(HasSpan("f", "g", baz, "name"));
|
| - EXPECT_TRUE(HasSpan("h", "i", baz, "number"));
|
| + EXPECT_TRUE(HasSpan('a', 'e', bar));
|
| + EXPECT_TRUE(HasSpan('a', 'b', bar, "name"));
|
| + EXPECT_TRUE(HasSpan('c', 'd', bar, "number"));
|
| + EXPECT_TRUE(HasSpan('f', 'j', baz));
|
| + EXPECT_TRUE(HasSpan('f', 'g', baz, "name"));
|
| + EXPECT_TRUE(HasSpan('h', 'i', baz, "number"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1840,17 +1971,17 @@
|
| TEST_F(SourceInfoTest, NestedEnums) {
|
| EXPECT_TRUE(Parse(
|
| "message Foo {\n"
|
| - " /*a*/enum /*b*/Bar/*c*/ {}/*d*/\n"
|
| - " /*e*/enum /*f*/Baz/*g*/ {}/*h*/\n"
|
| + " $a$enum $b$Bar$c$ {}$d$\n"
|
| + " $e$enum $f$Baz$g$ {}$h$\n"
|
| "}\n"));
|
|
|
| const EnumDescriptorProto& bar = file_.message_type(0).enum_type(0);
|
| const EnumDescriptorProto& baz = file_.message_type(0).enum_type(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "d", bar));
|
| - EXPECT_TRUE(HasSpan("b", "c", bar, "name"));
|
| - EXPECT_TRUE(HasSpan("e", "h", baz));
|
| - EXPECT_TRUE(HasSpan("f", "g", baz, "name"));
|
| + EXPECT_TRUE(HasSpan('a', 'd', bar));
|
| + EXPECT_TRUE(HasSpan('b', 'c', bar, "name"));
|
| + EXPECT_TRUE(HasSpan('e', 'h', baz));
|
| + EXPECT_TRUE(HasSpan('f', 'g', baz, "name"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1860,37 +1991,37 @@
|
|
|
| TEST_F(SourceInfoTest, Services) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/service /*b*/Foo/*c*/ {}/*d*/\n"
|
| - "/*e*/service /*f*/Bar/*g*/ {}/*h*/\n"));
|
| + "$a$service $b$Foo$c$ {}$d$\n"
|
| + "$e$service $f$Bar$g$ {}$h$\n"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "d", file_.service(0)));
|
| - EXPECT_TRUE(HasSpan("b", "c", file_.service(0), "name"));
|
| - EXPECT_TRUE(HasSpan("e", "h", file_.service(1)));
|
| - EXPECT_TRUE(HasSpan("f", "g", file_.service(1), "name"));
|
| + EXPECT_TRUE(HasSpan('a', 'd', file_.service(0)));
|
| + EXPECT_TRUE(HasSpan('b', 'c', file_.service(0), "name"));
|
| + EXPECT_TRUE(HasSpan('e', 'h', file_.service(1)));
|
| + EXPECT_TRUE(HasSpan('f', 'g', file_.service(1), "name"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| }
|
|
|
| -TEST_F(SourceInfoTest, Methods) {
|
| +TEST_F(SourceInfoTest, MethodsAndStreams) {
|
| EXPECT_TRUE(Parse(
|
| "service Foo {\n"
|
| - " /*a*/rpc /*b*/Bar/*c*/(/*d*/X/*e*/) returns(/*f*/Y/*g*/);/*h*/"
|
| - " /*i*/rpc /*j*/Baz/*k*/(/*l*/Z/*m*/) returns(/*n*/W/*o*/);/*p*/"
|
| + " $a$rpc $b$Bar$c$($d$X$e$) returns($f$Y$g$);$h$"
|
| + " $i$rpc $j$Baz$k$($l$Z$m$) returns($n$W$o$);$p$"
|
| "}"));
|
|
|
| const MethodDescriptorProto& bar = file_.service(0).method(0);
|
| const MethodDescriptorProto& baz = file_.service(0).method(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "h", bar));
|
| - EXPECT_TRUE(HasSpan("b", "c", bar, "name"));
|
| - EXPECT_TRUE(HasSpan("d", "e", bar, "input_type"));
|
| - EXPECT_TRUE(HasSpan("f", "g", bar, "output_type"));
|
| + EXPECT_TRUE(HasSpan('a', 'h', bar));
|
| + EXPECT_TRUE(HasSpan('b', 'c', bar, "name"));
|
| + EXPECT_TRUE(HasSpan('d', 'e', bar, "input_type"));
|
| + EXPECT_TRUE(HasSpan('f', 'g', bar, "output_type"));
|
|
|
| - EXPECT_TRUE(HasSpan("i", "p", baz));
|
| - EXPECT_TRUE(HasSpan("j", "k", baz, "name"));
|
| - EXPECT_TRUE(HasSpan("l", "m", baz, "input_type"));
|
| - EXPECT_TRUE(HasSpan("n", "o", baz, "output_type"));
|
| + EXPECT_TRUE(HasSpan('i', 'p', baz));
|
| + EXPECT_TRUE(HasSpan('j', 'k', baz, "name"));
|
| + EXPECT_TRUE(HasSpan('l', 'm', baz, "input_type"));
|
| + EXPECT_TRUE(HasSpan('n', 'o', baz, "output_type"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1900,13 +2031,13 @@
|
|
|
| TEST_F(SourceInfoTest, Options) {
|
| EXPECT_TRUE(Parse(
|
| - "/*a*/option /*b*/foo/*c*/./*d*/(/*e*/bar.baz/*f*/)/*g*/ = "
|
| - "/*h*/123/*i*/;/*j*/\n"
|
| - "/*k*/option qux = /*l*/-123/*m*/;/*n*/\n"
|
| - "/*o*/option corge = /*p*/abc/*q*/;/*r*/\n"
|
| - "/*s*/option grault = /*t*/'blah'/*u*/;/*v*/\n"
|
| - "/*w*/option garply = /*x*/{ yadda yadda }/*y*/;/*z*/\n"
|
| - "/*0*/option waldo = /*1*/123.0/*2*/;/*3*/\n"
|
| + "$a$option $b$foo$c$.$d$($e$bar.baz$f$)$g$ = "
|
| + "$h$123$i$;$j$\n"
|
| + "$k$option qux = $l$-123$m$;$n$\n"
|
| + "$o$option corge = $p$abc$q$;$r$\n"
|
| + "$s$option grault = $t$'blah'$u$;$v$\n"
|
| + "$w$option garply = $x${ yadda yadda }$y$;$z$\n"
|
| + "$0$option waldo = $1$123.0$2$;$3$\n"
|
| ));
|
|
|
| const UninterpretedOption& option1 = file_.options().uninterpreted_option(0);
|
| @@ -1916,29 +2047,29 @@
|
| const UninterpretedOption& option5 = file_.options().uninterpreted_option(4);
|
| const UninterpretedOption& option6 = file_.options().uninterpreted_option(5);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "j", file_.options()));
|
| - EXPECT_TRUE(HasSpan("b", "i", option1));
|
| - EXPECT_TRUE(HasSpan("b", "g", option1, "name"));
|
| - EXPECT_TRUE(HasSpan("b", "c", option1.name(0)));
|
| - EXPECT_TRUE(HasSpan("b", "c", option1.name(0), "name_part"));
|
| - EXPECT_TRUE(HasSpan("d", "g", option1.name(1)));
|
| - EXPECT_TRUE(HasSpan("e", "f", option1.name(1), "name_part"));
|
| - EXPECT_TRUE(HasSpan("h", "i", option1, "positive_int_value"));
|
| + EXPECT_TRUE(HasSpan('a', 'j', file_.options()));
|
| + EXPECT_TRUE(HasSpan('a', 'j', option1));
|
| + EXPECT_TRUE(HasSpan('b', 'g', option1, "name"));
|
| + EXPECT_TRUE(HasSpan('b', 'c', option1.name(0)));
|
| + EXPECT_TRUE(HasSpan('b', 'c', option1.name(0), "name_part"));
|
| + EXPECT_TRUE(HasSpan('d', 'g', option1.name(1)));
|
| + EXPECT_TRUE(HasSpan('e', 'f', option1.name(1), "name_part"));
|
| + EXPECT_TRUE(HasSpan('h', 'i', option1, "positive_int_value"));
|
|
|
| - EXPECT_TRUE(HasSpan("k", "n", file_.options()));
|
| - EXPECT_TRUE(HasSpan("l", "m", option2, "negative_int_value"));
|
| + EXPECT_TRUE(HasSpan('k', 'n', file_.options()));
|
| + EXPECT_TRUE(HasSpan('l', 'm', option2, "negative_int_value"));
|
|
|
| - EXPECT_TRUE(HasSpan("o", "r", file_.options()));
|
| - EXPECT_TRUE(HasSpan("p", "q", option3, "identifier_value"));
|
| + EXPECT_TRUE(HasSpan('o', 'r', file_.options()));
|
| + EXPECT_TRUE(HasSpan('p', 'q', option3, "identifier_value"));
|
|
|
| - EXPECT_TRUE(HasSpan("s", "v", file_.options()));
|
| - EXPECT_TRUE(HasSpan("t", "u", option4, "string_value"));
|
| + EXPECT_TRUE(HasSpan('s', 'v', file_.options()));
|
| + EXPECT_TRUE(HasSpan('t', 'u', option4, "string_value"));
|
|
|
| - EXPECT_TRUE(HasSpan("w", "z", file_.options()));
|
| - EXPECT_TRUE(HasSpan("x", "y", option5, "aggregate_value"));
|
| + EXPECT_TRUE(HasSpan('w', 'z', file_.options()));
|
| + EXPECT_TRUE(HasSpan('x', 'y', option5, "aggregate_value"));
|
|
|
| - EXPECT_TRUE(HasSpan("0", "3", file_.options()));
|
| - EXPECT_TRUE(HasSpan("1", "2", option6, "double_value"));
|
| + EXPECT_TRUE(HasSpan('0', '3', file_.options()));
|
| + EXPECT_TRUE(HasSpan('1', '2', option6, "double_value"));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -1967,22 +2098,22 @@
|
| TEST_F(SourceInfoTest, ScopedOptions) {
|
| EXPECT_TRUE(Parse(
|
| "message Foo {\n"
|
| - " /*a*/option mopt = 1;/*b*/\n"
|
| + " $a$option mopt = 1;$b$\n"
|
| "}\n"
|
| "enum Bar {\n"
|
| - " /*c*/option eopt = 1;/*d*/\n"
|
| + " $c$option eopt = 1;$d$\n"
|
| "}\n"
|
| "service Baz {\n"
|
| - " /*e*/option sopt = 1;/*f*/\n"
|
| + " $e$option sopt = 1;$f$\n"
|
| " rpc M(X) returns(Y) {\n"
|
| - " /*g*/option mopt = 1;/*h*/\n"
|
| + " $g$option mopt = 1;$h$\n"
|
| " }\n"
|
| "}\n"));
|
|
|
| - EXPECT_TRUE(HasSpan("a", "b", file_.message_type(0).options()));
|
| - EXPECT_TRUE(HasSpan("c", "d", file_.enum_type(0).options()));
|
| - EXPECT_TRUE(HasSpan("e", "f", file_.service(0).options()));
|
| - EXPECT_TRUE(HasSpan("g", "h", file_.service(0).method(0).options()));
|
| + EXPECT_TRUE(HasSpan('a', 'b', file_.message_type(0).options()));
|
| + EXPECT_TRUE(HasSpan('c', 'd', file_.enum_type(0).options()));
|
| + EXPECT_TRUE(HasSpan('e', 'f', file_.service(0).options()));
|
| + EXPECT_TRUE(HasSpan('g', 'h', file_.service(0).method(0).options()));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -2045,8 +2176,8 @@
|
| EXPECT_TRUE(Parse(
|
| "message Foo {"
|
| " optional int32 bar = 1 "
|
| - "/*a*/[default=/*b*/123/*c*/,/*d*/opt1=123/*e*/,"
|
| - "/*f*/opt2='hi'/*g*/]/*h*/;"
|
| + "$a$[default=$b$123$c$,$d$opt1=123$e$,"
|
| + "$f$opt2='hi'$g$]$h$;"
|
| "}\n"
|
| ));
|
|
|
| @@ -2054,10 +2185,10 @@
|
| const UninterpretedOption& option1 = field.options().uninterpreted_option(0);
|
| const UninterpretedOption& option2 = field.options().uninterpreted_option(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "h", field.options()));
|
| - EXPECT_TRUE(HasSpan("b", "c", field, "default_value"));
|
| - EXPECT_TRUE(HasSpan("d", "e", option1));
|
| - EXPECT_TRUE(HasSpan("f", "g", option2));
|
| + EXPECT_TRUE(HasSpan('a', 'h', field.options()));
|
| + EXPECT_TRUE(HasSpan('b', 'c', field, "default_value"));
|
| + EXPECT_TRUE(HasSpan('d', 'e', option1));
|
| + EXPECT_TRUE(HasSpan('f', 'g', option2));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -2084,7 +2215,7 @@
|
| // syntax used for enum options is understood.
|
| EXPECT_TRUE(Parse(
|
| "enum Foo {"
|
| - " BAR = 1 /*a*/[/*b*/opt1=123/*c*/,/*d*/opt2='hi'/*e*/]/*f*/;"
|
| + " BAR = 1 $a$[$b$opt1=123$c$,$d$opt2='hi'$e$]$f$;"
|
| "}\n"
|
| ));
|
|
|
| @@ -2092,9 +2223,9 @@
|
| const UninterpretedOption& option1 = value.options().uninterpreted_option(0);
|
| const UninterpretedOption& option2 = value.options().uninterpreted_option(1);
|
|
|
| - EXPECT_TRUE(HasSpan("a", "f", value.options()));
|
| - EXPECT_TRUE(HasSpan("b", "c", option1));
|
| - EXPECT_TRUE(HasSpan("d", "e", option2));
|
| + EXPECT_TRUE(HasSpan('a', 'f', value.options()));
|
| + EXPECT_TRUE(HasSpan('b', 'c', option1));
|
| + EXPECT_TRUE(HasSpan('d', 'e', option2));
|
|
|
| // Ignore these.
|
| EXPECT_TRUE(HasSpan(file_));
|
| @@ -2113,6 +2244,127 @@
|
| EXPECT_TRUE(HasSpan(option2, "string_value"));
|
| }
|
|
|
| +TEST_F(SourceInfoTest, DocComments) {
|
| + EXPECT_TRUE(Parse(
|
| + "// Foo leading\n"
|
| + "// line 2\n"
|
| + "$a$message Foo {\n"
|
| + " // Foo trailing\n"
|
| + " // line 2\n"
|
| + "\n"
|
| + " // ignored\n"
|
| + "\n"
|
| + " // bar leading\n"
|
| + " $b$optional int32 bar = 1;$c$\n"
|
| + " // bar trailing\n"
|
| + "}$d$\n"
|
| + "// ignored\n"
|
| + ));
|
| +
|
| + const DescriptorProto& foo = file_.message_type(0);
|
| + const FieldDescriptorProto& bar = foo.field(0);
|
| +
|
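| +  // The expected comment text is the raw comment with the "//" markers
|
| +  // stripped; the leading space and line breaks are preserved.
|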
| + EXPECT_TRUE(HasSpanWithComment('a', 'd', foo,
|
| + " Foo leading\n line 2\n",
|
| + " Foo trailing\n line 2\n"));
|
| + EXPECT_TRUE(HasSpanWithComment('b', 'c', bar,
|
| + " bar leading\n",
|
| + " bar trailing\n"));
|
| +
|
| + // Ignore these.
|
| + EXPECT_TRUE(HasSpan(file_));
|
| + EXPECT_TRUE(HasSpan(foo, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "label"));
|
| + EXPECT_TRUE(HasSpan(bar, "type"));
|
| + EXPECT_TRUE(HasSpan(bar, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "number"));
|
| +}
|
| +
|
| +TEST_F(SourceInfoTest, DocComments2) {
|
| + EXPECT_TRUE(Parse(
|
| + "// ignored\n"
|
| + "syntax = \"proto2\";\n"
|
| + "// Foo leading\n"
|
| + "// line 2\n"
|
| + "$a$message Foo {\n"
|
| + " /* Foo trailing\n"
|
| + " * line 2 */\n"
|
| + " // ignored\n"
|
| + " /* bar leading\n"
|
| + " */"
|
| + " $b$optional int32 bar = 1;$c$ // bar trailing\n"
|
| + " // ignored\n"
|
| + "}$d$\n"
|
| + "// ignored\n"
|
| + "\n"
|
| + "// option leading\n"
|
| + "$e$option baz = 123;$f$\n"
|
| + "// option trailing\n"
|
| + ));
|
| +
|
| + const DescriptorProto& foo = file_.message_type(0);
|
| + const FieldDescriptorProto& bar = foo.field(0);
|
| + const UninterpretedOption& baz = file_.options().uninterpreted_option(0);
|
| +
|
| + EXPECT_TRUE(HasSpanWithComment('a', 'd', foo,
|
| + " Foo leading\n line 2\n",
|
| + " Foo trailing\n line 2 "));
|
| + EXPECT_TRUE(HasSpanWithComment('b', 'c', bar,
|
| + " bar leading\n",
|
| + " bar trailing\n"));
|
| + EXPECT_TRUE(HasSpanWithComment('e', 'f', baz,
|
| + " option leading\n",
|
| + " option trailing\n"));
|
| +
|
| + // Ignore these.
|
| + EXPECT_TRUE(HasSpan(file_));
|
| + EXPECT_TRUE(HasSpan(foo, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "label"));
|
| + EXPECT_TRUE(HasSpan(bar, "type"));
|
| + EXPECT_TRUE(HasSpan(bar, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "number"));
|
| + EXPECT_TRUE(HasSpan(file_.options()));
|
| + EXPECT_TRUE(HasSpan(baz, "name"));
|
| + EXPECT_TRUE(HasSpan(baz.name(0)));
|
| + EXPECT_TRUE(HasSpan(baz.name(0), "name_part"));
|
| + EXPECT_TRUE(HasSpan(baz, "positive_int_value"));
|
| +}
|
| +
|
| +TEST_F(SourceInfoTest, DocComments3) {
|
| + EXPECT_TRUE(Parse(
|
| + "$a$message Foo {\n"
|
| + " // bar leading\n"
|
| + " $b$optional int32 bar = 1 [(baz.qux) = {}];$c$\n"
|
| + " // bar trailing\n"
|
| + "}$d$\n"
|
| + "// ignored\n"
|
| + ));
|
| +
|
| + const DescriptorProto& foo = file_.message_type(0);
|
| + const FieldDescriptorProto& bar = foo.field(0);
|
| +
|
| + EXPECT_TRUE(HasSpanWithComment('b', 'c', bar,
|
| + " bar leading\n",
|
| + " bar trailing\n"));
|
| +
|
| + // Ignore these.
|
| + EXPECT_TRUE(HasSpan(file_));
|
| + EXPECT_TRUE(HasSpan(foo));
|
| + EXPECT_TRUE(HasSpan(foo, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "label"));
|
| + EXPECT_TRUE(HasSpan(bar, "type"));
|
| + EXPECT_TRUE(HasSpan(bar, "name"));
|
| + EXPECT_TRUE(HasSpan(bar, "number"));
|
| + EXPECT_TRUE(HasSpan(bar.options()));
|
| + EXPECT_TRUE(HasSpan(bar.options().uninterpreted_option(0)));
|
| + EXPECT_TRUE(HasSpan(bar.options().uninterpreted_option(0), "name"));
|
| + EXPECT_TRUE(HasSpan(bar.options().uninterpreted_option(0).name(0)));
|
| + EXPECT_TRUE(HasSpan(
|
| + bar.options().uninterpreted_option(0).name(0), "name_part"));
|
| + EXPECT_TRUE(HasSpan(
|
| + bar.options().uninterpreted_option(0), "aggregate_value"));
|
| +}
|
| +
|
| // ===================================================================
|
|
|
| } // anonymous namespace
|
|
|