Index: third_party/protobuf/src/google/protobuf/io/tokenizer.h
diff --git a/third_party/protobuf/src/google/protobuf/io/tokenizer.h b/third_party/protobuf/src/google/protobuf/io/tokenizer.h
index e80d564cd5e8f477a4cfdbee3c6cfb93ab95900f..64ee7d8475d8d89c71e60037e28f1a42d986700e 100644
--- a/third_party/protobuf/src/google/protobuf/io/tokenizer.h
+++ b/third_party/protobuf/src/google/protobuf/io/tokenizer.h
@@ -191,7 +191,7 @@ class LIBPROTOBUF_EXPORT Tokenizer {
   //    * grault. */
   //   optional int32 grault = 6;
   bool NextWithComments(string* prev_trailing_comments,
-                        std::vector<string>* detached_comments,
+                        vector<string>* detached_comments,
                         string* next_leading_comments);
 
   // Parse helpers ---------------------------------------------------
@@ -329,7 +329,7 @@ class LIBPROTOBUF_EXPORT Tokenizer {
   // depending on what was read. This needs to know if the first
   // character was a zero in order to correctly recognize hex and octal
   // numbers.
-  // It also needs to know if the first characted was a . to parse floating
+  // It also needs to know if the first character was a . to parse floating
   // point correctly.
   TokenType ConsumeNumber(bool started_with_zero, bool started_with_dot);
 
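For context on the API touched by the first hunk, a minimal sketch of how Tokenizer::NextWithComments can be driven from application code. It is not part of the patch and assumes the older, string-based protobuf I/O API that matches this header (ArrayInputStream from zero_copy_stream_impl_lite.h, the pure-virtual ErrorCollector::AddError in tokenizer.h); the StderrErrorCollector class and the sample input are illustrative, not existing library code.

#include <iostream>
#include <string>
#include <vector>

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::Tokenizer;

// Illustrative collector; Tokenizer requires a non-null ErrorCollector.
class StderrErrorCollector : public google::protobuf::io::ErrorCollector {
 public:
  void AddError(int line, int column, const std::string& message) override {
    // Line and column are zero-indexed, so add 1 for display.
    std::cerr << "error at " << (line + 1) << ":" << (column + 1) << ": "
              << message << std::endl;
  }
};

int main() {
  const char input_text[] =
      "// Detached comment.\n"
      "\n"
      "// Leading comment for grault.\n"
      "optional int32 grault = 6;  // Trailing comment for grault.\n";

  ArrayInputStream input(input_text, sizeof(input_text) - 1);
  StderrErrorCollector errors;
  Tokenizer tokenizer(&input, &errors);

  // Each call advances one token and reports the comments attached to it
  // (trailing comment of the previous token, detached comments in between,
  // leading comment of the next token).
  std::string prev_trailing, next_leading;
  std::vector<std::string> detached;
  while (tokenizer.NextWithComments(&prev_trailing, &detached, &next_leading)) {
    std::cout << "token: " << tokenizer.current().text
              << " (detached comments seen: " << detached.size() << ")\n";
  }
  return 0;
}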
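The comment corrected in the second hunk documents ConsumeNumber, a private parse helper, so it cannot be called directly; its effect is only visible through the public Next()/current() API, which classifies numeric tokens as TYPE_INTEGER or TYPE_FLOAT. A second sketch under the same assumptions as above; the inputs and the SilentErrorCollector name are illustrative.

#include <iostream>
#include <string>

#include <google/protobuf/io/tokenizer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>

using google::protobuf::io::ArrayInputStream;
using google::protobuf::io::Tokenizer;

// Illustrative collector that drops errors.
class SilentErrorCollector : public google::protobuf::io::ErrorCollector {
 public:
  void AddError(int /*line*/, int /*column*/, const std::string& /*msg*/) override {}
};

static const char* TypeName(Tokenizer::TokenType type) {
  switch (type) {
    case Tokenizer::TYPE_INTEGER: return "INTEGER";
    case Tokenizer::TYPE_FLOAT:   return "FLOAT";
    default:                      return "OTHER";
  }
}

int main() {
  // "0x1f" and "010" start with a zero, which is how hex and octal
  // integers are recognized; ".5" starts with a dot, which is why
  // ConsumeNumber needs started_with_dot to classify it as a float.
  const char input_text[] = "0x1f 010 .5 1e3 42";

  ArrayInputStream input(input_text, sizeof(input_text) - 1);
  SilentErrorCollector errors;
  Tokenizer tokenizer(&input, &errors);

  while (tokenizer.Next()) {
    std::cout << tokenizer.current().text << " -> "
              << TypeName(tokenizer.current().type) << "\n";
  }
  return 0;
}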