Index: third_party/protobuf/src/google/protobuf/io/tokenizer.h
diff --git a/third_party/protobuf/src/google/protobuf/io/tokenizer.h b/third_party/protobuf/src/google/protobuf/io/tokenizer.h
index d115161f5823fc85326fdd87e829ec2c091f14a3..8f759abbd886acffcee52b6e9fe542b5b0b425b2 100644
--- a/third_party/protobuf/src/google/protobuf/io/tokenizer.h
+++ b/third_party/protobuf/src/google/protobuf/io/tokenizer.h
@@ -122,12 +122,17 @@ class LIBPROTOBUF_EXPORT Tokenizer {
     // the token within the input stream. They are zero-based.
     int line;
     int column;
+    int end_column;
   };
 
   // Get the current token. This is updated when Next() is called. Before
   // the first call to Next(), current() has type TYPE_START and no contents.
   const Token& current();
 
+  // Return the previous token -- i.e. what current() returned before the
+  // previous call to Next().
+  const Token& previous();
+
   // Advance to the next token. Returns false if the end of the input is
   // reached.
   bool Next();
@@ -180,6 +185,7 @@ class LIBPROTOBUF_EXPORT Tokenizer {
   GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(Tokenizer);
 
   Token current_;           // Returned by current().
+  Token previous_;          // Returned by previous().
 
   ZeroCopyInputStream* input_;
   ErrorCollector* error_collector_;
@@ -291,6 +297,10 @@ inline const Tokenizer::Token& Tokenizer::current() {
   return current_;
 }
 
+inline const Tokenizer::Token& Tokenizer::previous() {
+  return previous_;
+}
+
 inline void Tokenizer::ParseString(const string& text, string* output) {
   output->clear();
   ParseStringAppend(text, output);
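
For context, a minimal usage sketch of the new previous() accessor and Token::end_column
field, assuming the patch above is applied. DumpTokens() and NullErrorCollector are
hypothetical helper names used only for illustration; they are not part of the protobuf API.

    #include <cstdio>

    #include <google/protobuf/io/tokenizer.h>
    #include <google/protobuf/io/zero_copy_stream_impl_lite.h>

    using google::protobuf::io::ArrayInputStream;
    using google::protobuf::io::ErrorCollector;
    using google::protobuf::io::Tokenizer;

    // Hypothetical collector that drops tokenizer errors; real callers would
    // record or report them.
    class NullErrorCollector : public ErrorCollector {
     public:
      virtual void AddError(int line, int column, const std::string& message) {}
    };

    // Prints each token with its [line:column-end_column] range and notes when
    // it directly abuts the token returned by previous() (i.e. the token that
    // was current() before the last call to Next()).
    void DumpTokens(Tokenizer* tokenizer) {
      while (tokenizer->Next()) {
        const Tokenizer::Token& cur = tokenizer->current();
        const Tokenizer::Token& prev = tokenizer->previous();
        bool adjacent = prev.line == cur.line && prev.end_column == cur.column;
        printf("'%s' [%d:%d-%d]%s\n", cur.text.c_str(), cur.line, cur.column,
               cur.end_column, adjacent ? "  (adjacent to previous token)" : "");
      }
    }

    int main() {
      const char kInput[] = "message Foo { optional int32 bar = 1; }";
      ArrayInputStream input(kInput, sizeof(kInput) - 1);
      NullErrorCollector errors;
      Tokenizer tokenizer(&input, &errors);
      DumpTokens(&tokenizer);
      return 0;
    }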