| Index: tools/gn/tokenizer.cc
| diff --git a/tools/gn/tokenizer.cc b/tools/gn/tokenizer.cc
| index 4dd71d5beaf953e66acaeb19a7d062270f632ee7..0568becbc8dcb51f92808b74c14a83de5cc45f32 100644
| --- a/tools/gn/tokenizer.cc
| +++ b/tools/gn/tokenizer.cc
| @@ -74,7 +74,7 @@ Tokenizer::Tokenizer(const InputFile* input_file, Err* err)
|        err_(err),
|        cur_(0),
|        line_number_(1),
| -      char_in_line_(1) {
| +      column_number_(1) {
|  }
| 
|  Tokenizer::~Tokenizer() {
| @@ -126,7 +126,8 @@ std::vector<Token> Tokenizer::Run() {
|           (tokens_.empty() || tokens_.back().type() != Token::SUFFIX_COMMENT ||
|            tokens_.back().location().line_number() + 1 !=
|                location.line_number() ||
| -          tokens_.back().location().char_offset() != location.char_offset())) {
| +          tokens_.back().location().column_number() !=
| +              location.column_number())) {
|         type = Token::LINE_COMMENT;
|         if (!at_end())  // Could be EOF.
|           Advance();  // The current \n.
| @@ -374,16 +375,16 @@ void Tokenizer::Advance() {
|    DCHECK(cur_ < input_.size());
|    if (IsCurrentNewline()) {
|      line_number_++;
| -    char_in_line_ = 1;
| +    column_number_ = 1;
|    } else {
| -    char_in_line_++;
| +    column_number_++;
|    }
|    cur_++;
|  }
| 
|  Location Tokenizer::GetCurrentLocation() const {
|    return Location(
| -      input_file_, line_number_, char_in_line_, static_cast<int>(cur_));
| +      input_file_, line_number_, column_number_, static_cast<int>(cur_));
|  }
| 
|  Err Tokenizer::GetErrorForInvalidToken(const Location& location) const {
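
For readers skimming the patch, the bookkeeping being renamed is simple: Advance() bumps line_number_ and resets column_number_ to 1 whenever it steps over a newline, and bumps column_number_ otherwise, so the pair always describes the 1-based position of cur_. A minimal standalone sketch of that rule, using illustrative names that are not GN's own:

#include <cstdio>
#include <string>

// Illustrative only: mirrors the reset-on-newline / increment-otherwise logic
// that Tokenizer::Advance() applies to line_number_ and column_number_.
struct Position {
  int line = 1;    // 1-based, like line_number_(1) in the constructor.
  int column = 1;  // 1-based, like column_number_(1) in the constructor.
};

Position PositionOfOffset(const std::string& input, size_t offset) {
  Position pos;
  for (size_t i = 0; i < offset && i < input.size(); ++i) {
    if (input[i] == '\n') {
      pos.line++;
      pos.column = 1;
    } else {
      pos.column++;
    }
  }
  return pos;
}

int main() {
  std::string source = "a = 1\nb = 2\n";
  Position p = PositionOfOffset(source, 8);  // offset 8 is the '=' on line 2.
  std::printf("line %d, column %d\n", p.line, p.column);  // line 2, column 3
  return 0;
}

The patch itself does not change this behavior; judging by the hunks, it only renames the counter (char_in_line_ to column_number_) and, in the suffix-comment check in Run(), switches from Location's char_offset() accessor to a column_number() accessor so the name matches what the value represents.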