| Index: src/parser.cc
|
| diff --git a/src/parser.cc b/src/parser.cc
|
| index bec0a4bd571e29dda80149b669062641c40b81e1..788cddb12cc87eec82aee101f87ad1ef520a2514 100644
|
| --- a/src/parser.cc
|
| +++ b/src/parser.cc
|
| @@ -23,6 +23,53 @@
|
| namespace v8 {
|
| namespace internal {
|
|
|
| +
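|
| +// Computes the total length of this entry in the preparse data: the fixed
|
| +// kSize header plus the variable-length identifier data that follows it.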
|
| +int FunctionEntry::Size() {
|
| + if (backing_.is_empty()) return 0;
|
| + int result = kSize;
|
| + for (int i = 0; i < identifier_count(); i++) {
|
| + result += 1; // is_one_byte
|
| + CHECK(result < backing_.length());
|
| + const int byte_length = backing_[result];
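|
| + // The character data is stored padded out to whole unsigned words, so this
|
| + // is a ceiling division of the identifier's byte length by sizeof(unsigned).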
|
| + const int word_length =
|
| + 1 + (((byte_length * sizeof(unsigned char)) - 1) / sizeof(unsigned));
|
| + result += 1; // length
|
| + result += word_length;
|
| + }
|
| + CHECK(result <= backing_.length());
|
| + return result;
|
| +}
|
| +
|
| +
|
| +const AstRawString* FunctionEntry::IdentifierIterator::Next(
|
| + AstValueFactory* ast_value_factory) {
|
| + // FunctionEntry::Size() is always called when an IdentifierIterator is
|
| + // constructed (by FunctionEntry::Identifiers), and FunctionEntry::Size()
|
| + // CHECKs that the identifiers don't run past the end of backing_, so we
|
| + // don't have to CHECK that here.
|
| + DCHECK(!AtEnd());
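|
| + // Each identifier is laid out as one word holding the is_one_byte flag,
|
| + // one word holding the character count, and then the character data padded
|
| + // out to a whole number of unsigned words.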
|
| + const bool is_one_byte = static_cast<bool>(backing_.first());
|
| + backing_ += 1;
|
| + const int length = backing_.first();
|
| + backing_ += 1;
|
| + if (is_one_byte) {
|
| + Vector<const uint8_t> data = Vector<const uint8_t>::cast(backing_);
|
| + data.Truncate(length);
|
| + const int word_length =
|
| + 1 + (((length * sizeof(uint8_t)) - 1) / sizeof(unsigned));
|
| + backing_ += word_length;
|
| + return ast_value_factory->GetOneByteString(data);
|
| + } else {
|
| + Vector<const uint16_t> data = Vector<const uint16_t>::cast(backing_);
|
| + data.Truncate(length);
|
| + const int word_length =
|
| + 1 + (((length * sizeof(uint16_t)) - 1) / sizeof(unsigned));
|
| + backing_ += word_length;
|
| + return ast_value_factory->GetTwoByteString(data);
|
| + }
|
| +}
|
| +
|
| +
|
| RegExpBuilder::RegExpBuilder(Zone* zone)
|
| : zone_(zone),
|
| pending_empty_(false),
|
| @@ -188,20 +235,29 @@ FunctionEntry ParseData::GetFunctionEntry(int start) {
|
| // start position.
|
| if ((function_index_ + FunctionEntry::kSize <= Length()) &&
|
| (static_cast<int>(Data()[function_index_]) == start)) {
|
| - int index = function_index_;
|
| - function_index_ += FunctionEntry::kSize;
|
| - Vector<unsigned> subvector(&(Data()[index]), FunctionEntry::kSize);
|
| - return FunctionEntry(subvector);
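|
| + // Entries are now variable-length (a fixed header followed by identifier
|
| + // data), so build the entry over all remaining data and advance by the
|
| + // entry's actual size.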
|
| + int remaining_length = Length() - function_index_;
|
| + Vector<unsigned> subvector(&(Data()[function_index_]), remaining_length);
|
| + FunctionEntry entry(subvector);
|
| + int entry_size = entry.Size();
|
| + CHECK(entry_size <= remaining_length);
|
| + function_index_ += entry_size;
|
| + return entry;
|
| }
|
| return FunctionEntry();
|
| }
|
|
|
|
|
| int ParseData::FunctionCount() {
|
| - int functions_size = FunctionsSize();
|
| - if (functions_size < 0) return 0;
|
| - if (functions_size % FunctionEntry::kSize != 0) return 0;
|
| - return functions_size / FunctionEntry::kSize;
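|
| + // Entries are variable-length, so the count can no longer be derived by
|
| + // division; walk the entries and count them instead.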
|
| + int function_count = 0;
|
| + int function_index = PreparseDataConstants::kHeaderSize;
|
| + while (function_index < Length()) {
|
| + function_count += 1;
|
| + Vector<unsigned> subvector(&(Data()[function_index]),
|
| + Length() - function_index);
|
| + FunctionEntry entry(subvector);
|
| + function_index += entry.Size();
|
| + }
|
| + return function_count;
|
| }
|
|
|
|
|
| @@ -216,7 +272,6 @@ bool ParseData::IsSane() {
|
| // Check that the space allocated for function entries is sane.
|
| int functions_size = FunctionsSize();
|
| if (functions_size < 0) return false;
|
| - if (functions_size % FunctionEntry::kSize != 0) return false;
|
| // Check that the total size has room for header and function entries.
|
| int minimum_size =
|
| PreparseDataConstants::kHeaderSize + functions_size;
|
| @@ -929,6 +984,8 @@ FunctionLiteral* Parser::DoParseProgram(CompilationInfo* info, Scope** scope,
|
| }
|
|
|
|
|
| +// This method is invoked by the runtime to re-parse the source of a lazily
|
| +// compiled function when that function gets called.
|
| FunctionLiteral* Parser::ParseLazy() {
|
| // It's OK to use the counters here, since this function is only called in
|
| // the main thread.
|
| @@ -3608,28 +3665,9 @@ FunctionLiteral* Parser::ParseFunctionLiteral(
|
| // compiled.
|
| // These are all things we can know at this point, without looking at the
|
| // function itself.
|
| -
|
| - // In addition, we need to distinguish between these cases:
|
| - // (function foo() {
|
| - // bar = function() { return 1; }
|
| - // })();
|
| - // and
|
| - // (function foo() {
|
| - // var a = 1;
|
| - // bar = function() { return a; }
|
| - // })();
|
| -
|
| - // Now foo will be parsed eagerly and compiled eagerly (optimization: assume
|
| - // parenthesis before the function means that it will be called
|
| - // immediately). The inner function *must* be parsed eagerly to resolve the
|
| - // possible reference to the variable in foo's scope. However, it's possible
|
| - // that it will be compiled lazily.
|
| -
|
| - // To make this additional case work, both Parser and PreParser implement a
|
| - // logic where only top-level functions will be parsed lazily.
|
| - bool is_lazily_parsed = (mode() == PARSE_LAZILY &&
|
| - scope_->AllowsLazyCompilation() &&
|
| - !parenthesized_function_);
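|
| + // Both PARSE_LAZILY and PARSE_INNER_FUNCTION_LAZILY allow skipping here:
|
| + // inner functions can now be parsed lazily because the identifiers they use
|
| + // are recorded and replayed (see SkipLazyFunctionBody).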
|
| + bool is_lazily_parsed =
|
| + (mode() != PARSE_EAGERLY && scope_->AllowsLazyCompilation() &&
|
| + !parenthesized_function_);
|
| parenthesized_function_ = false; // The bit was set for this function only.
|
|
|
| if (is_lazily_parsed) {
|
| @@ -3717,6 +3755,18 @@ void Parser::SkipLazyFunctionBody(const AstRawString* function_name,
|
| *materialized_literal_count = entry.literal_count();
|
| *expected_property_count = entry.property_count();
|
| scope_->SetStrictMode(entry.strict_mode());
|
| + if (entry.calls_eval()) {
|
| + // If there is a direct call to eval (ES5 10.4.2(2) / 15.1.2.1.1), then
|
| + // every variable gets allocated and there's little point in keeping track
|
| + // of which variables are accessed by nested functions.
|
| + scope_->DeclarationScope()->RecordEvalCall();
|
| + } else {
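|
| + // Re-create an expression for each recorded identifier so that variable
|
| + // usage inside the skipped function is still tracked in the enclosing
|
| + // scopes.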
|
| + FunctionEntry::IdentifierIterator it = entry.Identifiers();
|
| + while (!it.AtEnd()) {
|
| + ExpressionFromIdentifier(it.Next(ast_value_factory()),
|
| + RelocInfo::kNoPosition, scope_, factory());
|
| + }
|
| + }
|
| } else {
|
| // With no cached data, we partially parse the function, without building an
|
| // AST. This gathers the data needed to build a lazy function.
|
| @@ -3741,6 +3791,28 @@ void Parser::SkipLazyFunctionBody(const AstRawString* function_name,
|
| if (!*ok) {
|
| return;
|
| }
|
| +
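|
| + // As in the cached-data path above: a direct call to eval means every
|
| + // variable gets allocated, so there is no point in tracking identifiers.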
|
| + if (logger.calls_eval()) {
|
| + scope_->DeclarationScope()->RecordEvalCall();
|
| + if (compile_options() == ScriptCompiler::kProduceParserCache) {
|
| + DCHECK(log_);
|
| + log_->LogEvalCall();
|
| + }
|
| + } else {
|
| + SingletonLogger::IdentifierIterator it = logger.IdentifiersStart();
|
| + for (const AstRawString* identifier = it.Next(); identifier != NULL;
|
| + identifier = it.Next()) {
|
| + // The position doesn't matter here; we're only creating the expression
|
| + // so we can track usage of variables.
|
| + ExpressionFromIdentifier(identifier, RelocInfo::kNoPosition, scope_,
|
| + factory());
|
| + if (compile_options() == ScriptCompiler::kProduceParserCache) {
|
| + DCHECK(log_);
|
| + log_->LogIdentifier(identifier);
|
| + }
|
| + }
|
| + }
|
| +
|
| total_preparse_skipped_ += scope_->end_position() - function_block_pos;
|
| *materialized_literal_count = logger.literals();
|
| *expected_property_count = logger.properties();
|
| @@ -3761,9 +3833,14 @@ void Parser::SkipLazyFunctionBody(const AstRawString* function_name,
|
| ZoneList<Statement*>* Parser::ParseEagerFunctionBody(
|
| const AstRawString* function_name, int pos, Variable* fvar,
|
| Token::Value fvar_init_op, bool is_generator, bool* ok) {
|
| - // Everything inside an eagerly parsed function will be parsed eagerly
|
| - // (see comment above).
|
| - ParsingModeScope parsing_mode(this, PARSE_EAGERLY);
|
| + // We can lazily parse functions enclosed in eagerly parsed functions, but
|
| + // we have to keep track of which variables the inner functions access - or
|
| + // whether the inner function calls eval (in which case we have to assume
|
| + // every variable might be accessed inside the eval).
|
| + Mode mode =
|
| + (mode_ == PARSE_EAGERLY) ? PARSE_EAGERLY : PARSE_INNER_FUNCTION_LAZILY;
|
| + ParsingModeScope parsing_mode(this, mode);
|
| +
|
| ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8, zone());
|
| if (fvar != NULL) {
|
| VariableProxy* fproxy = scope_->NewUnresolved(
|
| @@ -3827,7 +3904,8 @@ PreParser::PreParseResult Parser::ParseLazyFunctionBodyWithPreParser(
|
| DCHECK_EQ(Token::LBRACE, scanner()->current_token());
|
|
|
| if (reusable_preparser_ == NULL) {
|
| - reusable_preparser_ = new PreParser(&scanner_, NULL, stack_limit_);
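|
| + // The preparser now receives the AstValueFactory and zone, presumably so
|
| + // that the identifiers it logs can be handed back as AstRawStrings (see
|
| + // SkipLazyFunctionBody above).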
|
| + reusable_preparser_ = new PreParser(&scanner_, NULL, ast_value_factory(),
|
| + zone(), stack_limit_);
|
| reusable_preparser_->set_allow_harmony_scoping(allow_harmony_scoping());
|
| reusable_preparser_->set_allow_modules(allow_modules());
|
| reusable_preparser_->set_allow_natives_syntax(allow_natives_syntax());
|
| @@ -3839,10 +3917,9 @@ PreParser::PreParseResult Parser::ParseLazyFunctionBodyWithPreParser(
|
| reusable_preparser_->set_allow_harmony_object_literals(
|
| allow_harmony_object_literals());
|
| }
|
| - PreParser::PreParseResult result =
|
| - reusable_preparser_->PreParseLazyFunction(strict_mode(),
|
| - is_generator(),
|
| - logger);
|
| + PreParser::PreParseResult result = reusable_preparser_->PreParseLazyFunction(
|
| + strict_mode(), is_generator(), mode_ == PARSE_INNER_FUNCTION_LAZILY,
|
| + logger);
|
| if (pre_parse_timer_ != NULL) {
|
| pre_parse_timer_->Stop();
|
| }
|
|
|