Chromium Code Reviews

Index: components/omnibox/browser/scored_history_match.cc
diff --git a/components/omnibox/browser/scored_history_match.cc b/components/omnibox/browser/scored_history_match.cc
index bdde8e547430e18c6a9659cc48203cde566edf58..bc7a5920917015416b6eaf8fa2016976c2eeb93c 100644
--- a/components/omnibox/browser/scored_history_match.cc
+++ b/components/omnibox/browser/scored_history_match.cc
@@ -109,6 +109,8 @@ float ScoredHistoryMatch::bookmark_value_;
 float ScoredHistoryMatch::typed_value_;
 bool ScoredHistoryMatch::fix_few_visits_bug_;
 bool ScoredHistoryMatch::frequency_uses_sum_;
+OmniboxFieldTrial::NumMatchesMultipliers*
+    ScoredHistoryMatch::num_matches_to_frequency_multiplier_ = nullptr;
 size_t ScoredHistoryMatch::max_visits_to_score_;
 bool ScoredHistoryMatch::allow_tld_matches_;
 bool ScoredHistoryMatch::allow_scheme_matches_;
@@ -133,8 +135,8 @@ ScoredHistoryMatch::ScoredHistoryMatch()
                          WordStarts(),
                          RowWordStarts(),
                          false,
-                         base::Time::Max()) {
-}
+                         1,
+                         base::Time::Max()) {}
 
 ScoredHistoryMatch::ScoredHistoryMatch(
     const history::URLRow& row,
@@ -144,6 +146,7 @@ ScoredHistoryMatch::ScoredHistoryMatch(
     const WordStarts& terms_to_word_starts_offsets,
     const RowWordStarts& word_starts,
     bool is_url_bookmarked,
+    size_t num_matching_pages,
     base::Time now)
     : HistoryMatch(row, 0, false, false), raw_score(0) {
   // NOTE: Call Init() before doing any validity checking to ensure that the
@@ -260,7 +263,8 @@ ScoredHistoryMatch::ScoredHistoryMatch(
   const float topicality_score = GetTopicalityScore(
       terms_vector.size(), url, terms_to_word_starts_offsets, word_starts);
-  const float frequency_score = GetFrequency(now, is_url_bookmarked, visits);
+  const float frequency_score =
+      GetFrequency(now, is_url_bookmarked, visits, num_matching_pages);
   raw_score = base::saturated_cast<int>(GetFinalRelevancyScore(
       topicality_score, frequency_score, *hqp_relevance_buckets_));
@@ -412,6 +416,10 @@ void ScoredHistoryMatch::Init() {
   max_visits_to_score_ = OmniboxFieldTrial::HQPMaxVisitsToScore();
   frequency_uses_sum_ = OmniboxFieldTrial::HQPFreqencyUsesSum();
   fix_few_visits_bug_ = OmniboxFieldTrial::HQPFixFewVisitsBug();
+  num_matches_to_frequency_multiplier_ =
+      new OmniboxFieldTrial::NumMatchesMultipliers();
Peter Kasting, 2016/12/01 07:07:53:
  This leaks. If you want a leaky object of class t…

Mark P, 2016/12/04 01:06:42:
  Still TODO.

Mark P, 2016/12/06 21:02:47:
  I don't understand how this macro should be used i…

Peter Kasting, 2016/12/06 21:41:27:
  There are two ways to do this I can think of:
  (1) …

Mark P, 2016/12/08 00:21:31:
  Thanks for the explanation. I'll do something lik…
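A note for readers: the replies above are truncated in this capture, so the snippet below is only a sketch of one common Chromium pattern for a deliberately leaked, lazily built table (a function-local static initialized on first use), not necessarily the approach Peter had in mind. The GetNumMatchesMultipliers() helper is hypothetical; OmniboxFieldTrial::NumMatchesMultipliers and HQPGetNumMatchesMultipliers() come from the patch itself.

// Hypothetical sketch, not part of the patch: build the multiplier table once
// on first use and intentionally leak it, so repeated calls to Init() do not
// allocate again and there is no exit-time destructor.
#include "components/omnibox/browser/omnibox_field_trial.h"

const OmniboxFieldTrial::NumMatchesMultipliers& GetNumMatchesMultipliers() {
  static OmniboxFieldTrial::NumMatchesMultipliers* multipliers = [] {
    auto* table = new OmniboxFieldTrial::NumMatchesMultipliers();
    OmniboxFieldTrial::HQPGetNumMatchesMultipliers(table);
    return table;
  }();
  return *multipliers;
}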
+  OmniboxFieldTrial::HQPGetNumMatchesMultipliers(
+      num_matches_to_frequency_multiplier_);
   allow_tld_matches_ = OmniboxFieldTrial::HQPAllowMatchInTLDValue();
   allow_scheme_matches_ = OmniboxFieldTrial::HQPAllowMatchInSchemeValue();
   num_title_words_to_allow_ = OmniboxFieldTrial::HQPNumTitleWordsToAllow();
@@ -581,7 +589,8 @@ float ScoredHistoryMatch::GetRecencyScore(int last_visit_days_ago) const {
 float ScoredHistoryMatch::GetFrequency(const base::Time& now,
                                        const bool bookmarked,
-                                       const VisitInfoVector& visits) const {
+                                       const VisitInfoVector& visits,
+                                       const size_t num_matching_pages) const {
   // Compute the weighted sum of |value_of_transition| over the last at most
   // |max_visits_to_score_| visits, where each visit is weighted using
   // GetRecencyScore() based on how many days ago it happened.
@@ -603,17 +612,27 @@ float ScoredHistoryMatch::GetFrequency(const base::Time& now,
     const float bucket_weight = GetRecencyScore((now - i->first).InDays());
     summed_visit_points += (value_of_transition * bucket_weight);
   }
-  if (frequency_uses_sum_)
-    return summed_visit_points;
-
-  // Compute the average weighted value_of_transition and return it.
-  // Use |max_visits_to_score_| as the denominator for the average regardless of
-  // how many visits there were in order to penalize a match that has
-  // fewer visits than kMaxVisitsToScore.
-  if (fix_few_visits_bug_)
-    return summed_visit_points / ScoredHistoryMatch::max_visits_to_score_;
-  return visits.size() * summed_visit_points /
-         ScoredHistoryMatch::max_visits_to_score_;
+  float frequency;
+  if (frequency_uses_sum_) {
+    frequency = summed_visit_points;
+  } else {
+    // Compute the average weighted value_of_transition and return it.
+    // Use |max_visits_to_score_| as the denominator for the average regardless
+    // of how many visits there were in order to penalize a match that has
+    // fewer visits than kMaxVisitsToScore.
+    if (fix_few_visits_bug_) {
+      frequency =
+          summed_visit_points / ScoredHistoryMatch::max_visits_to_score_;
+    } else {
+      frequency = visits.size() * summed_visit_points /
+                  ScoredHistoryMatch::max_visits_to_score_;
+    }
+  }
+  // Boost the score if applicable.
+  if (num_matches_to_frequency_multiplier_->find(num_matching_pages) !=
+      num_matches_to_frequency_multiplier_->end())
+    frequency *= (*num_matches_to_frequency_multiplier_)[num_matching_pages];
+  return frequency;
Peter Kasting, 2016/12/01 07:07:53:
  I don't love putting this here. Part of this is c…

Mark P, 2016/12/04 01:06:42:
  I agree with your reasoning below. After some tho…
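Separately from the question of where this boost should live, the new code performs two map lookups (find() followed by operator[]). Assuming NumMatchesMultipliers is a std::map-like type, which the patch's usage suggests but this capture does not confirm, the iterator returned by find() can be reused, roughly:

// Hypothetical sketch, not part of the patch: reuse the iterator from find()
// so the multiplier table is searched only once.  Variable names follow the
// patch; the map-like interface is an assumption.
const auto boost_it =
    num_matches_to_frequency_multiplier_->find(num_matching_pages);
if (boost_it != num_matches_to_frequency_multiplier_->end())
  frequency *= boost_it->second;
return frequency;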
 }
 
 // static