Index: chrome/browser/supervised_user/supervised_user_service.cc
diff --git a/chrome/browser/supervised_user/supervised_user_service.cc b/chrome/browser/supervised_user/supervised_user_service.cc
index 8f0a559890127c54baf25371b298f2520469952f..9816eb09a0cfe6221dfb1b7f86e2e25699e9ff98 100644
--- a/chrome/browser/supervised_user/supervised_user_service.cc
+++ b/chrome/browser/supervised_user/supervised_user_service.cc
@@ -111,6 +111,17 @@ void SupervisedUserService::URLFilterContext::LoadWhitelists(
                  io_url_filter_, base::Passed(&site_lists_copy)));
 }
 
+void SupervisedUserService::URLFilterContext::LoadBlacklist(
+    const base::FilePath& path) {
+  // For now, support loading only once. If we want to support re-load, we'll
+  // have to clear the blacklist pointer in the url filters first.
+  DCHECK_EQ(0u, blacklist_.GetEntryCount());
+  blacklist_.ReadFromFile(
+      path,
+      base::Bind(&SupervisedUserService::URLFilterContext::OnBlacklistLoaded,
+                 base::Unretained(this)));
+}
+
 void SupervisedUserService::URLFilterContext::SetManualHosts(
     scoped_ptr<std::map<std::string, bool> > host_map) {
   ui_url_filter_->SetManualHosts(host_map.get());
@@ -131,6 +142,16 @@ void SupervisedUserService::URLFilterContext::SetManualURLs(
                  io_url_filter_, base::Owned(url_map.release())));
 }
 
+void SupervisedUserService::URLFilterContext::OnBlacklistLoaded() {
+  ui_url_filter_->SetBlacklist(&blacklist_);
+  BrowserThread::PostTask(
+      BrowserThread::IO,
+      FROM_HERE,
+      base::Bind(&SupervisedUserURLFilter::SetBlacklist,
+                 io_url_filter_,
+                 &blacklist_));
+}
+
 SupervisedUserService::SupervisedUserService(Profile* profile)
     : profile_(profile),
       active_(false),
@@ -519,6 +540,10 @@ void SupervisedUserService::UpdateSiteLists() {
 #endif
 }
 
+void SupervisedUserService::LoadBlacklist(const base::FilePath& path) {
+  url_filter_context_.LoadBlacklist(path);
+}
+
 bool SupervisedUserService::AccessRequestsEnabled() {
   if (waiting_for_permissions_)
     return false;
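
Note: this patch only adds the loading plumbing; nothing in this file calls LoadBlacklist() yet. Ownership and threading follow the existing URLFilterContext pattern: blacklist_ is a member of the context, and each SupervisedUserURLFilter receives a raw &blacklist_ pointer (the UI-side filter directly, the IO-side filter via a posted task), which assumes the context and the blacklist it owns outlive both filters. Likewise, base::Unretained(this) in LoadBlacklist() assumes the context outlives the asynchronous file read. A minimal sketch of a possible UI-thread call site follows; the helper function, the filename, and the use of the profile directory are illustrative assumptions, not part of this patch:

// Hypothetical call site (not part of this CL). LoadBlacklist() reads the
// file asynchronously; OnBlacklistLoaded() then installs the loaded data
// into both the UI-thread and the IO-thread SupervisedUserURLFilter.
// Requires base/files/file_path.h and chrome/browser/profiles/profile.h.
void MaybeLoadBlacklist(Profile* profile, SupervisedUserService* service) {
  const base::FilePath::CharType kBlacklistFile[] =
      FILE_PATH_LITERAL("blacklist.bin");  // assumed filename
  service->LoadBlacklist(profile->GetPath().Append(kBlacklistFile));
}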