Chromium Code Reviews
| Index: chrome/browser/chromeos/drive/job_scheduler.cc |
| diff --git a/chrome/browser/chromeos/drive/job_scheduler.cc b/chrome/browser/chromeos/drive/job_scheduler.cc |
| index 815f821abc2aa75e64f3ce30673bf0e4f5491748..921996d451fa62e64d1a3e49ab30f1b9c31e0b30 100644 |
| --- a/chrome/browser/chromeos/drive/job_scheduler.cc |
| +++ b/chrome/browser/chromeos/drive/job_scheduler.cc |
| @@ -33,22 +33,21 @@ const int JobScheduler::kMaxJobCount[] = { |
| 1, // FILE_QUEUE |
| }; |
| -JobScheduler::QueueEntry::QueueEntry() |
| - : job_id(-1), |
| +JobScheduler::JobEntry::JobEntry(JobType type) |
| + : job_info(type), |
| context(DriveClientContext(USER_INITIATED)) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| } |
| -JobScheduler::QueueEntry::~QueueEntry() { |
| +JobScheduler::JobEntry::~JobEntry() { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| } |
| -bool JobScheduler::QueueEntry::Compare( |
| - const JobScheduler::QueueEntry* left, |
| - const JobScheduler::QueueEntry* right) { |
| +bool JobScheduler::JobEntry::Compare(const JobEntry& left, |
|
hidehiko
2013/05/09 08:25:23
Less may be more descriptive?
hashimoto
2013/05/09 08:53:18
Done.
|
| + const JobEntry& right) { |
| // Lower values of ContextType are higher priority. |
| // See also the comment at ContextType. |
| - return (left->context.type < right->context.type); |
| + return (left.context.type < right.context.type); |
| } |
| JobScheduler::JobScheduler( |
| @@ -79,16 +78,13 @@ JobScheduler::~JobScheduler() { |
| } |
| DCHECK_EQ(num_pending_jobs + num_running_jobs, job_map_.size()); |
| - for (int i = 0; i < NUM_QUEUES; ++i) { |
| - STLDeleteElements(&queue_[i]); |
| - } |
| net::NetworkChangeNotifier::RemoveConnectionTypeObserver(this); |
| } |
| std::vector<JobInfo> JobScheduler::GetJobInfoList() { |
| std::vector<JobInfo> job_info_list; |
| for (JobIDMap::iterator iter(&job_map_); !iter.IsAtEnd(); iter.Advance()) |
| - job_info_list.push_back(*iter.GetCurrentValue()); |
| + job_info_list.push_back(iter.GetCurrentValue()->job_info); |
| return job_info_list; |
| } |
| @@ -108,9 +104,9 @@ void JobScheduler::CancelJob(JobID job_id) { |
| // TODO(kinaba): Move the cancellation feature from DriveService |
| // to JobScheduler. In particular, implement cancel based on job_id. |
| // crbug.com/231029 |
| - JobInfo* info = job_map_.Lookup(job_id); |
| - if (info) |
| - drive_service_->CancelForFilePath(info->file_path); |
| + JobEntry* job = job_map_.Lookup(job_id); |
| + if (job) |
| + drive_service_->CancelForFilePath(job->job_info.file_path); |
| } |
| void JobScheduler::CancelAllJobs() { |
| @@ -126,10 +122,10 @@ void JobScheduler::GetAccountMetadata( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_ACCOUNT_METADATA)); |
| new_job->get_account_metadata_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_ACCOUNT_METADATA); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetAboutResource( |
| @@ -137,10 +133,10 @@ void JobScheduler::GetAboutResource( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_ABOUT_RESOURCE)); |
| new_job->get_about_resource_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_ABOUT_RESOURCE); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetAppList( |
| @@ -148,10 +144,10 @@ void JobScheduler::GetAppList( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_APP_LIST)); |
| new_job->get_app_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_APP_LIST); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetAllResourceList( |
| @@ -159,10 +155,10 @@ void JobScheduler::GetAllResourceList( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_ALL_RESOURCE_LIST)); |
| new_job->get_resource_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_ALL_RESOURCE_LIST); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetResourceListInDirectory( |
| @@ -171,11 +167,12 @@ void JobScheduler::GetResourceListInDirectory( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry( |
| + TYPE_GET_RESOURCE_LIST_IN_DIRECTORY)); |
| new_job->directory_resource_id = directory_resource_id; |
| new_job->get_resource_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_RESOURCE_LIST_IN_DIRECTORY); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::Search( |
| @@ -184,11 +181,11 @@ void JobScheduler::Search( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_SEARCH)); |
| new_job->search_query = search_query; |
| new_job->get_resource_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_SEARCH); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetChangeList( |
| @@ -197,11 +194,11 @@ void JobScheduler::GetChangeList( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_CHANGE_LIST)); |
| new_job->start_changestamp = start_changestamp; |
| new_job->get_resource_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_CHANGE_LIST); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::ContinueGetResourceList( |
| @@ -210,11 +207,11 @@ void JobScheduler::ContinueGetResourceList( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_CONTINUE_GET_RESOURCE_LIST)); |
| new_job->feed_url = feed_url; |
| new_job->get_resource_list_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_CONTINUE_GET_RESOURCE_LIST); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::GetResourceEntry( |
| @@ -224,12 +221,12 @@ void JobScheduler::GetResourceEntry( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_GET_RESOURCE_ENTRY)); |
| new_job->resource_id = resource_id; |
| new_job->context = context; |
| new_job->get_resource_entry_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_GET_RESOURCE_ENTRY); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::DeleteResource( |
| @@ -238,11 +235,11 @@ void JobScheduler::DeleteResource( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_DELETE_RESOURCE)); |
| new_job->resource_id = resource_id; |
| new_job->entry_action_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_DELETE_RESOURCE); |
| + StartNewJob(new_job.Pass()); |
| } |
| @@ -253,12 +250,12 @@ void JobScheduler::CopyHostedDocument( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_COPY_HOSTED_DOCUMENT)); |
| new_job->resource_id = resource_id; |
| new_job->new_name = new_name; |
| new_job->get_resource_entry_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_COPY_HOSTED_DOCUMENT); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::RenameResource( |
| @@ -268,12 +265,12 @@ void JobScheduler::RenameResource( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_RENAME_RESOURCE)); |
| new_job->resource_id = resource_id; |
| new_job->new_name = new_name; |
| new_job->entry_action_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_RENAME_RESOURCE); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::AddResourceToDirectory( |
| @@ -283,12 +280,12 @@ void JobScheduler::AddResourceToDirectory( |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| DCHECK(!callback.is_null()); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_ADD_RESOURCE_TO_DIRECTORY)); |
| new_job->parent_resource_id = parent_resource_id; |
| new_job->resource_id = resource_id; |
| new_job->entry_action_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_ADD_RESOURCE_TO_DIRECTORY); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::RemoveResourceFromDirectory( |
| @@ -297,12 +294,13 @@ void JobScheduler::RemoveResourceFromDirectory( |
| const google_apis::EntryActionCallback& callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry( |
| + TYPE_REMOVE_RESOURCE_FROM_DIRECTORY)); |
| new_job->parent_resource_id = parent_resource_id; |
| new_job->resource_id = resource_id; |
| new_job->entry_action_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_REMOVE_RESOURCE_FROM_DIRECTORY); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::AddNewDirectory( |
| @@ -311,12 +309,12 @@ void JobScheduler::AddNewDirectory( |
| const google_apis::GetResourceEntryCallback& callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_ADD_NEW_DIRECTORY)); |
| new_job->parent_resource_id = parent_resource_id; |
| new_job->directory_name = directory_name; |
| new_job->get_resource_entry_callback = callback; |
| - StartNewJob(new_job.Pass(), TYPE_ADD_NEW_DIRECTORY); |
| + StartNewJob(new_job.Pass()); |
| } |
| JobID JobScheduler::DownloadFile( |
| @@ -328,7 +326,7 @@ JobID JobScheduler::DownloadFile( |
| const google_apis::GetContentCallback& get_content_callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_DOWNLOAD_FILE)); |
| new_job->drive_file_path = virtual_path; |
| new_job->local_file_path = local_cache_path; |
| new_job->download_url = download_url; |
| @@ -336,7 +334,7 @@ JobID JobScheduler::DownloadFile( |
| new_job->download_action_callback = download_action_callback; |
| new_job->get_content_callback = get_content_callback; |
| - return StartNewJob(new_job.Pass(), TYPE_DOWNLOAD_FILE); |
| + return StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::UploadNewFile( |
| @@ -349,7 +347,7 @@ void JobScheduler::UploadNewFile( |
| const google_apis::UploadCompletionCallback& callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_UPLOAD_NEW_FILE)); |
| new_job->resource_id = parent_resource_id; |
| new_job->drive_file_path = drive_file_path; |
| new_job->local_file_path = local_file_path; |
| @@ -358,7 +356,7 @@ void JobScheduler::UploadNewFile( |
| new_job->upload_completion_callback = callback; |
| new_job->context = context; |
| - StartNewJob(new_job.Pass(), TYPE_UPLOAD_NEW_FILE); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::UploadExistingFile( |
| @@ -371,7 +369,7 @@ void JobScheduler::UploadExistingFile( |
| const google_apis::UploadCompletionCallback& upload_completion_callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_UPLOAD_EXISTING_FILE)); |
| new_job->resource_id = resource_id; |
| new_job->drive_file_path = drive_file_path; |
| new_job->local_file_path = local_file_path; |
| @@ -380,7 +378,7 @@ void JobScheduler::UploadExistingFile( |
| new_job->upload_completion_callback = upload_completion_callback; |
| new_job->context = context; |
| - StartNewJob(new_job.Pass(), TYPE_UPLOAD_EXISTING_FILE); |
| + StartNewJob(new_job.Pass()); |
| } |
| void JobScheduler::CreateFile( |
| @@ -392,7 +390,7 @@ void JobScheduler::CreateFile( |
| const google_apis::UploadCompletionCallback& callback) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - scoped_ptr<QueueEntry> new_job(new QueueEntry); |
| + scoped_ptr<JobEntry> new_job(new JobEntry(TYPE_CREATE_FILE)); |
| new_job->resource_id = parent_resource_id; |
| new_job->drive_file_path = drive_file_path; |
| new_job->title = title; |
| @@ -400,34 +398,40 @@ void JobScheduler::CreateFile( |
| new_job->upload_completion_callback = callback; |
| new_job->context = context; |
| - StartNewJob(new_job.Pass(), TYPE_CREATE_FILE); |
| + StartNewJob(new_job.Pass()); |
| } |
| -JobID JobScheduler::StartNewJob(scoped_ptr<QueueEntry> job, JobType type) { |
| - // job_info is owned by job_map_ and released when it is removed in OnJobDone. |
| - JobInfo* job_info = new JobInfo(type); |
| - job->job_id = job_info->job_id = job_map_.Add(job_info); |
| +JobID JobScheduler::StartNewJob(scoped_ptr<JobEntry> job) { |
|
hidehiko
2013/05/09 08:25:23
nit: How about add DCHECK for |job| just in case?
hashimoto
2013/05/09 08:53:18
Done.
|
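(For reference: the check being asked for above is presumably a single assertion at the top of the new StartNewJob(); the added line is not visible in this hunk, so the line below is only a sketch of the reviewer's suggestion, not the actual patch.)

  DCHECK(job);  // Fail fast if a null scoped_ptr is ever passed in.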
| + // |job| is owned by job_map_ and released when it is removed in OnJobDone. |
| + JobInfo* job_info = &job->job_info; |
| job_info->file_path = job->drive_file_path; |
| + job_info->job_id = job_map_.Add(job.release()); |
| - QueueJob(job.Pass()); |
| + QueueJob(job_info->job_id); |
| NotifyJobAdded(*job_info); |
| - StartJobLoop(GetJobQueueType(type)); |
| + StartJobLoop(GetJobQueueType(job_info->job_type)); |
| return job_info->job_id; |
| } |
| -void JobScheduler::QueueJob(scoped_ptr<QueueEntry> job) { |
| +void JobScheduler::QueueJob(JobID job_id) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - JobInfo* job_info = job_map_.Lookup(job->job_id); |
| - DCHECK(job_info); |
| + JobEntry* job_entry = job_map_.Lookup(job_id); |
| + DCHECK(job_entry); |
| + const JobInfo& job_info = job_entry->job_info; |
| - QueueType queue_type = GetJobQueueType(job_info->job_type); |
| - std::list<QueueEntry*>& queue = queue_[queue_type]; |
| + QueueType queue_type = GetJobQueueType(job_info.job_type); |
| + std::list<JobID>* queue = &queue_[queue_type]; |
| - queue.push_back(job.release()); |
| - queue.sort(&QueueEntry::Compare); |
| + std::list<JobID>::iterator it = queue->begin(); |
| + for (; it != queue->end(); ++it) { |
| + JobEntry* job_entry2 = job_map_.Lookup(*it); |
| + if (JobEntry::Compare(*job_entry, *job_entry2)) |
|
hidehiko
2013/05/09 08:25:23
If I understand correctly, we should put the job a…
hashimoto
2013/05/09 08:53:18
This code puts the new job at just before the first…
hidehiko
2013/05/09 08:56:36
Oops. Sorry, I misread. Thank you for your explanation.
|
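(To make the ordering discussed above concrete: the insertion loop in QueueJob() places the new job just before the first queued job whose priority is strictly lower, so it lands behind every job of equal priority and FIFO order is preserved within a priority level. The standalone sketch below shows the same rule with a simplified queue of (id, priority) pairs instead of the real JobEntry/JobIDMap types; InsertByPriority and the pair layout are illustrative only, and a lower priority value means higher priority, as with ContextType.)

#include <list>
#include <utility>

typedef std::pair<int, int> Item;  // (job_id, priority_value)

void InsertByPriority(std::list<Item>* queue, const Item& new_item) {
  std::list<Item>::iterator it = queue->begin();
  for (; it != queue->end(); ++it) {
    // Stop at the first queued item with strictly lower priority,
    // i.e. a larger priority value, than the new item.
    if (new_item.second < it->second)
      break;
  }
  // Inserting before |it| keeps FIFO order among equal-priority items.
  queue->insert(it, new_item);
}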
| + break; |
| + } |
| + queue->insert(it, job_id); |
| - util::Log("Job queued: %s - %s", job_info->ToString().c_str(), |
| + util::Log("Job queued: %s - %s", job_info.ToString().c_str(), |
| GetQueueInfo(queue_type).c_str()); |
| } |
| @@ -445,34 +449,32 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| return; |
| } |
| + JobID job_id = queue_[queue_type].front(); |
| + JobEntry* entry = job_map_.Lookup(job_id); |
| + DCHECK(entry); |
| + |
| // Check if we should defer based on the first item in the queue |
| - if (ShouldStopJobLoop(queue_type, queue_[queue_type].front()->context)) { |
| + if (ShouldStopJobLoop(queue_type, entry->context)) { |
| return; |
| } |
| // Increment the number of jobs. |
| ++jobs_running_[queue_type]; |
| - // Should copy before calling queue_.pop_front(). |
| - scoped_ptr<QueueEntry> queue_entry(queue_[queue_type].front()); |
| queue_[queue_type].pop_front(); |
| - JobInfo* job_info = job_map_.Lookup(queue_entry->job_id); |
| - DCHECK(job_info); |
| + JobInfo* job_info = &entry->job_info; |
| job_info->state = STATE_RUNNING; |
| job_info->start_time = base::Time::Now(); |
| NotifyJobUpdated(*job_info); |
| - // The some arguments are evaluated after bind, so we copy the pointer to the |
| - // QueueEntry |
| - QueueEntry* entry = queue_entry.get(); |
| - |
| switch (job_info->job_type) { |
| case TYPE_GET_ABOUT_RESOURCE: { |
| drive_service_->GetAboutResource( |
| base::Bind(&JobScheduler::OnGetAboutResourceJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_about_resource_callback)); |
| } |
| break; |
| @@ -480,7 +482,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| drive_service_->GetAccountMetadata( |
| base::Bind(&JobScheduler::OnGetAccountMetadataJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_account_metadata_callback)); |
| } |
| break; |
| @@ -488,7 +491,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| drive_service_->GetAppList( |
| base::Bind(&JobScheduler::OnGetAppListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_app_list_callback)); |
| } |
| break; |
| @@ -496,7 +500,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| drive_service_->GetAllResourceList( |
| base::Bind(&JobScheduler::OnGetResourceListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_list_callback)); |
| } |
| break; |
| @@ -505,7 +510,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->directory_resource_id, |
| base::Bind(&JobScheduler::OnGetResourceListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_list_callback)); |
| } |
| break; |
| @@ -514,7 +520,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->search_query, |
| base::Bind(&JobScheduler::OnGetResourceListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_list_callback)); |
| } |
| break; |
| @@ -523,7 +530,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->start_changestamp, |
| base::Bind(&JobScheduler::OnGetResourceListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_list_callback)); |
| } |
| break; |
| @@ -532,7 +540,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->feed_url, |
| base::Bind(&JobScheduler::OnGetResourceListJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_list_callback)); |
| } |
| break; |
| @@ -541,7 +550,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->resource_id, |
| base::Bind(&JobScheduler::OnGetResourceEntryJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_entry_callback)); |
| } |
| break; |
| @@ -551,7 +561,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| "", // etag |
| base::Bind(&JobScheduler::OnEntryActionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->entry_action_callback)); |
| } |
| break; |
| @@ -561,7 +572,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->new_name, |
| base::Bind(&JobScheduler::OnGetResourceEntryJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_entry_callback)); |
| } |
| break; |
| @@ -571,7 +583,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->new_name, |
| base::Bind(&JobScheduler::OnEntryActionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->entry_action_callback)); |
| } |
| break; |
| @@ -581,7 +594,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->resource_id, |
| base::Bind(&JobScheduler::OnEntryActionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->entry_action_callback)); |
| } |
| break; |
| @@ -591,7 +605,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->resource_id, |
| base::Bind(&JobScheduler::OnEntryActionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->entry_action_callback)); |
| } |
| break; |
| @@ -601,7 +616,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->directory_name, |
| base::Bind(&JobScheduler::OnGetResourceEntryJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry))); |
| + job_id, |
| + entry->get_resource_entry_callback)); |
| } |
| break; |
| @@ -612,11 +628,12 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->download_url, |
| base::Bind(&JobScheduler::OnDownloadActionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry)), |
| + job_id, |
| + entry->download_action_callback), |
| entry->get_content_callback, |
| base::Bind(&JobScheduler::UpdateProgress, |
| weak_ptr_factory_.GetWeakPtr(), |
| - job_info->job_id)); |
| + job_id)); |
| } |
| break; |
| @@ -629,10 +646,11 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->content_type, |
| base::Bind(&JobScheduler::OnUploadCompletionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry)), |
| + job_id, |
| + entry->upload_completion_callback), |
| base::Bind(&JobScheduler::UpdateProgress, |
| weak_ptr_factory_.GetWeakPtr(), |
| - job_info->job_id)); |
| + job_id)); |
| } |
| break; |
| @@ -645,10 +663,11 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->etag, |
| base::Bind(&JobScheduler::OnUploadCompletionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry)), |
| + job_id, |
| + entry->upload_completion_callback), |
| base::Bind(&JobScheduler::UpdateProgress, |
| weak_ptr_factory_.GetWeakPtr(), |
| - job_info->job_id)); |
| + job_id)); |
| } |
| break; |
| @@ -663,7 +682,8 @@ void JobScheduler::DoJobLoop(QueueType queue_type) { |
| entry->content_type, |
| base::Bind(&JobScheduler::OnUploadCompletionJobDone, |
| weak_ptr_factory_.GetWeakPtr(), |
| - base::Passed(&queue_entry)), |
| + job_id, |
| + entry->upload_completion_callback), |
| google_apis::ProgressCallback()); |
| } |
| break; |
| @@ -748,12 +768,12 @@ void JobScheduler::ResetThrottleAndContinueJobLoop(QueueType queue_type) { |
| queue_type)); |
| } |
| -bool JobScheduler::OnJobDone(scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| - FileError error) { |
| +bool JobScheduler::OnJobDone(JobID job_id, FileError error) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - JobInfo* job_info = job_map_.Lookup(queue_entry->job_id); |
| - DCHECK(job_info); |
| + JobEntry* job_entry = job_map_.Lookup(job_id); |
| + DCHECK(job_entry); |
| + JobInfo* job_info = &job_entry->job_info; |
| QueueType queue_type = GetJobQueueType(job_info->job_type); |
| const base::TimeDelta elapsed = base::Time::Now() - job_info->start_time; |
| @@ -772,7 +792,7 @@ bool JobScheduler::OnJobDone(scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| NotifyJobUpdated(*job_info); |
| // Requeue the job. |
| - QueueJob(queue_entry.Pass()); |
| + QueueJob(job_id); |
| ThrottleAndContinueJobLoop(queue_type); |
| return false; |
| @@ -780,7 +800,7 @@ bool JobScheduler::OnJobDone(scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| NotifyJobDone(*job_info, error); |
| // The job has finished, no retry will happen in the scheduler. Now we can |
| // remove the job info from the map. This is the only place of the removal. |
| - job_map_.Remove(queue_entry->job_id); |
| + job_map_.Remove(job_id); |
| ResetThrottleAndContinueJobLoop(queue_type); |
| return true; |
| @@ -788,116 +808,109 @@ bool JobScheduler::OnJobDone(scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| } |
| void JobScheduler::OnGetResourceListJobDone( |
| - scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::GetResourceListCallback& callback, |
| google_apis::GDataErrorCode error, |
| scoped_ptr<google_apis::ResourceList> resource_list) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->get_resource_list_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::GetResourceListCallback callback = |
| - queue_entry->get_resource_list_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, resource_list.Pass()); |
| } |
| void JobScheduler::OnGetResourceEntryJobDone( |
| - scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::GetResourceEntryCallback& callback, |
| google_apis::GDataErrorCode error, |
| scoped_ptr<google_apis::ResourceEntry> entry) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->get_resource_entry_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::GetResourceEntryCallback callback = |
| - queue_entry->get_resource_entry_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, entry.Pass()); |
| } |
| void JobScheduler::OnGetAboutResourceJobDone( |
| - scoped_ptr<QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::GetAboutResourceCallback& callback, |
| google_apis::GDataErrorCode error, |
| scoped_ptr<google_apis::AboutResource> about_resource) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->get_about_resource_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::GetAboutResourceCallback callback = |
| - queue_entry->get_about_resource_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, about_resource.Pass()); |
| } |
| void JobScheduler::OnGetAccountMetadataJobDone( |
| - scoped_ptr<QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::GetAccountMetadataCallback& callback, |
| google_apis::GDataErrorCode error, |
| scoped_ptr<google_apis::AccountMetadata> account_metadata) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->get_account_metadata_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::GetAccountMetadataCallback callback = |
| - queue_entry->get_account_metadata_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, account_metadata.Pass()); |
| } |
| void JobScheduler::OnGetAppListJobDone( |
| - scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::GetAppListCallback& callback, |
| google_apis::GDataErrorCode error, |
| scoped_ptr<google_apis::AppList> app_list) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->get_app_list_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::GetAppListCallback callback = queue_entry->get_app_list_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, app_list.Pass()); |
| } |
| void JobScheduler::OnEntryActionJobDone( |
| - scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::EntryActionCallback& callback, |
| google_apis::GDataErrorCode error) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->entry_action_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::EntryActionCallback callback = |
| - queue_entry->entry_action_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error); |
| } |
| void JobScheduler::OnDownloadActionJobDone( |
| - scoped_ptr<JobScheduler::QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::DownloadActionCallback& callback, |
| google_apis::GDataErrorCode error, |
| const base::FilePath& temp_file) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->download_action_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::DownloadActionCallback callback = |
| - queue_entry->download_action_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, temp_file); |
| } |
| void JobScheduler::OnUploadCompletionJobDone( |
| - scoped_ptr<QueueEntry> queue_entry, |
| + JobID job_id, |
| + const google_apis::UploadCompletionCallback& callback, |
| google_apis::GDataErrorCode error, |
| const base::FilePath& drive_path, |
| const base::FilePath& file_path, |
| scoped_ptr<google_apis::ResourceEntry> resource_entry) { |
| DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); |
| - DCHECK(!queue_entry->upload_completion_callback.is_null()); |
| + DCHECK(!callback.is_null()); |
| - google_apis::UploadCompletionCallback callback = |
| - queue_entry->upload_completion_callback; |
| - if (OnJobDone(queue_entry.Pass(), util::GDataToFileError(error))) |
| + if (OnJobDone(job_id, util::GDataToFileError(error))) |
| callback.Run(error, drive_path, file_path, resource_entry.Pass()); |
| } |
| void JobScheduler::UpdateProgress(JobID job_id, int64 progress, int64 total) { |
| - JobInfo* job_info = job_map_.Lookup(job_id); |
| - DCHECK(job_info); |
| + JobEntry* job_entry = job_map_.Lookup(job_id); |
| + DCHECK(job_entry); |
| - job_info->num_completed_bytes = progress; |
| - job_info->num_total_bytes = total; |
| - NotifyJobUpdated(*job_info); |
| + job_entry->job_info.num_completed_bytes = progress; |
| + job_entry->job_info.num_total_bytes = total; |
| + NotifyJobUpdated(job_entry->job_info); |
| } |
| void JobScheduler::OnConnectionTypeChanged( |