Chromium Code Reviews| Index: chrome/browser/chromeos/gdata/gdata_file_system.cc |
| diff --git a/chrome/browser/chromeos/gdata/gdata_file_system.cc b/chrome/browser/chromeos/gdata/gdata_file_system.cc |
| index cf74476d74794356fcd1ce6c36d9f6733f7f7f5c..a6b481b4d1746b415e1226c0622b1bbb6bf7b150 100644 |
| --- a/chrome/browser/chromeos/gdata/gdata_file_system.cc |
| +++ b/chrome/browser/chromeos/gdata/gdata_file_system.cc |
| @@ -1456,9 +1456,48 @@ void GDataFileSystem::OnGetDocuments( |
| // Add the current feed to the list of collected feeds for this directory. |
| feed_list->Append(data.release()); |
| - // Check if we need to collect more data to complete the directory list. |
| - if (current_feed->GetNextFeedURL(&next_feed_url) && |
| - !next_feed_url.is_empty()) { |
| + bool initial_read = false; |
| + { |
| + base::AutoLock lock(lock_); |
| + initial_read = root_->origin() == UNINITIALIZED; |
| + } |
| + |
| + bool has_more_data = current_feed->GetNextFeedURL(&next_feed_url) && |
| + !next_feed_url.is_empty(); |
| + |
| + // If we are completely done with feed content fetching or if this is initial |
| + // batch of content feed, we might want to update |root_| content and report |
|
satorux1
2012/03/25 17:28:27
nit: to make it very clear, we might want to descr
zel
2012/03/25 17:39:20
Done.
|
| + // on it. |
| + if (initial_read || !has_more_data) { |
| + error = UpdateDirectoryWithDocumentFeed(feed_list.get(), |
| + FROM_SERVER); |
| + if (error != base::PLATFORM_FILE_OK) { |
| + if (!callback.is_null()) { |
| + proxy->PostTask(FROM_HERE, |
| + base::Bind(callback, error, FilePath(), |
| + reinterpret_cast<GDataFileBase*>(NULL))); |
| + } |
| + |
| + return; |
| + } |
| + |
| + // If we had someone to report this to, then this retrieval was done in a |
| + // context of search... so continue search. |
| + if (!callback.is_null()) { |
| + proxy->PostTask(FROM_HERE, |
| + base::Bind(&GDataFileSystem::FindFileByPathOnCallingThread, |
| + GetWeakPtrForCurrentThread(), |
| + search_file_path, |
| + callback)); |
| + } |
| + } |
| + |
| + if (has_more_data) { |
| + // Don't report to initial callback if we were fetching the first chunk of |
| + // uninitialized root feed, because we already reported. Instead, just |
| + // continue with entire feed fetch in background. |
| + const FindFileCallback continue_callback = |
| + initial_read ? FindFileCallback() : callback; |
| // Kick of the remaining part of the feeds. |
| documents_service_->GetDocuments( |
| next_feed_url, |
| @@ -1467,32 +1506,11 @@ void GDataFileSystem::OnGetDocuments( |
| search_file_path, |
| base::Passed(&feed_list), |
| proxy, |
| - callback)); |
| - return; |
| - } |
| - |
| - error = UpdateDirectoryWithDocumentFeed(feed_list.get(), FROM_SERVER); |
| - if (error != base::PLATFORM_FILE_OK) { |
| - if (!callback.is_null()) { |
| - proxy->PostTask(FROM_HERE, |
| - base::Bind(callback, error, FilePath(), |
| - reinterpret_cast<GDataFileBase*>(NULL))); |
| - } |
| - |
| - return; |
| - } |
| - |
| - scoped_ptr<base::Value> feed_list_value(feed_list.release()); |
| - SaveFeed(feed_list_value.Pass(), FilePath(kLastFeedFile)); |
| - |
| - // If we had someone to report this too, then this retrieval was done in a |
| - // context of search... so continue search. |
| - if (!callback.is_null()) { |
| - proxy->PostTask(FROM_HERE, |
| - base::Bind(&GDataFileSystem::FindFileByPathOnCallingThread, |
| - GetWeakPtrForCurrentThread(), |
| - search_file_path, |
| - callback)); |
| + continue_callback)); |
| + } else { |
| + // Save completed feed in meta cache. |
| + scoped_ptr<base::Value> feed_list_value(feed_list.release()); |
| + SaveFeed(feed_list_value.Pass(), FilePath(kLastFeedFile)); |
| } |
| } |
| @@ -1959,6 +1977,11 @@ base::PlatformFileError GDataFileSystem::UpdateDirectoryWithDocumentFeed( |
| // An entry with the same self link may already exist, so we need to |
| // release the existing GDataFileBase instance before overwriting the |
| // entry with another GDataFileBase instance. |
| + if (map_entry.first) { |
| + LOG(WARNING) << "Found duplicate file " |
| + << map_entry.first->file_name(); |
| + } |
| + |
| delete map_entry.first; |
| map_entry.first = file; |
| map_entry.second = parent_url; |
| @@ -1976,29 +1999,31 @@ base::PlatformFileError GDataFileSystem::UpdateDirectoryWithDocumentFeed( |
| return error; |
| } |
| + scoped_ptr<GDataRootDirectory> orphaned_files(new GDataRootDirectory(NULL)); |
| for (UrlToFileAndParentMap::iterator it = file_by_url.begin(); |
| it != file_by_url.end(); ++it) { |
| scoped_ptr<GDataFileBase> file(it->second.first); |
| GURL parent_url = it->second.second; |
| GDataDirectory* dir = root_.get(); |
| if (!parent_url.is_empty()) { |
| - UrlToFileAndParentMap::iterator find_iter = file_by_url.find(parent_url); |
| + UrlToFileAndParentMap::const_iterator find_iter = |
| + file_by_url.find(parent_url); |
| if (find_iter == file_by_url.end()) { |
| - LOG(WARNING) << "Found orphaned file '" << file->file_name() |
| - << "' with non-existing parent folder of " |
| - << parent_url.spec(); |
| + DVLOG(1) << "Found orphaned file '" << file->file_name() |
| + << "' with non-existing parent folder of " |
| + << parent_url.spec(); |
| + dir = orphaned_files.get(); |
| } else { |
| - dir = find_iter->second.first->AsGDataDirectory(); |
| + dir = find_iter->second.first ? |
| + find_iter->second.first->AsGDataDirectory() : NULL; |
| if (!dir) { |
| - LOG(WARNING) << "Found orphaned file '" << file->file_name() |
| - << "' pointing to non directory parent " |
| - << parent_url.spec(); |
| - dir = root_.get(); |
| + DVLOG(1) << "Found orphaned file '" << file->file_name() |
| + << "' pointing to non directory parent " |
| + << parent_url.spec(); |
| + dir = orphaned_files.get(); |
| } |
| } |
| } |
| - DCHECK(dir); |
| - |
| dir->AddFile(file.release()); |
| } |