Chromium Code Reviews| Index: chrome/common/extensions/docs/server2/appengine_url_fetcher.py |
| diff --git a/chrome/common/extensions/docs/server2/appengine_url_fetcher.py b/chrome/common/extensions/docs/server2/appengine_url_fetcher.py |
| index 8ed0494b3d2f4832e1c8ce181c83a3fc2bb5fb2c..4dbe7436fbe98b038ba093ba9e0b18d39ad9968b 100644 |
| --- a/chrome/common/extensions/docs/server2/appengine_url_fetcher.py |
| +++ b/chrome/common/extensions/docs/server2/appengine_url_fetcher.py |
| @@ -42,7 +42,7 @@ class AppEngineUrlFetcher(object): |
| """Fetches a file synchronously. |
| """ |
| return urlfetch.fetch(self._FromBasePath(url), |
| - deadline=20, |
| + deadline=40, |
|
not at google - send to devlin
2014/10/20 21:06:57
Do we know this is failing?
Ken Rockot(use gerrit already)
2014/10/22 03:19:54
Yes. I was seeing timeouts occurring at the 20-second deadline.
not at google - send to devlin
2014/10/22 16:51:57
I have a TODO somewhere to retry failed fetches, b… [comment truncated]
|
| headers=_MakeHeaders(username, |
| password, |
| access_token)) |
| @@ -62,7 +62,7 @@ class AppEngineUrlFetcher(object): |
| return self.FetchAsync(url, username, password, access_token).Get() |
| return result |
| - rpc = urlfetch.create_rpc(deadline=20) |
| + rpc = urlfetch.create_rpc(deadline=40) |
| urlfetch.make_fetch_call(rpc, |
| self._FromBasePath(url), |
| headers=_MakeHeaders(username, |