Chromium Code Reviews

Unified Diff: appengine/chromium_try_flakes/status/cq_status.py

Issue 2213143002: Add infra_libs as a bootstrap dependency. (Closed) Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: Created 4 years, 4 months ago
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import datetime
import dateutil.parser
import logging
import json
import time
import urllib2

import gae_ts_mon
from google.appengine.api import taskqueue
from google.appengine.api import urlfetch
from google.appengine.ext import deferred
from google.appengine.ext import ndb
from google.appengine.runtime import DeadlineExceededError
from handlers.flake_issues import MIN_REQUIRED_FLAKY_RUNS
from model.build_run import BuildRun
from model.build_run import PatchsetBuilderRuns
from model.fetch_status import FetchStatus
from model.flake import Flake
from model.flake import FlakeOccurrence
from model.flake import FlakyRun
from status import build_result, util
-import time_functions.timestamp
+from infra_libs.time_functions import timestamp as tf_timestamp
Sergiy Byelozyorov 2016/08/04 21:16:53 please change this to import infra_libs.time_func
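For readers following the thread, the two import styles being discussed look roughly as follows. This is a minimal sketch; the second form is an assumption based on the truncated comment above, which appears to ask for a plain module import with fully qualified call sites rather than the aliased form used in this patch set.

    # Style used in this patch set: aliased module import, shorter call sites.
    from infra_libs.time_functions import timestamp as tf_timestamp
    now_ts = tf_timestamp.utcnow_ts()

    # Style the comment appears to ask for (assumed; the comment is cut off):
    # plain package import, fully qualified at the call site.
    import infra_libs.time_functions.timestamp
    now_ts = infra_libs.time_functions.timestamp.utcnow_ts()

Either form pulls the helpers from infra_libs; the difference is only in how the call sites read, which seems to be the style point the comment raises.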


requests_metric = gae_ts_mon.CounterMetric(
    'flakiness_pipeline/cq_status/requests',
    description='Requests made to the chromium-cq-status API')
flakes_metric = gae_ts_mon.CounterMetric(
    'flakiness_pipeline/flake_occurrences_detected',
    description='Detected flake occurrences')
occurrences_per_flake_day = gae_ts_mon.NonCumulativeDistributionMetric(
    'flakiness_pipeline/occurrences_per_flake/day',
(...skipping 289 matching lines...)
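As context for the counters declared above: they are later incremented with a 'status' field when the app talks to the chromium-cq-status API. The sketch below simply mirrors the increment visible near the end of this diff; the metric name and description are copied from the declaration above, and the field value shown is the one that appears in this chunk.

    import gae_ts_mon

    requests_metric = gae_ts_mon.CounterMetric(
        'flakiness_pipeline/cq_status/requests',
        description='Requests made to the chromium-cq-status API')

    # Count one failed fetch, keyed by the 'status' field.
    requests_metric.increment_by(1, fields={'status': 'fetch_error'})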
  fetch_status = FetchStatus.query().get()
  cursor = ''
  begin = ''
  end = ''
  retry_count = 0

  while True:
    if fetch_status:
      if fetch_status.done:
        logging.info('historical fetching done so fetch latest...')
-        end = str(time_functions.timestamp.utcnow_ts())
+        end = str(tf_timestamp.utcnow_ts())

        last_build_run_seen = BuildRun.query().order(
            -BuildRun.time_finished).fetch(1)
-        begin = str(time_functions.timestamp.utctimestamp(
+        begin = str(tf_timestamp.utctimestamp(
            last_build_run_seen[0].time_finished))
        cursor = ''
      else:
        begin = fetch_status.begin
        end = fetch_status.end
        cursor = fetch_status.cursor
    else:
      logging.info('didnt find any historical information. fetching last week')
-      begin = str(time_functions.timestamp.utctimestamp(
+      begin = str(tf_timestamp.utctimestamp(
          datetime.datetime.utcnow() - datetime.timedelta(weeks=1)))
-      end = str(time_functions.timestamp.utcnow_ts())
+      end = str(tf_timestamp.utcnow_ts())

    if begin and end:
      logging.info(
          'fetching from %s to %s cursor: %s',
          str(datetime.datetime.utcfromtimestamp(float(begin))),
          str(datetime.datetime.utcfromtimestamp(float(end))),
          cursor)
    else:
      logging.info('fetching with no begin/end and cursor: ' + cursor)

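To make the begin/end handling above easier to follow, here is a minimal sketch of how the fetch window is built with the renamed timestamp helpers, assuming (as the utcfromtimestamp round-trip in the logging call suggests) that both helpers return Unix timestamps in seconds:

    import datetime
    import logging

    from infra_libs.time_functions import timestamp as tf_timestamp

    # End of the window: "now" as a Unix timestamp, stringified for the query.
    end = str(tf_timestamp.utcnow_ts())

    # Start of the window: a datetime converted to a Unix timestamp,
    # e.g. one week back when no previous fetch state exists.
    week_ago = datetime.datetime.utcnow() - datetime.timedelta(weeks=1)
    begin = str(tf_timestamp.utctimestamp(week_ago))

    # The strings round-trip back into datetimes for logging, as in the code above.
    logging.info('fetching from %s to %s',
                 datetime.datetime.utcfromtimestamp(float(begin)),
                 datetime.datetime.utcfromtimestamp(float(end)))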
(...skipping 73 matching lines...)
      fetch_status.begin = begin
      fetch_status.end = end
      fetch_status.cursor = cursor
      fetch_status.put()

      if not more:
        return  # finish the cron job and wait for next iteration
    except urllib2.URLError, e:
      requests_metric.increment_by(1, fields={'status': 'fetch_error'})
      logging.warning('Failed to fetch CQ status: %s', e.reason)
