Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(280)

Side by Side Diff: appengine/swarming/server/task_scheduler.py

Issue 2006263005: Urgent: disable search API. (Closed) Base URL: git@github.com:luci/luci-py.git@master
Patch Set: Created 4 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 # Copyright 2014 The LUCI Authors. All rights reserved. 1 # Copyright 2014 The LUCI Authors. All rights reserved.
2 # Use of this source code is governed by the Apache v2.0 license that can be 2 # Use of this source code is governed by the Apache v2.0 license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 """High level tasks execution scheduling API. 5 """High level tasks execution scheduling API.
6 6
7 This is the interface closest to the HTTP handlers. 7 This is the interface closest to the HTTP handlers.
8 """ 8 """
9 9
10 import contextlib 10 import contextlib
11 import datetime 11 import datetime
12 import logging 12 import logging
13 import math 13 import math
14 import random 14 import random
15 15
16 from google.appengine.api import datastore_errors 16 from google.appengine.api import datastore_errors
17 from google.appengine.api import search 17 #from google.appengine.api import search
18 from google.appengine.ext import ndb 18 from google.appengine.ext import ndb
19 from google.appengine.runtime import apiproxy_errors 19 from google.appengine.runtime import apiproxy_errors
20 20
21 from components import datastore_utils 21 from components import datastore_utils
22 from components import pubsub 22 from components import pubsub
23 from components import utils 23 from components import utils
24 import ts_mon_metrics 24 import ts_mon_metrics
25 from server import config 25 from server import config
26 from server import stats 26 from server import stats
27 from server import task_pack 27 from server import task_pack
(...skipping 381 matching lines...) Expand 10 before | Expand all | Expand 10 after
409 # Creates the entities TaskToRun and TaskResultSummary but do not save them 409 # Creates the entities TaskToRun and TaskResultSummary but do not save them
410 # yet. TaskRunResult will be created once a bot starts it. 410 # yet. TaskRunResult will be created once a bot starts it.
411 task = task_to_run.new_task_to_run(request) 411 task = task_to_run.new_task_to_run(request)
412 result_summary = task_result.new_result_summary(request) 412 result_summary = task_result.new_result_summary(request)
413 413
414 # Do not specify a doc_id, as they are guaranteed to be monotonically 414 # Do not specify a doc_id, as they are guaranteed to be monotonically
415 # increasing and searches are done in reverse order, which fits exactly the 415 # increasing and searches are done in reverse order, which fits exactly the
416 # created_ts ordering. This is useful because DateField is precise to the date 416 # created_ts ordering. This is useful because DateField is precise to the date
417 # (!) and NumberField is signed 32 bits so the best it could do with EPOCH is 417 # (!) and NumberField is signed 32 bits so the best it could do with EPOCH is
418 # second resolution up to year 2038. 418 # second resolution up to year 2038.
419 index = search.Index(name='requests') 419 #index = search.Index(name='requests')
420 packed = task_pack.pack_result_summary_key(result_summary.key) 420 #packed = task_pack.pack_result_summary_key(result_summary.key)
421 doc = search.Document( 421 #doc = search.Document(
422 fields=[ 422 # fields=[
423 search.TextField(name='name', value=request.name), 423 # search.TextField(name='name', value=request.name),
424 search.AtomField(name='id', value=packed), 424 # search.AtomField(name='id', value=packed),
425 ]) 425 # ])
426 # Even if it fails here, we're still fine, as the task is not "alive" yet. 426 # Even if it fails here, we're still fine, as the task is not "alive" yet.
427 search_future = index.put_async([doc]) 427 #search_future = index.put_async([doc])
428 428
429 now = utils.utcnow() 429 now = utils.utcnow()
430 430
431 if dupe_future: 431 if dupe_future:
432 # Reuse the results! 432 # Reuse the results!
433 dupe_summary = dupe_future.get_result() 433 dupe_summary = dupe_future.get_result()
434 # Refuse tasks older than X days. This is due to the isolate server dropping 434 # Refuse tasks older than X days. This is due to the isolate server dropping
435 # files. https://code.google.com/p/swarming/issues/detail?id=197 435 # files. https://code.google.com/p/swarming/issues/detail?id=197
436 oldest = now - datetime.timedelta( 436 oldest = now - datetime.timedelta(
437 seconds=config.settings().reusable_task_age_secs) 437 seconds=config.settings().reusable_task_age_secs)
(...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after
476 for item in items: 476 for item in items:
477 item.children_task_ids.append(k) 477 item.children_task_ids.append(k)
478 item.modified_ts = now 478 item.modified_ts = now
479 ndb.put_multi(items) 479 ndb.put_multi(items)
480 480
481 # Raising will abort to the caller. 481 # Raising will abort to the caller.
482 futures = [datastore_utils.transaction_async(run)] 482 futures = [datastore_utils.transaction_async(run)]
483 if parent_task_keys: 483 if parent_task_keys:
484 futures.append(datastore_utils.transaction_async(run_parent)) 484 futures.append(datastore_utils.transaction_async(run_parent))
485 485
486 try: 486 #try:
487 search_future.get_result() 487 # search_future.get_result()
488 except search.Error: 488 #except search.Error:
489 # Do not abort the task, for now search is best effort. 489 # # Do not abort the task, for now search is best effort.
490 logging.exception('Put failed') 490 # logging.exception('Put failed')
491 491
492 for future in futures: 492 for future in futures:
493 # Check for failures, it would raise in this case, aborting the call. 493 # Check for failures, it would raise in this case, aborting the call.
494 future.get_result() 494 future.get_result()
495 495
496 stats.add_task_entry( 496 stats.add_task_entry(
497 'task_enqueued', result_summary.key, 497 'task_enqueued', result_summary.key,
498 dimensions=request.properties.dimensions, 498 dimensions=request.properties.dimensions,
499 user=request.user) 499 user=request.user)
500 return result_summary 500 return result_summary
(...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after
892 ## Task queue tasks. 892 ## Task queue tasks.
893 893
894 894
def task_handle_pubsub_task(payload):
  """Relays a PubSub notification enqueued by _maybe_pubsub_notify_via_tq.

  Arguments:
    payload: dict carrying 'task_id', 'topic', 'auth_token' and 'userdata'
        as packed by the enqueuing side.
  """
  # Deliberately let any exception propagate: an unhandled error makes the
  # task queue retry this task, and errors are not expected in the normal
  # case.
  task_id = payload['task_id']
  topic = payload['topic']
  auth_token = payload['auth_token']
  userdata = payload['userdata']
  _pubsub_notify(task_id, topic, auth_token, userdata)
OLDNEW
« no previous file with comments | « no previous file | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698