OLD | NEW |
1 # Copyright 2012 the V8 project authors. All rights reserved. | 1 # Copyright 2012 the V8 project authors. All rights reserved. |
2 # Redistribution and use in source and binary forms, with or without | 2 # Redistribution and use in source and binary forms, with or without |
3 # modification, are permitted provided that the following conditions are | 3 # modification, are permitted provided that the following conditions are |
4 # met: | 4 # met: |
5 # | 5 # |
6 # * Redistributions of source code must retain the above copyright | 6 # * Redistributions of source code must retain the above copyright |
7 # notice, this list of conditions and the following disclaimer. | 7 # notice, this list of conditions and the following disclaimer. |
8 # * Redistributions in binary form must reproduce the above | 8 # * Redistributions in binary form must reproduce the above |
9 # copyright notice, this list of conditions and the following | 9 # copyright notice, this list of conditions and the following |
10 # disclaimer in the documentation and/or other materials provided | 10 # disclaimer in the documentation and/or other materials provided |
(...skipping 16 matching lines...) |
27 | 27 |
28 | 28 |
29 import multiprocessing | 29 import multiprocessing |
30 import os | 30 import os |
31 import shutil | 31 import shutil |
32 import subprocess | 32 import subprocess |
33 import threading | 33 import threading |
34 import time | 34 import time |
35 | 35 |
36 from . import daemon | 36 from . import daemon |
37 from . import discovery | |
38 from . import local_handler | 37 from . import local_handler |
| 38 from . import presence_handler |
39 from . import signatures | 39 from . import signatures |
40 from . import status_handler | 40 from . import status_handler |
41 from . import work_handler | 41 from . import work_handler |
42 from ..network import perfdata | 42 from ..network import perfdata |
43 | 43 |
44 | 44 |
45 class Server(daemon.Daemon): | 45 class Server(daemon.Daemon): |
46 | 46 |
47 def __init__(self, pidfile, root, stdin="/dev/null", | 47 def __init__(self, pidfile, root, stdin="/dev/null", |
48 stdout="/dev/null", stderr="/dev/null"): | 48 stdout="/dev/null", stderr="/dev/null"): |
(...skipping 21 matching lines...) |
70 with open(self.relative_perf_filename) as f: | 70 with open(self.relative_perf_filename) as f: |
71 try: | 71 try: |
72 self.relative_perf = float(f.read()) | 72 self.relative_perf = float(f.read()) |
73 except: | 73 except: |
74 self.relative_perf = 1.0 | 74 self.relative_perf = 1.0 |
75 else: | 75 else: |
76 self.relative_perf = 1.0 | 76 self.relative_perf = 1.0 |
77 | 77 |
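
Side note on the hunk above: it seeds self.relative_perf from a cached file and falls back to 1.0 when the cache is missing or unparsable. A minimal standalone sketch of the same pattern, with the bare except: narrowed to the errors open() and float() can actually raise (the helper name is made up for illustration):

    import os

    def read_relative_perf(filename):
        # Mirrors the fallback logic in the hunk above: default to a neutral
        # factor of 1.0 when the cache file is absent or not a parseable float.
        if not os.path.exists(filename):
            return 1.0
        try:
            with open(filename) as f:
                return float(f.read())
        except (IOError, ValueError):
            return 1.0
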
78 def run(self): | 78 def run(self): |
79 os.nice(20) | 79 os.nice(20) |
80 self.ip = discovery.GetOwnIP() | 80 self.ip = presence_handler.GetOwnIP() |
81 self.perf_data_manager = perfdata.PerfDataManager(self.datadir) | 81 self.perf_data_manager = perfdata.PerfDataManager(self.datadir) |
82 self.perf_data_lock = threading.Lock() | 82 self.perf_data_lock = threading.Lock() |
83 | 83 |
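
Side note: GetOwnIP() itself is not shown in this hunk. Purely as an assumption about what such a helper might do, a common way to find the machine's outward-facing address is a connected UDP socket:

    import socket

    def get_own_ip():
        # Connecting a UDP socket sends no packets; it only makes the OS pick
        # the outward-facing interface, whose address we then read back.
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            sock.connect(("8.8.8.8", 80))  # any routable address works
            return sock.getsockname()[0]
        finally:
            sock.close()
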
84 self.local_handler = local_handler.LocalSocketServer(self) | 84 self.local_handler = local_handler.LocalSocketServer(self) |
85 self.local_handler_thread = threading.Thread( | 85 self.local_handler_thread = threading.Thread( |
86 target=self.local_handler.serve_forever) | 86 target=self.local_handler.serve_forever) |
87 self.local_handler_thread.start() | 87 self.local_handler_thread.start() |
88 | 88 |
89 self.work_handler = work_handler.WorkSocketServer(self) | 89 self.work_handler = work_handler.WorkSocketServer(self) |
90 self.work_handler_thread = threading.Thread( | 90 self.work_handler_thread = threading.Thread( |
91 target=self.work_handler.serve_forever) | 91 target=self.work_handler.serve_forever) |
92 self.work_handler_thread.start() | 92 self.work_handler_thread.start() |
93 | 93 |
94 self.status_handler = status_handler.StatusSocketServer(self) | 94 self.status_handler = status_handler.StatusSocketServer(self) |
95 self.status_handler_thread = threading.Thread( | 95 self.status_handler_thread = threading.Thread( |
96 target=self.status_handler.serve_forever) | 96 target=self.status_handler.serve_forever) |
97 self.status_handler_thread.start() | 97 self.status_handler_thread.start() |
98 | 98 |
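
Side note: each handler above is a socket server running serve_forever() on its own thread, so the daemon can service local, work, and status traffic concurrently. A minimal sketch of that pattern, assuming SocketServer-style servers (the handler class and port below are illustrative):

    import threading
    try:
        import socketserver                   # Python 3
    except ImportError:
        import SocketServer as socketserver   # Python 2, matching this code's era

    class EchoHandler(socketserver.BaseRequestHandler):
        def handle(self):
            # Echo one message back to the client.
            self.request.sendall(self.request.recv(1024))

    server = socketserver.TCPServer(("127.0.0.1", 0), EchoHandler)
    thread = threading.Thread(target=server.serve_forever)
    thread.start()
    # ... later: server.shutdown(); thread.join()
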
99 self.presence_daemon = discovery.PresenceDaemon(self) | 99 self.presence_daemon = presence_handler.PresenceDaemon(self) |
100 self.presence_daemon_thread = threading.Thread( | 100 self.presence_daemon_thread = threading.Thread( |
101 target=self.presence_daemon.serve_forever) | 101 target=self.presence_daemon.serve_forever) |
102 self.presence_daemon_thread.start() | 102 self.presence_daemon_thread.start() |
103 | 103 |
104 self.presence_daemon.FindPeers() | 104 self.presence_daemon.FindPeers() |
105 time.sleep(0.5) # Give those peers some time to reply. | 105 time.sleep(0.5) # Give those peers some time to reply. |
106 | 106 |
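
Side note: FindPeers() is asynchronous, which is why the fixed 0.5 s sleep exists: it gives peers a window to answer before the peer list is read. A hypothetical sketch of such a discovery round over UDP broadcast (the port and payload are invented for illustration and are not taken from presence_handler):

    import socket

    def find_peers(port=57777, timeout=0.5):
        # Broadcast a hello and collect whoever answers within the timeout,
        # mirroring the FindPeers()-then-sleep(0.5) handshake above.
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.settimeout(timeout)
        sock.sendto(b"hello", ("<broadcast>", port))
        peers = []
        try:
            while True:
                _, address = sock.recvfrom(64)
                peers.append(address[0])
        except socket.timeout:
            pass
        return peers
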
107 with self.peer_list_lock: | 107 with self.peer_list_lock: |
108 for p in self.peers: | 108 for p in self.peers: |
109 if p.address == self.ip: continue | 109 if p.address == self.ip: continue |
(...skipping 126 matching lines...) |
236 def CompareOwnPerf(self, test, arch, mode): | 236 def CompareOwnPerf(self, test, arch, mode): |
237 data_store = self.perf_data_manager.GetStore(arch, mode) | 237 data_store = self.perf_data_manager.GetStore(arch, mode) |
238 observed = data_store.FetchPerfData(test) | 238 observed = data_store.FetchPerfData(test) |
239 if not observed: return | 239 if not observed: return |
240 own_perf_estimate = observed / test.duration | 240 own_perf_estimate = observed / test.duration |
241 with self.perf_data_lock: | 241 with self.perf_data_lock: |
242 kLearnRateLimiter = 9999 | 242 kLearnRateLimiter = 9999 |
243 self.relative_perf *= kLearnRateLimiter | 243 self.relative_perf *= kLearnRateLimiter |
244 self.relative_perf += own_perf_estimate | 244 self.relative_perf += own_perf_estimate |
245 self.relative_perf /= (kLearnRateLimiter + 1) | 245 self.relative_perf /= (kLearnRateLimiter + 1) |
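
Side note: with kLearnRateLimiter = 9999, the three lines above implement an exponential moving average with smoothing factor alpha = 1/(kLearnRateLimiter + 1) = 1e-4, so a single observation can nudge relative_perf by at most 0.01% of the gap. The same update as a standalone function:

    def update_relative_perf(current, estimate, k_learn_rate_limiter=9999):
        # Equivalent to: current = (1 - alpha) * current + alpha * estimate
        # with alpha = 1 / (k_learn_rate_limiter + 1).
        return (current * k_learn_rate_limiter + estimate) / (k_learn_rate_limiter + 1.0)

    # e.g. one estimate of 2.0 moves a stored value of 1.0 to 1.0001:
    assert abs(update_relative_perf(1.0, 2.0) - 1.0001) < 1e-12
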