OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # | |
3 # Copyright 2007 Google Inc. | |
4 # | |
5 # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 # you may not use this file except in compliance with the License. | |
7 # You may obtain a copy of the License at | |
8 # | |
9 # http://www.apache.org/licenses/LICENSE-2.0 | |
10 # | |
11 # Unless required by applicable law or agreed to in writing, software | |
12 # distributed under the License is distributed on an "AS IS" BASIS, | |
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 # See the License for the specific language governing permissions and | |
15 # limitations under the License. | |
16 | |
17 """Tool for uploading diffs from a version control system to the codereview app. | |
18 | |
19 Usage summary: upload.py [options] [-- diff_options] | |
20 | |
21 Diff options are passed to the diff command of the underlying system. | |
22 | |
23 Supported version control systems: | |
24 Git | |
25 Mercurial | |
26 Subversion | |
27 | |
28 It is important for Git/Mercurial users to specify a tree/node/branch to diff | |
29 against by using the '--rev' option. | |
30 """ | |
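# Illustrative invocations (the revision, reviewer and message values below | |
# are hypothetical; the flags are defined by the option parser further down): | |
# | |
#   upload.py --rev=master -r reviewer@example.com -m "Fix parser bug" | |
#   upload.py -i 1234 -m "Address review comments" -- -U10 | |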
31 # This code is derived from appcfg.py in the App Engine SDK (open source), | |
32 # and from ASPN recipe #146306. | |
33 | |
34 import cookielib | |
35 import getpass | |
36 import logging | |
37 import md5 | |
38 import mimetypes | |
39 import optparse | |
40 import os | |
41 import re | |
42 import socket | |
43 import subprocess | |
44 import sys | |
45 import urllib | |
46 import urllib2 | |
47 import urlparse | |
48 | |
49 try: | |
50 import readline | |
51 except ImportError: | |
52 pass | |
53 | |
54 # The logging verbosity: | |
55 # 0: Errors only. | |
56 # 1: Status messages. | |
57 # 2: Info logs. | |
58 # 3: Debug logs. | |
59 verbosity = 1 | |
60 | |
61 # Max size of patch or base file. | |
62 MAX_UPLOAD_SIZE = 900 * 1024 | |
63 | |
64 | |
65 def GetEmail(prompt): | |
66 """Prompts the user for their email address and returns it. | |
67 | |
68 The last used email address is saved to a file and offered up as a suggestion | |
69 to the user. If the user presses enter without typing anything, the last | |
70 used email address is used. If the user enters a new address, it is saved | |
71 for the next time we prompt. | |
72 | |
73 """ | |
74 last_email_file_name = os.path.expanduser("~/.last_codereview_email_address") | |
75 last_email = "" | |
76 if os.path.exists(last_email_file_name): | |
77 try: | |
78 last_email_file = open(last_email_file_name, "r") | |
79 last_email = last_email_file.readline().strip("\n") | |
80 last_email_file.close() | |
81 prompt += " [%s]" % last_email | |
82 except IOError, e: | |
83 pass | |
84 email = raw_input(prompt + ": ").strip() | |
85 if email: | |
86 try: | |
87 last_email_file = open(last_email_file_name, "w") | |
88 last_email_file.write(email) | |
89 last_email_file.close() | |
90 except IOError, e: | |
91 pass | |
92 else: | |
93 email = last_email | |
94 return email | |
95 | |
96 | |
97 def StatusUpdate(msg): | |
98 """Print a status message to stdout. | |
99 | |
100 If 'verbosity' is greater than 0, print the message. | |
101 | |
102 Args: | |
103 msg: The string to print. | |
104 """ | |
105 if verbosity > 0: | |
106 print msg | |
107 | |
108 | |
109 def ErrorExit(msg): | |
110 """Print an error message to stderr and exit.""" | |
111 print >>sys.stderr, msg | |
112 sys.exit(1) | |
113 | |
114 | |
115 class ClientLoginError(urllib2.HTTPError): | |
116 """Raised to indicate there was an error authenticating with ClientLogin.""" | |
117 | |
118 def __init__(self, url, code, msg, headers, args): | |
119 urllib2.HTTPError.__init__(self, url, code, msg, headers, None) | |
120 self.args = args | |
121 self.reason = args["Error"] | |
122 | |
123 | |
124 class AbstractRpcServer(object): | |
125 """Provides a common interface for a simple RPC server.""" | |
126 | |
127 def __init__(self, host, auth_function, host_override=None, extra_headers={}, | |
128 save_cookies=False): | |
129 """Creates a new HttpRpcServer. | |
130 | |
131 Args: | |
132 host: The host to send requests to. | |
133 auth_function: A function that takes no arguments and returns an | |
134 (email, password) tuple when called. Will be called if authentication | |
135 is required. | |
136 host_override: The host header to send to the server (defaults to host). | |
137 extra_headers: A dict of extra headers to append to every request. | |
138 save_cookies: If True, save the authentication cookies to local disk. | |
139 If False, use an in-memory cookiejar instead. Subclasses must | |
140 implement this functionality. Defaults to False. | |
141 """ | |
142 self.host = host | |
143 self.host_override = host_override | |
144 self.auth_function = auth_function | |
145 self.authenticated = False | |
146 self.extra_headers = extra_headers | |
147 self.save_cookies = save_cookies | |
148 self.opener = self._GetOpener() | |
149 if self.host_override: | |
150 logging.info("Server: %s; Host: %s", self.host, self.host_override) | |
151 else: | |
152 logging.info("Server: %s", self.host) | |
153 | |
154 def _GetOpener(self): | |
155 """Returns an OpenerDirector for making HTTP requests. | |
156 | |
157 Returns: | |
158 A urllib2.OpenerDirector object. | |
159 """ | |
160 raise NotImplementedError() | |
161 | |
162 def _CreateRequest(self, url, data=None): | |
163 """Creates a new urllib request.""" | |
164 logging.debug("Creating request for: '%s' with payload:\n%s", url, data) | |
165 req = urllib2.Request(url, data=data) | |
166 if self.host_override: | |
167 req.add_header("Host", self.host_override) | |
168 for key, value in self.extra_headers.iteritems(): | |
169 req.add_header(key, value) | |
170 return req | |
171 | |
172 def _GetAuthToken(self, email, password): | |
173 """Uses ClientLogin to authenticate the user, returning an auth token. | |
174 | |
175 Args: | |
176 email: The user's email address | |
177 password: The user's password | |
178 | |
179 Raises: | |
180 ClientLoginError: If there was an error authenticating with ClientLogin. | |
181 HTTPError: If there was some other form of HTTP error. | |
182 | |
183 Returns: | |
184 The authentication token returned by ClientLogin. | |
185 """ | |
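# Informational sketch: a successful ClientLogin response body is a set of | |
# key=value lines, from which the code below extracts the "Auth" value, e.g.: | |
#   SID=... | |
#   LSID=... | |
#   Auth=<token> | |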
186 account_type = "GOOGLE" | |
187 if self.host.endswith(".google.com"): | |
188 # Needed for use inside Google. | |
189 account_type = "HOSTED" | |
190 req = self._CreateRequest( | |
191 url="https://www.google.com/accounts/ClientLogin", | |
192 data=urllib.urlencode({ | |
193 "Email": email, | |
194 "Passwd": password, | |
195 "service": "ah", | |
196 "source": "rietveld-codereview-upload", | |
197 "accountType": account_type, | |
198 }), | |
199 ) | |
200 try: | |
201 response = self.opener.open(req) | |
202 response_body = response.read() | |
203 response_dict = dict(x.split("=") | |
204 for x in response_body.split("\n") if x) | |
205 return response_dict["Auth"] | |
206 except urllib2.HTTPError, e: | |
207 if e.code == 403: | |
208 body = e.read() | |
209 response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) | |
210 raise ClientLoginError(req.get_full_url(), e.code, e.msg, | |
211 e.headers, response_dict) | |
212 else: | |
213 raise | |
214 | |
215 def _GetAuthCookie(self, auth_token): | |
216 """Fetches authentication cookies for an authentication token. | |
217 | |
218 Args: | |
219 auth_token: The authentication token returned by ClientLogin. | |
220 | |
221 Raises: | |
222 HTTPError: If there was an error fetching the authentication cookies. | |
223 """ | |
224 # This is a dummy value to allow us to identify when we're successful. | |
225 continue_location = "http://localhost/" | |
226 args = {"continue": continue_location, "auth": auth_token} | |
227 req = self._CreateRequest("http://%s/_ah/login?%s" % | |
228 (self.host, urllib.urlencode(args))) | |
229 try: | |
230 response = self.opener.open(req) | |
231 except urllib2.HTTPError, e: | |
232 response = e | |
233 if (response.code != 302 or | |
234 response.info()["location"] != continue_location): | |
235 raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, | |
236 response.headers, response.fp) | |
237 self.authenticated = True | |
238 | |
239 def _Authenticate(self): | |
240 """Authenticates the user. | |
241 | |
242 The authentication process works as follows: | |
243 1) We get a username and password from the user | |
244 2) We use ClientLogin to obtain an AUTH token for the user | |
245 (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). | |
246 3) We pass the auth token to /_ah/login on the server to obtain an | |
247 authentication cookie. If login was successful, it tries to redirect | |
248 us to the URL we provided. | |
249 | |
250 If we attempt to access the upload API without first obtaining an | |
251 authentication cookie, it returns a 401 response and directs us to | |
252 authenticate ourselves with ClientLogin. | |
253 """ | |
254 for i in range(3): | |
255 credentials = self.auth_function() | |
256 try: | |
257 auth_token = self._GetAuthToken(credentials[0], credentials[1]) | |
258 except ClientLoginError, e: | |
259 if e.reason == "BadAuthentication": | |
260 print >>sys.stderr, "Invalid username or password." | |
261 continue | |
262 if e.reason == "CaptchaRequired": | |
263 print >>sys.stderr, ( | |
264 "Please go to\n" | |
265 "https://www.google.com/accounts/DisplayUnlockCaptcha\n" | |
266 "and verify you are a human. Then try again.") | |
267 break | |
268 if e.reason == "NotVerified": | |
269 print >>sys.stderr, "Account not verified." | |
270 break | |
271 if e.reason == "TermsNotAgreed": | |
272 print >>sys.stderr, "User has not agreed to TOS." | |
273 break | |
274 if e.reason == "AccountDeleted": | |
275 print >>sys.stderr, "The user account has been deleted." | |
276 break | |
277 if e.reason == "AccountDisabled": | |
278 print >>sys.stderr, "The user account has been disabled." | |
279 break | |
280 if e.reason == "ServiceDisabled": | |
281 print >>sys.stderr, ("The user's access to the service has been " | |
282 "disabled.") | |
283 break | |
284 if e.reason == "ServiceUnavailable": | |
285 print >>sys.stderr, "The service is not available; try again later." | |
286 break | |
287 raise | |
288 self._GetAuthCookie(auth_token) | |
289 return | |
290 | |
291 def Send(self, request_path, payload=None, | |
292 content_type="application/octet-stream", | |
293 timeout=None, | |
294 **kwargs): | |
295 """Sends an RPC and returns the response. | |
296 | |
297 Args: | |
298 request_path: The path to send the request to, e.g. /api/appversion/create. | |
299 payload: The body of the request, or None to send an empty request. | |
300 content_type: The Content-Type header to use. | |
301 timeout: timeout in seconds; default None i.e. no timeout. | |
302 (Note: for large requests on OS X, the timeout doesn't work right.) | |
303 kwargs: Any keyword arguments are converted into query string parameters. | |
304 | |
305 Returns: | |
306 The response body, as a string. | |
307 """ | |
308 # TODO: Don't require authentication. Let the server say | |
309 # whether it is necessary. | |
310 if not self.authenticated: | |
311 self._Authenticate() | |
312 | |
313 old_timeout = socket.getdefaulttimeout() | |
314 socket.setdefaulttimeout(timeout) | |
315 try: | |
316 tries = 0 | |
317 while True: | |
318 tries += 1 | |
319 args = dict(kwargs) | |
320 url = "http://%s%s" % (self.host, request_path) | |
321 if args: | |
322 url += "?" + urllib.urlencode(args) | |
323 req = self._CreateRequest(url=url, data=payload) | |
324 req.add_header("Content-Type", content_type) | |
325 try: | |
326 f = self.opener.open(req) | |
327 response = f.read() | |
328 f.close() | |
329 return response | |
330 except urllib2.HTTPError, e: | |
331 if tries > 3: | |
332 raise | |
333 elif e.code == 401: | |
334 self._Authenticate() | |
335 ## elif e.code >= 500 and e.code < 600: | |
336 ## # Server Error - try again. | |
337 ## continue | |
338 else: | |
339 raise | |
340 finally: | |
341 socket.setdefaulttimeout(old_timeout) | |
342 | |
343 | |
344 class HttpRpcServer(AbstractRpcServer): | |
345 """Provides a simplified RPC-style interface for HTTP requests.""" | |
346 | |
347 def _Authenticate(self): | |
348 """Save the cookie jar after authentication.""" | |
349 super(HttpRpcServer, self)._Authenticate() | |
350 if self.save_cookies: | |
351 StatusUpdate("Saving authentication cookies to %s" % self.cookie_file) | |
352 self.cookie_jar.save() | |
353 | |
354 def _GetOpener(self): | |
355 """Returns an OpenerDirector that supports cookies and ignores redirects. | |
356 | |
357 Returns: | |
358 A urllib2.OpenerDirector object. | |
359 """ | |
360 opener = urllib2.OpenerDirector() | |
361 opener.add_handler(urllib2.ProxyHandler()) | |
362 opener.add_handler(urllib2.UnknownHandler()) | |
363 opener.add_handler(urllib2.HTTPHandler()) | |
364 opener.add_handler(urllib2.HTTPDefaultErrorHandler()) | |
365 opener.add_handler(urllib2.HTTPSHandler()) | |
366 opener.add_handler(urllib2.HTTPErrorProcessor()) | |
367 if self.save_cookies: | |
368 self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") | |
369 self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) | |
370 if os.path.exists(self.cookie_file): | |
371 try: | |
372 self.cookie_jar.load() | |
373 self.authenticated = True | |
374 StatusUpdate("Loaded authentication cookies from %s" % | |
375 self.cookie_file) | |
376 except (cookielib.LoadError, IOError): | |
377 # Failed to load cookies - just ignore them. | |
378 pass | |
379 else: | |
380 # Create an empty cookie file with mode 600 | |
381 fd = os.open(self.cookie_file, os.O_CREAT, 0600) | |
382 os.close(fd) | |
383 # Always chmod the cookie file | |
384 os.chmod(self.cookie_file, 0600) | |
385 else: | |
386 # Don't save cookies across runs of upload.py. | |
387 self.cookie_jar = cookielib.CookieJar() | |
388 opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) | |
389 return opener | |
390 | |
391 | |
392 parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]") | |
393 parser.add_option("-y", "--assume_yes", action="store_true", | |
394 dest="assume_yes", default=False, | |
395 help="Assume that the answer to yes/no questions is 'yes'.") | |
396 # Logging | |
397 group = parser.add_option_group("Logging options") | |
398 group.add_option("-q", "--quiet", action="store_const", const=0, | |
399 dest="verbose", help="Print errors only.") | |
400 group.add_option("-v", "--verbose", action="store_const", const=2, | |
401 dest="verbose", default=1, | |
402 help="Print info level logs (default).") | |
403 group.add_option("--noisy", action="store_const", const=3, | |
404 dest="verbose", help="Print all logs.") | |
405 # Review server | |
406 group = parser.add_option_group("Review server options") | |
407 group.add_option("-s", "--server", action="store", dest="server", | |
408 default="codereview.appspot.com", | |
409 metavar="SERVER", | |
410 help=("The server to upload to. The format is host[:port]. " | |
411 "Defaults to 'codereview.appspot.com'.")) | |
412 group.add_option("-e", "--email", action="store", dest="email", | |
413 metavar="EMAIL", default=None, | |
414 help="The username to use. Will prompt if omitted.") | |
415 group.add_option("-H", "--host", action="store", dest="host", | |
416 metavar="HOST", default=None, | |
417 help="Overrides the Host header sent with all RPCs.") | |
418 group.add_option("--no_cookies", action="store_false", | |
419 dest="save_cookies", default=True, | |
420 help="Do not save authentication cookies to local disk.") | |
421 # Issue | |
422 group = parser.add_option_group("Issue options") | |
423 group.add_option("-d", "--description", action="store", dest="description", | |
424 metavar="DESCRIPTION", default=None, | |
425 help="Optional description when creating an issue.") | |
426 group.add_option("-f", "--description_file", action="store", | |
427 dest="description_file", metavar="DESCRIPTION_FILE", | |
428 default=None, | |
429 help="Optional path of a file that contains " | |
430 "the description when creating an issue.") | |
431 group.add_option("-r", "--reviewers", action="store", dest="reviewers", | |
432 metavar="REVIEWERS", default=None, | |
433 help="Add reviewers (comma separated email addresses).") | |
434 group.add_option("--cc", action="store", dest="cc", | |
435 metavar="CC", default=None, | |
436 help="Add CC (comma separated email addresses).") | |
437 # Upload options | |
438 group = parser.add_option_group("Patch options") | |
439 group.add_option("-m", "--message", action="store", dest="message", | |
440 metavar="MESSAGE", default=None, | |
441 help="A message to identify the patch. " | |
442 "Will prompt if omitted.") | |
443 group.add_option("-i", "--issue", type="int", action="store", | |
444 metavar="ISSUE", default=None, | |
445 help="Issue number to add the patch set to. Defaults to creating a new issue." | |
446 group.add_option("--download_base", action="store_true", | |
447 dest="download_base", default=False, | |
448 help="Base files will be downloaded by the server " | |
449 "(side-by-side diffs may not work on files with CRs).") | |
450 group.add_option("--rev", action="store", dest="revision", | |
451 metavar="REV", default=None, | |
452 help="Branch/tree/revision to diff against (used by DVCS).") | |
453 group.add_option("--send_mail", action="store_true", | |
454 dest="send_mail", default=False, | |
455 help="Send notification email to reviewers.") | |
456 | |
457 | |
458 def GetRpcServer(options): | |
459 """Returns an instance of an AbstractRpcServer. | |
460 | |
461 Returns: | |
462 A new AbstractRpcServer, on which RPC calls can be made. | |
463 """ | |
464 | |
465 rpc_server_class = HttpRpcServer | |
466 | |
467 def GetUserCredentials(): | |
468 """Prompts the user for a username and password.""" | |
469 email = options.email | |
470 if email is None: | |
471 email = GetEmail("Email (login for uploading to %s)" % options.server) | |
472 password = getpass.getpass("Password for %s: " % email) | |
473 return (email, password) | |
474 | |
475 # If this is the dev_appserver, use fake authentication. | |
476 host = (options.host or options.server).lower() | |
477 if host == "localhost" or host.startswith("localhost:"): | |
478 email = options.email | |
479 if email is None: | |
480 email = "test@example.com" | |
481 logging.info("Using debug user %s. Override with --email" % email) | |
482 server = rpc_server_class( | |
483 options.server, | |
484 lambda: (email, "password"), | |
485 host_override=options.host, | |
486 extra_headers={"Cookie": | |
487 'dev_appserver_login="%s:False"' % email}, | |
488 save_cookies=options.save_cookies) | |
489 # Don't try to talk to ClientLogin. | |
490 server.authenticated = True | |
491 return server | |
492 | |
493 return rpc_server_class(options.server, GetUserCredentials, | |
494 host_override=options.host, | |
495 save_cookies=options.save_cookies) | |
496 | |
497 | |
498 def EncodeMultipartFormData(fields, files): | |
499 """Encode form fields for multipart/form-data. | |
500 | |
501 Args: | |
502 fields: A sequence of (name, value) elements for regular form fields. | |
503 files: A sequence of (name, filename, value) elements for data to be | |
504 uploaded as files. | |
505 Returns: | |
506 (content_type, body) ready for httplib.HTTP instance. | |
507 | |
508 Source: | |
509 http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306 | |
510 """ | |
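# Illustrative shape of the returned body (lines are CRLF-joined) for one | |
# field ("subject", "Fix") and one file ("data", "data.diff", <diff>); the | |
# values are made up and the file's content type comes from GetContentType(): | |
#   ---M-A-G-I-C---B-O-U-N-D-A-R-Y- | |
#   Content-Disposition: form-data; name="subject" | |
#   (blank line) | |
#   Fix | |
#   ---M-A-G-I-C---B-O-U-N-D-A-R-Y- | |
#   Content-Disposition: form-data; name="data"; filename="data.diff" | |
#   Content-Type: application/octet-stream | |
#   (blank line) | |
#   <diff> | |
#   ---M-A-G-I-C---B-O-U-N-D-A-R-Y--- | |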
511 BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-' | |
512 CRLF = '\r\n' | |
513 lines = [] | |
514 for (key, value) in fields: | |
515 lines.append('--' + BOUNDARY) | |
516 lines.append('Content-Disposition: form-data; name="%s"' % key) | |
517 lines.append('') | |
518 lines.append(value) | |
519 for (key, filename, value) in files: | |
520 lines.append('--' + BOUNDARY) | |
521 lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % | |
522 (key, filename)) | |
523 lines.append('Content-Type: %s' % GetContentType(filename)) | |
524 lines.append('') | |
525 lines.append(value) | |
526 lines.append('--' + BOUNDARY + '--') | |
527 lines.append('') | |
528 body = CRLF.join(lines) | |
529 content_type = 'multipart/form-data; boundary=%s' % BOUNDARY | |
530 return content_type, body | |
531 | |
532 | |
533 def GetContentType(filename): | |
534 """Helper to guess the content-type from the filename.""" | |
535 return mimetypes.guess_type(filename)[0] or 'application/octet-stream' | |
536 | |
537 | |
538 # Use a shell for subcommands on Windows to get a PATH search. | |
539 use_shell = sys.platform.startswith("win") | |
540 | |
541 def RunShellWithReturnCode(command, print_output=False, | |
542 universal_newlines=True): | |
543 """Executes a command and returns the output from stdout and the return code. | |
544 | |
545 Args: | |
546 command: Command to execute. | |
547 print_output: If True, the command's stdout is echoed as it is read and its | |
548 stderr is printed once it exits. If False, nothing is printed and stderr is discarded. | |
549 universal_newlines: Use universal_newlines flag (default: True). | |
550 | |
551 Returns: | |
552 Tuple (output, return code) | |
553 """ | |
554 logging.info("Running %s", command) | |
555 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, | |
556 shell=use_shell, universal_newlines=universal_newlines) | |
557 if print_output: | |
558 output_array = [] | |
559 while True: | |
560 line = p.stdout.readline() | |
561 if not line: | |
562 break | |
563 print line.strip("\n") | |
564 output_array.append(line) | |
565 output = "".join(output_array) | |
566 else: | |
567 output = p.stdout.read() | |
568 p.wait() | |
569 errout = p.stderr.read() | |
570 if print_output and errout: | |
571 print >>sys.stderr, errout | |
572 p.stdout.close() | |
573 p.stderr.close() | |
574 return output, p.returncode | |
575 | |
576 | |
577 def RunShell(command, silent_ok=False, universal_newlines=True, | |
578 print_output=False): | |
579 data, retcode = RunShellWithReturnCode(command, print_output, | |
580 universal_newlines) | |
581 if retcode: | |
582 ErrorExit("Got error status from %s:\n%s" % (command, data)) | |
583 if not silent_ok and not data: | |
584 ErrorExit("No output from %s" % command) | |
585 return data | |
586 | |
587 | |
588 class VersionControlSystem(object): | |
589 """Abstract base class providing an interface to the VCS.""" | |
590 | |
591 def __init__(self, options): | |
592 """Constructor. | |
593 | |
594 Args: | |
595 options: Command line options. | |
596 """ | |
597 self.options = options | |
598 | |
599 def GenerateDiff(self, args): | |
600 """Return the current diff as a string. | |
601 | |
602 Args: | |
603 args: Extra arguments to pass to the diff command. | |
604 """ | |
605 raise NotImplementedError( | |
606 "abstract method -- subclass %s must override" % self.__class__) | |
607 | |
608 def GetUnknownFiles(self): | |
609 """Return a list of files unknown to the VCS.""" | |
610 raise NotImplementedError( | |
611 "abstract method -- subclass %s must override" % self.__class__) | |
612 | |
613 def CheckForUnknownFiles(self): | |
614 """Show an "are you sure?" prompt if there are unknown files.""" | |
615 unknown_files = self.GetUnknownFiles() | |
616 if unknown_files: | |
617 print "The following files are not added to version control:" | |
618 for line in unknown_files: | |
619 print line | |
620 prompt = "Are you sure you want to continue? (y/N) " | |
621 answer = raw_input(prompt).strip() | |
622 if answer != "y": | |
623 ErrorExit("User aborted") | |
624 | |
625 def GetBaseFile(self, filename): | |
626 """Get the content of the upstream version of a file. | |
627 | |
628 Returns: | |
629 A tuple (base_content, new_content, is_binary, status) | |
630 base_content: The contents of the base file. | |
631 new_content: For text files, this is empty. For binary files, this is | |
632 the contents of the new file, since the diff output won't contain | |
633 information to reconstruct the current file. | |
634 is_binary: True iff the file is binary. | |
635 status: The status of the file. | |
636 """ | |
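# For illustration, a modified text file might yield a tuple shaped like | |
# ("<old file contents>", None, False, "M"); the exact status string | |
# depends on the VCS subclass. | |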
637 | |
638 raise NotImplementedError( | |
639 "abstract method -- subclass %s must override" % self.__class__) | |
640 | |
641 | |
642 def GetBaseFiles(self, diff): | |
643 """Helper that calls GetBaseFile for each file in the patch. | |
644 | |
645 Returns: | |
646 A dictionary that maps from filename to GetBaseFile's tuple. Filenames | |
647 are retrieved based on lines that start with "Index:" or | |
648 "Property changes on:". | |
649 """ | |
650 files = {} | |
651 for line in diff.splitlines(True): | |
652 if line.startswith('Index:') or line.startswith('Property changes on:'): | |
653 unused, filename = line.split(':', 1) | |
654 # On Windows if a file has property changes its filename uses '\' | |
655 # instead of '/'. | |
656 filename = filename.strip().replace('\\', '/') | |
657 files[filename] = self.GetBaseFile(filename) | |
658 return files | |
659 | |
660 | |
661 def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, | |
662 files): | |
663 """Uploads the base files (and if necessary, the current ones as well).""" | |
664 | |
665 def UploadFile(filename, file_id, content, is_binary, status, is_base): | |
666 """Uploads a file to the server.""" | |
667 file_too_large = False | |
668 if is_base: | |
669 type = "base" | |
670 else: | |
671 type = "current" | |
672 if len(content) > MAX_UPLOAD_SIZE: | |
673 print ("Not uploading the %s file for %s because it's too large." % | |
674 (type, filename)) | |
675 file_too_large = True | |
676 content = "" | |
677 checksum = md5.new(content).hexdigest() | |
678 if options.verbose > 0 and not file_too_large: | |
679 print "Uploading %s file for %s" % (type, filename) | |
680 url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) | |
681 form_fields = [("filename", filename), | |
682 ("status", status), | |
683 ("checksum", checksum), | |
684 ("is_binary", str(is_binary)), | |
685 ("is_current", str(not is_base)), | |
686 ] | |
687 if file_too_large: | |
688 form_fields.append(("file_too_large", "1")) | |
689 if options.email: | |
690 form_fields.append(("user", options.email)) | |
691 ctype, body = EncodeMultipartFormData(form_fields, | |
692 [("data", filename, content)]) | |
693 response_body = rpc_server.Send(url, body, | |
694 content_type=ctype) | |
695 if not response_body.startswith("OK"): | |
696 StatusUpdate(" --> %s" % response_body) | |
697 sys.exit(1) | |
698 | |
699 patches = dict() | |
700 [patches.setdefault(v, k) for k, v in patch_list] | |
701 for filename in patches.keys(): | |
702 base_content, new_content, is_binary, status = files[filename] | |
703 file_id_str = patches.get(filename) | |
704 if file_id_str.find("nobase") != -1: | |
705 base_content = None | |
706 file_id_str = file_id_str[file_id_str.rfind("_") + 1:] | |
707 file_id = int(file_id_str) | |
708 if base_content != None: | |
709 UploadFile(filename, file_id, base_content, is_binary, status, True) | |
710 if new_content != None: | |
711 UploadFile(filename, file_id, new_content, is_binary, status, False) | |
712 | |
713 def IsImage(self, filename): | |
714 """Returns true if the filename has an image extension.""" | |
715 mimetype = mimetypes.guess_type(filename)[0] | |
716 if not mimetype: | |
717 return False | |
718 return mimetype.startswith("image/") | |
719 | |
720 | |
721 class SubversionVCS(VersionControlSystem): | |
722 """Implementation of the VersionControlSystem interface for Subversion.""" | |
723 | |
724 def __init__(self, options): | |
725 super(SubversionVCS, self).__init__(options) | |
726 if self.options.revision: | |
727 match = re.match(r"(\d+)(:(\d+))?", self.options.revision) | |
728 if not match: | |
729 ErrorExit("Invalid Subversion revision %s." % self.options.revision) | |
730 self.rev_start = match.group(1) | |
731 self.rev_end = match.group(3) | |
732 else: | |
733 self.rev_start = self.rev_end = None | |
734 # Cache output from "svn list -r REVNO dirname". | |
735 # Keys: dirname, Values: 2-tuple (output for start rev and end rev). | |
736 self.svnls_cache = {} | |
737 # SVN base URL is required to fetch files deleted in an older revision. | |
738 # The result is cached to avoid guessing it over and over again in GetBaseFile(). | |
739 required = self.options.download_base or self.options.revision is not None | |
740 self.svn_base = self._GuessBase(required) | |
741 | |
742 def GuessBase(self, required): | |
743 """Wrapper for _GuessBase.""" | |
744 return self.svn_base | |
745 | |
746 def _GuessBase(self, required): | |
747 """Returns the SVN base URL. | |
748 | |
749 Args: | |
750 required: If True, exit if the URL can't be guessed; otherwise None is | |
751 returned. | |
752 """ | |
753 info = RunShell(["svn", "info"]) | |
754 for line in info.splitlines(): | |
755 words = line.split() | |
756 if len(words) == 2 and words[0] == "URL:": | |
757 url = words[1] | |
758 scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) | |
759 username, netloc = urllib.splituser(netloc) | |
760 if username: | |
761 logging.info("Removed username from base URL") | |
762 if netloc.endswith("svn.python.org"): | |
763 if netloc == "svn.python.org": | |
764 if path.startswith("/projects/"): | |
765 path = path[9:] | |
766 elif netloc != "pythondev@svn.python.org": | |
767 ErrorExit("Unrecognized Python URL: %s" % url) | |
768 base = "http://svn.python.org/view/*checkout*%s/" % path | |
769 logging.info("Guessed Python base = %s", base) | |
770 elif netloc.endswith("svn.collab.net"): | |
771 if path.startswith("/repos/"): | |
772 path = path[6:] | |
773 base = "http://svn.collab.net/viewvc/*checkout*%s/" % path | |
774 logging.info("Guessed CollabNet base = %s", base) | |
775 elif netloc.endswith(".googlecode.com"): | |
776 path = path + "/" | |
777 base = urlparse.urlunparse(("http", netloc, path, params, | |
778 query, fragment)) | |
779 logging.info("Guessed Google Code base = %s", base) | |
780 else: | |
781 path = path + "/" | |
782 base = urlparse.urlunparse((scheme, netloc, path, params, | |
783 query, fragment)) | |
784 logging.info("Guessed base = %s", base) | |
785 return base | |
786 if required: | |
787 ErrorExit("Can't find URL in output from svn info") | |
788 return None | |
789 | |
790 def GenerateDiff(self, args): | |
791 cmd = ["svn", "diff"] | |
792 if self.options.revision: | |
793 cmd += ["-r", self.options.revision] | |
794 cmd.extend(args) | |
795 data = RunShell(cmd) | |
796 count = 0 | |
797 for line in data.splitlines(): | |
798 if line.startswith("Index:") or line.startswith("Property changes on:"): | |
799 count += 1 | |
800 logging.info(line) | |
801 if not count: | |
802 ErrorExit("No valid patches found in output from svn diff") | |
803 return data | |
804 | |
805 def _CollapseKeywords(self, content, keyword_str): | |
806 """Collapses SVN keywords.""" | |
807 # svn cat translates keywords but svn diff doesn't. As a result of this | |
808 # behavior, patching.PatchChunks() fails with a chunk mismatch error. | |
809 # This part was originally written by the Review Board development team | |
810 # who had the same problem (http://reviews.review-board.org/r/276/). | |
811 # Mapping of keywords to known aliases | |
812 svn_keywords = { | |
813 # Standard keywords | |
814 'Date': ['Date', 'LastChangedDate'], | |
815 'Revision': ['Revision', 'LastChangedRevision', 'Rev'], | |
816 'Author': ['Author', 'LastChangedBy'], | |
817 'HeadURL': ['HeadURL', 'URL'], | |
818 'Id': ['Id'], | |
819 | |
820 # Aliases | |
821 'LastChangedDate': ['LastChangedDate', 'Date'], | |
822 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'], | |
823 'LastChangedBy': ['LastChangedBy', 'Author'], | |
824 'URL': ['URL', 'HeadURL'], | |
825 } | |
826 | |
827 def repl(m): | |
828 if m.group(2): | |
829 return "$%s::%s$" % (m.group(1), " " * len(m.group(3))) | |
830 return "$%s$" % m.group(1) | |
831 keywords = [keyword | |
832 for name in keyword_str.split(" ") | |
833 for keyword in svn_keywords.get(name, [])] | |
834 return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content) | |
835 | |
836 def GetUnknownFiles(self): | |
837 status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True) | |
838 unknown_files = [] | |
839 for line in status.split("\n"): | |
840 if line and line[0] == "?": | |
841 unknown_files.append(line) | |
842 return unknown_files | |
843 | |
844 def ReadFile(self, filename): | |
845 """Returns the contents of a file.""" | |
846 file = open(filename, 'rb') | |
847 result = "" | |
848 try: | |
849 result = file.read() | |
850 finally: | |
851 file.close() | |
852 return result | |
853 | |
854 def GetStatus(self, filename): | |
855 """Returns the status of a file.""" | |
856 if not self.options.revision: | |
857 status = RunShell(["svn", "status", "--ignore-externals", filename]) | |
858 if not status: | |
859 ErrorExit("svn status returned no output for %s" % filename) | |
860 status_lines = status.splitlines() | |
861 # If file is in a cl, the output will begin with | |
862 # "\n--- Changelist 'cl_name':\n". See | |
863 # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt | |
864 if (len(status_lines) == 3 and | |
865 not status_lines[0] and | |
866 status_lines[1].startswith("--- Changelist")): | |
867 status = status_lines[2] | |
868 else: | |
869 status = status_lines[0] | |
870 # If we have a revision to diff against we need to run "svn list" | |
871 # for the old and the new revision and compare the results to get | |
872 # the correct status for a file. | |
873 else: | |
874 dirname, relfilename = os.path.split(filename) | |
875 if dirname not in self.svnls_cache: | |
876 cmd = ["svn", "list", "-r", self.rev_start, dirname or "."] | |
877 out, returncode = RunShellWithReturnCode(cmd) | |
878 if returncode: | |
879 ErrorExit("Failed to get status for %s." % filename) | |
880 old_files = out.splitlines() | |
881 args = ["svn", "list"] | |
882 if self.rev_end: | |
883 args += ["-r", self.rev_end] | |
884 cmd = args + [dirname or "."] | |
885 out, returncode = RunShellWithReturnCode(cmd) | |
886 if returncode: | |
887 ErrorExit("Failed to run command %s" % cmd) | |
888 self.svnls_cache[dirname] = (old_files, out.splitlines()) | |
889 old_files, new_files = self.svnls_cache[dirname] | |
890 if relfilename in old_files and relfilename not in new_files: | |
891 status = "D " | |
892 elif relfilename in old_files and relfilename in new_files: | |
893 status = "M " | |
894 else: | |
895 status = "A " | |
896 return status | |
897 | |
898 def GetBaseFile(self, filename): | |
899 status = self.GetStatus(filename) | |
900 base_content = None | |
901 new_content = None | |
902 | |
903 # If a file is copied its status will be "A +", which signifies | |
904 # "addition-with-history". See "svn st" for more information. We need to | |
905 # upload the original file or else diff parsing will fail if the file was | |
906 # edited. | |
907 if status[0] == "A" and status[3] != "+": | |
908 # We'll need to upload the new content if we're adding a binary file | |
909 # since diff's output won't contain it. | |
910 mimetype = RunShell(["svn", "propget", "svn:mime-type", filename], | |
911 silent_ok=True) | |
912 base_content = "" | |
913 is_binary = mimetype and not mimetype.startswith("text/") | |
914 if is_binary and self.IsImage(filename): | |
915 new_content = self.ReadFile(filename) | |
916 elif (status[0] in ("M", "D", "R") or | |
917 (status[0] == "A" and status[3] == "+") or # Copied file. | |
918 (status[0] == " " and status[1] == "M")): # Property change. | |
919 args = [] | |
920 if self.options.revision: | |
921 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) | |
922 else: | |
923 # Don't change filename, it's needed later. | |
924 url = filename | |
925 args += ["-r", "BASE"] | |
926 cmd = ["svn"] + args + ["propget", "svn:mime-type", url] | |
927 mimetype, returncode = RunShellWithReturnCode(cmd) | |
928 if returncode: | |
929 # File does not exist in the requested revision. | |
930 # Reset mimetype, it contains an error message. | |
931 mimetype = "" | |
932 get_base = False | |
933 is_binary = mimetype and not mimetype.startswith("text/") | |
934 if status[0] == " ": | |
935 # Empty base content just to force an upload. | |
936 base_content = "" | |
937 elif is_binary: | |
938 if self.IsImage(filename): | |
939 get_base = True | |
940 if status[0] == "M": | |
941 if not self.rev_end: | |
942 new_content = self.ReadFile(filename) | |
943 else: | |
944 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end) | |
945 new_content = RunShell(["svn", "cat", url], | |
946 universal_newlines=True, silent_ok=True) | |
947 else: | |
948 base_content = "" | |
949 else: | |
950 get_base = True | |
951 | |
952 if get_base: | |
953 if is_binary: | |
954 universal_newlines = False | |
955 else: | |
956 universal_newlines = True | |
957 if self.rev_start: | |
958 # "svn cat -r REV delete_file.txt" doesn't work. cat requires | |
959 # the full URL with "@REV" appended instead of using "-r" option. | |
960 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) | |
961 base_content = RunShell(["svn", "cat", url], | |
962 universal_newlines=universal_newlines, | |
963 silent_ok=True) | |
964 else: | |
965 base_content = RunShell(["svn", "cat", filename], | |
966 universal_newlines=universal_newlines, | |
967 silent_ok=True) | |
968 if not is_binary: | |
969 args = [] | |
970 if self.rev_start: | |
971 url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) | |
972 else: | |
973 url = filename | |
974 args += ["-r", "BASE"] | |
975 cmd = ["svn"] + args + ["propget", "svn:keywords", url] | |
976 keywords, returncode = RunShellWithReturnCode(cmd) | |
977 if keywords and not returncode: | |
978 base_content = self._CollapseKeywords(base_content, keywords) | |
979 else: | |
980 StatusUpdate("svn status returned unexpected output: %s" % status) | |
981 sys.exit(1) | |
982 return base_content, new_content, is_binary, status[0:5] | |
983 | |
984 | |
985 class GitVCS(VersionControlSystem): | |
986 """Implementation of the VersionControlSystem interface for Git.""" | |
987 | |
988 def __init__(self, options): | |
989 super(GitVCS, self).__init__(options) | |
990 # Map of filename -> hash of base file. | |
991 self.base_hashes = {} | |
992 | |
993 def GenerateDiff(self, extra_args): | |
994 # This is more complicated than svn's GenerateDiff because we must convert | |
995 # the diff output to include an svn-style "Index:" line as well as record | |
996 # the hashes of the base files, so we can upload them along with our diff. | |
997 if self.options.revision: | |
998 extra_args = [self.options.revision] + extra_args | |
999 gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args) | |
1000 svndiff = [] | |
1001 filecount = 0 | |
1002 filename = None | |
1003 for line in gitdiff.splitlines(): | |
1004 match = re.match(r"diff --git a/(.*) b/.*$", line) | |
1005 if match: | |
1006 filecount += 1 | |
1007 filename = match.group(1) | |
1008 svndiff.append("Index: %s\n" % filename) | |
1009 else: | |
1010 # The "index" line in a git diff looks like this (long hashes elided): | |
1011 # index 82c0d44..b2cee3f 100755 | |
1012 # We want to save the left hash, as that identifies the base file. | |
1013 match = re.match(r"index (\w+)\.\.", line) | |
1014 if match: | |
1015 self.base_hashes[filename] = match.group(1) | |
1016 svndiff.append(line + "\n") | |
1017 if not filecount: | |
1018 ErrorExit("No valid patches found in output from git diff") | |
1019 return "".join(svndiff) | |
1020 | |
1021 def GetUnknownFiles(self): | |
1022 status = RunShell(["git", "ls-files", "--exclude-standard", "--others"], | |
1023 silent_ok=True) | |
1024 return status.splitlines() | |
1025 | |
1026 def GetBaseFile(self, filename): | |
1027 hash = self.base_hashes[filename] | |
1028 base_content = None | |
1029 new_content = None | |
1030 is_binary = False | |
1031 if hash == "0" * 40: # All-zero hash indicates no base file. | |
1032 status = "A" | |
1033 base_content = "" | |
1034 else: | |
1035 status = "M" | |
1036 base_content, returncode = RunShellWithReturnCode(["git", "show", hash]) | |
1037 if returncode: | |
1038 ErrorExit("Got error status from 'git show %s'" % hash) | |
1039 return (base_content, new_content, is_binary, status) | |
1040 | |
1041 | |
1042 class MercurialVCS(VersionControlSystem): | |
1043 """Implementation of the VersionControlSystem interface for Mercurial.""" | |
1044 | |
1045 def __init__(self, options, repo_dir): | |
1046 super(MercurialVCS, self).__init__(options) | |
1047 # Absolute path to repository (we can be in a subdir) | |
1048 self.repo_dir = os.path.normpath(repo_dir) | |
1049 # Compute the subdir | |
1050 cwd = os.path.normpath(os.getcwd()) | |
1051 assert cwd.startswith(self.repo_dir) | |
1052 self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/") | |
1053 if self.options.revision: | |
1054 self.base_rev = self.options.revision | |
1055 else: | |
1056 self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip() | |
1057 | |
1058 def _GetRelPath(self, filename): | |
1059 """Get the path of a file relative to the current directory, | |
1060 given its path relative to the repo root.""" | |
1061 assert filename.startswith(self.subdir), filename | |
1062 return filename[len(self.subdir):].lstrip(r"\/") | |
1063 | |
1064 def GenerateDiff(self, extra_args): | |
1065 # If no file specified, restrict to the current subdir | |
1066 extra_args = extra_args or ["."] | |
1067 cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args | |
1068 data = RunShell(cmd, silent_ok=True) | |
1069 svndiff = [] | |
1070 filecount = 0 | |
1071 for line in data.splitlines(): | |
1072 m = re.match("diff --git a/(\S+) b/(\S+)", line) | |
1073 if m: | |
1074 # Modify the line so it looks as if it came from svn diff. | |
1075 # With this modification no changes on the server side are required | |
1076 # to make upload.py work with Mercurial repos. | |
1077 # NOTE: for proper handling of moved/copied files, we have to use | |
1078 # the second filename. | |
1079 filename = m.group(2) | |
1080 svndiff.append("Index: %s" % filename) | |
1081 svndiff.append("=" * 67) | |
1082 filecount += 1 | |
1083 logging.info(line) | |
1084 else: | |
1085 svndiff.append(line) | |
1086 if not filecount: | |
1087 ErrorExit("No valid patches found in output from hg diff") | |
1088 return "\n".join(svndiff) + "\n" | |
1089 | |
1090 def GetUnknownFiles(self): | |
1091 """Return a list of files unknown to the VCS.""" | |
1092 args = [] | |
1093 status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."], | |
1094 silent_ok=True) | |
1095 unknown_files = [] | |
1096 for line in status.splitlines(): | |
1097 st, fn = line.split(" ", 1) | |
1098 if st == "?": | |
1099 unknown_files.append(fn) | |
1100 return unknown_files | |
1101 | |
1102 def GetBaseFile(self, filename): | |
1103 # "hg status" and "hg cat" both take a path relative to the current subdir | |
1104 # rather than to the repo root, but "hg diff" has given us the full path | |
1105 # to the repo root. | |
1106 base_content = "" | |
1107 new_content = None | |
1108 is_binary = False | |
1109 oldrelpath = relpath = self._GetRelPath(filename) | |
1110 # "hg status -C" returns two lines for moved/copied files, one otherwise | |
1111 out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath]) | |
1112 out = out.splitlines() | |
1113 # HACK: strip error message about missing file/directory if it isn't in | |
1114 # the working copy | |
1115 if out[0].startswith('%s: ' % relpath): | |
1116 out = out[1:] | |
1117 if len(out) > 1: | |
1118 # Moved/copied => considered as modified, use old filename to | |
1119 # retrieve base contents | |
1120 oldrelpath = out[1].strip() | |
1121 status = "M" | |
1122 else: | |
1123 status, _ = out[0].split(' ', 1) | |
1124 if status != "A": | |
1125 base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath], | |
1126 silent_ok=True) | |
1127 is_binary = "\0" in base_content # Mercurial's heuristic | |
1128 if status != "R": | |
1129 new_content = open(relpath, "rb").read() | |
1130 is_binary = is_binary or "\0" in new_content | |
1131 if is_binary and base_content: | |
1132 # Fetch again without converting newlines | |
1133 base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath], | |
1134 silent_ok=True, universal_newlines=False) | |
1135 if not is_binary or not self.IsImage(relpath): | |
1136 new_content = None | |
1137 return base_content, new_content, is_binary, status | |
1138 | |
1139 | |
1140 # NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync. | |
1141 def SplitPatch(data): | |
1142 """Splits a patch into separate pieces for each file. | |
1143 | |
1144 Args: | |
1145 data: A string containing the output of svn diff. | |
1146 | |
1147 Returns: | |
1148 A list of 2-tuples (filename, text), where text is the svn diff output | |
1149 pertaining to filename. | |
1150 """ | |
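# For example (filenames and hunks are illustrative), a diff of the form | |
#   Index: foo.py | |
#   <hunks for foo.py> | |
#   Index: bar.py | |
#   <hunks for bar.py> | |
# is returned as [("foo.py", <foo.py text>), ("bar.py", <bar.py text>)]. | |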
1151 patches = [] | |
1152 filename = None | |
1153 diff = [] | |
1154 for line in data.splitlines(True): | |
1155 new_filename = None | |
1156 if line.startswith('Index:'): | |
1157 unused, new_filename = line.split(':', 1) | |
1158 new_filename = new_filename.strip() | |
1159 elif line.startswith('Property changes on:'): | |
1160 unused, temp_filename = line.split(':', 1) | |
1161 # When a file is modified, paths use '/' between directories; however, | |
1162 # when a property is modified, '\' is used on Windows. Make them | |
1163 # consistent, otherwise the file shows up twice. | |
1164 temp_filename = temp_filename.strip().replace('\\', '/') | |
1165 if temp_filename != filename: | |
1166 # File has property changes but no modifications, create a new diff. | |
1167 new_filename = temp_filename | |
1168 if new_filename: | |
1169 if filename and diff: | |
1170 patches.append((filename, ''.join(diff))) | |
1171 filename = new_filename | |
1172 diff = [line] | |
1173 continue | |
1174 if diff is not None: | |
1175 diff.append(line) | |
1176 if filename and diff: | |
1177 patches.append((filename, ''.join(diff))) | |
1178 return patches | |
1179 | |
1180 | |
1181 def UploadSeparatePatches(issue, rpc_server, patchset, data, options): | |
1182 """Uploads a separate patch for each file in the diff output. | |
1183 | |
1184 Returns a list of [patch_key, filename] for each file. | |
1185 """ | |
1186 patches = SplitPatch(data) | |
1187 rv = [] | |
1188 for patch in patches: | |
1189 if len(patch[1]) > MAX_UPLOAD_SIZE: | |
1190 print ("Not uploading the patch for " + patch[0] + | |
1191 " because the file is too large.") | |
1192 continue | |
1193 form_fields = [("filename", patch[0])] | |
1194 if not options.download_base: | |
1195 form_fields.append(("content_upload", "1")) | |
1196 files = [("data", "data.diff", patch[1])] | |
1197 ctype, body = EncodeMultipartFormData(form_fields, files) | |
1198 url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) | |
1199 print "Uploading patch for " + patch[0] | |
1200 response_body = rpc_server.Send(url, body, content_type=ctype) | |
1201 lines = response_body.splitlines() | |
1202 if not lines or lines[0] != "OK": | |
1203 StatusUpdate(" --> %s" % response_body) | |
1204 sys.exit(1) | |
1205 rv.append([lines[1], patch[0]]) | |
1206 return rv | |
1207 | |
1208 | |
1209 def GuessVCS(options): | |
1210 """Helper to guess the version control system. | |
1211 | |
1212 This examines the current directory, guesses which VersionControlSystem | |
1213 we're using, and returns an instance of the appropriate class. Exit with an | |
1214 error if we can't figure it out. | |
1215 | |
1216 Returns: | |
1217 A VersionControlSystem instance. Exits if the VCS can't be guessed. | |
1218 """ | |
1219 # Mercurial has a command to get the base directory of a repository. | |
1220 # Try running it, but don't die if we don't have hg installed. | |
1221 # NOTE: we try Mercurial first as it can sit on top of an SVN working copy. | |
1222 try: | |
1223 out, returncode = RunShellWithReturnCode(["hg", "root"]) | |
1224 if returncode == 0: | |
1225 return MercurialVCS(options, out.strip()) | |
1226 except OSError, (errno, message): | |
1227 if errno != 2: # ENOENT -- they don't have hg installed. | |
1228 raise | |
1229 | |
1230 # Subversion has a .svn in all working directories. | |
1231 if os.path.isdir('.svn'): | |
1232 logging.info("Guessed VCS = Subversion") | |
1233 return SubversionVCS(options) | |
1234 | |
1235 # Git has a command to test if you're in a git tree. | |
1236 # Try running it, but don't die if we don't have git installed. | |
1237 try: | |
1238 out, returncode = RunShellWithReturnCode(["git", "rev-parse", | |
1239 "--is-inside-work-tree"]) | |
1240 if returncode == 0: | |
1241 return GitVCS(options) | |
1242 except OSError, (errno, message): | |
1243 if errno != 2: # ENOENT -- they don't have git installed. | |
1244 raise | |
1245 | |
1246 ErrorExit(("Could not guess version control system. " | |
1247 "Are you in a working copy directory?")) | |
1248 | |
1249 | |
1250 def RealMain(argv, data=None): | |
1251 """The real main function. | |
1252 | |
1253 Args: | |
1254 argv: Command line arguments. | |
1255 data: Diff contents. If None (default) the diff is generated by | |
1256 the VersionControlSystem implementation returned by GuessVCS(). | |
1257 | |
1258 Returns: | |
1259 A 2-tuple (issue id, patchset id). | |
1260 The patchset id is None if the base files are not uploaded by this | |
1261 script (applies only to SVN checkouts). | |
1262 """ | |
1263 logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:" | |
1264 "%(lineno)s %(message)s ")) | |
1265 os.environ['LC_ALL'] = 'C' | |
1266 options, args = parser.parse_args(argv[1:]) | |
1267 global verbosity | |
1268 verbosity = options.verbose | |
1269 if verbosity >= 3: | |
1270 logging.getLogger().setLevel(logging.DEBUG) | |
1271 elif verbosity >= 2: | |
1272 logging.getLogger().setLevel(logging.INFO) | |
1273 vcs = GuessVCS(options) | |
1274 if isinstance(vcs, SubversionVCS): | |
1275 # base field is only allowed for Subversion. | |
1276 # Note: Fetching base files may become deprecated in future releases. | |
1277 base = vcs.GuessBase(options.download_base) | |
1278 else: | |
1279 base = None | |
1280 if not base and options.download_base: | |
1281 options.download_base = True | |
1282 logging.info("Enabled upload of base file") | |
1283 if not options.assume_yes: | |
1284 vcs.CheckForUnknownFiles() | |
1285 if data is None: | |
1286 data = vcs.GenerateDiff(args) | |
1287 files = vcs.GetBaseFiles(data) | |
1288 if verbosity >= 1: | |
1289 print "Upload server:", options.server, "(change with -s/--server)" | |
1290 if options.issue: | |
1291 prompt = "Message describing this patch set: " | |
1292 else: | |
1293 prompt = "New issue subject: " | |
1294 message = options.message or raw_input(prompt).strip() | |
1295 if not message: | |
1296 ErrorExit("A non-empty message is required") | |
1297 rpc_server = GetRpcServer(options) | |
1298 form_fields = [("subject", message)] | |
1299 if base: | |
1300 form_fields.append(("base", base)) | |
1301 if options.issue: | |
1302 form_fields.append(("issue", str(options.issue))) | |
1303 if options.email: | |
1304 form_fields.append(("user", options.email)) | |
1305 if options.reviewers: | |
1306 for reviewer in options.reviewers.split(','): | |
1307 if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1: | |
1308 ErrorExit("Invalid email address: %s" % reviewer) | |
1309 form_fields.append(("reviewers", options.reviewers)) | |
1310 if options.cc: | |
1311 for cc in options.cc.split(','): | |
1312 if "@" in cc and not cc.split("@")[1].count(".") == 1: | |
1313 ErrorExit("Invalid email address: %s" % cc) | |
1314 form_fields.append(("cc", options.cc)) | |
1315 description = options.description | |
1316 if options.description_file: | |
1317 if options.description: | |
1318 ErrorExit("Can't specify description and description_file") | |
1319 file = open(options.description_file, 'r') | |
1320 description = file.read() | |
1321 file.close() | |
1322 if description: | |
1323 form_fields.append(("description", description)) | |
1324 # Send a hash of each base file so the server can determine whether a copy | |
1325 # already exists in an earlier patchset. | |
1326 base_hashes = "" | |
1327 for file, info in files.iteritems(): | |
1328 if not info[0] is None: | |
1329 checksum = md5.new(info[0]).hexdigest() | |
1330 if base_hashes: | |
1331 base_hashes += "|" | |
1332 base_hashes += checksum + ":" + file | |
1333 form_fields.append(("base_hashes", base_hashes)) | |
1334 # If we're uploading base files, don't send the email before the uploads, so | |
1335 # that it contains the file status. | |
1336 if options.send_mail and options.download_base: | |
1337 form_fields.append(("send_mail", "1")) | |
1338 if not options.download_base: | |
1339 form_fields.append(("content_upload", "1")) | |
1340 if len(data) > MAX_UPLOAD_SIZE: | |
1341 print "Patch is large, so uploading file patches separately." | |
1342 uploaded_diff_file = [] | |
1343 form_fields.append(("separate_patches", "1")) | |
1344 else: | |
1345 uploaded_diff_file = [("data", "data.diff", data)] | |
1346 ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file) | |
1347 response_body = rpc_server.Send("/upload", body, content_type=ctype) | |
1348 patchset = None | |
1349 if not options.download_base or not uploaded_diff_file: | |
1350 lines = response_body.splitlines() | |
1351 if len(lines) >= 2: | |
1352 msg = lines[0] | |
1353 patchset = lines[1].strip() | |
1354 patches = [x.split(" ", 1) for x in lines[2:]] | |
1355 else: | |
1356 msg = response_body | |
1357 else: | |
1358 msg = response_body | |
1359 StatusUpdate(msg) | |
1360 if not response_body.startswith("Issue created.") and \ | |
1361 not response_body.startswith("Issue updated."): | |
1362 sys.exit(0) | |
1363 issue = msg[msg.rfind("/")+1:] | |
1364 | |
1365 if not uploaded_diff_file: | |
1366 result = UploadSeparatePatches(issue, rpc_server, patchset, data, options) | |
1367 if not options.download_base: | |
1368 patches = result | |
1369 | |
1370 if not options.download_base: | |
1371 vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files) | |
1372 if options.send_mail: | |
1373 rpc_server.Send("/" + issue + "/mail", payload="") | |
1374 return issue, patchset | |
1375 | |
1376 | |
1377 def main(): | |
1378 try: | |
1379 RealMain(sys.argv) | |
1380 except KeyboardInterrupt: | |
1381 print | |
1382 StatusUpdate("Interrupted.") | |
1383 sys.exit(1) | |
1384 | |
1385 | |
1386 if __name__ == "__main__": | |
1387 main() | |