OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # coding: utf-8 | |
3 # | |
4 # Copyright 2007 Google Inc. | |
5 # | |
6 # Licensed under the Apache License, Version 2.0 (the "License"); | |
7 # you may not use this file except in compliance with the License. | |
8 # You may obtain a copy of the License at | |
9 # | |
10 # http://www.apache.org/licenses/LICENSE-2.0 | |
11 # | |
12 # Unless required by applicable law or agreed to in writing, software | |
13 # distributed under the License is distributed on an "AS IS" BASIS, | |
14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
15 # See the License for the specific language governing permissions and | |
16 # limitations under the License. | |
17 | |
18 """Tool for uploading diffs from a version control system to the codereview app. | |
19 | |
20 Usage summary: upload.py [options] [-- diff_options] [path...] | |
21 | |
22 Diff options are passed to the diff command of the underlying system. | |
23 | |
24 Supported version control systems: | |
25 Git | |
26 Mercurial | |
27 Subversion | |
28 Perforce | |
29 CVS | |
30 | |
31 It is important for Git/Mercurial users to specify a tree/node/branch to diff | |
32 against by using the '--rev' option. | |
33 """ | |
34 # This code is derived from appcfg.py in the App Engine SDK (open source), | |
35 # and from ASPN recipe #146306. | |
36 | |
37 import ConfigParser | |
38 import cookielib | |
39 import errno | |
40 import fnmatch | |
41 import getpass | |
42 import logging | |
43 import marshal | |
44 import mimetypes | |
45 import optparse | |
46 import os | |
47 import re | |
48 import socket | |
49 import subprocess | |
50 import sys | |
51 import urllib | |
52 import urllib2 | |
53 import urlparse | |
54 | |
55 # The md5 module was deprecated in Python 2.5. | |
56 try: | |
57 from hashlib import md5 | |
58 except ImportError: | |
59 from md5 import md5 | |
60 | |
61 try: | |
62 import readline | |
63 except ImportError: | |
64 pass | |
65 | |
66 try: | |
67 import keyring | |
68 except ImportError: | |
69 keyring = None | |
70 | |
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
# NOTE(review): presumably overwritten from the -q/-v/--noisy flags once the
# command line is parsed -- confirm in main().
verbosity = 1

# The account type used for authentication.
# This line could be changed by the review server (see handler for
# upload.py).
AUTH_ACCOUNT_TYPE = "GOOGLE"

# URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be
# changed by the review server (see handler for upload.py).
DEFAULT_REVIEW_SERVER = "codereview.appspot.com"

# Max size of patch or base file.
MAX_UPLOAD_SIZE = 900 * 1024

# Constants for version control names. Used by GuessVCSName.
VCS_GIT = "Git"
VCS_MERCURIAL = "Mercurial"
VCS_SUBVERSION = "Subversion"
VCS_PERFORCE = "Perforce"
VCS_CVS = "CVS"
VCS_UNKNOWN = "Unknown"

# Maps lowercase aliases and abbreviations (e.g. "hg", "svn", "p4") to the
# canonical VCS_* names above.
VCS_ABBREVIATIONS = {
    VCS_MERCURIAL.lower(): VCS_MERCURIAL,
    "hg": VCS_MERCURIAL,
    VCS_SUBVERSION.lower(): VCS_SUBVERSION,
    "svn": VCS_SUBVERSION,
    VCS_PERFORCE.lower(): VCS_PERFORCE,
    "p4": VCS_PERFORCE,
    VCS_GIT.lower(): VCS_GIT,
    VCS_CVS.lower(): VCS_CVS,
}

# The result of parsing Subversion's [auto-props] setting.
# Lazily populated (None until parsed).
svn_auto_props_map = None
111 | |
def GetEmail(prompt):
  """Prompts the user for their email address and returns it.

  The most recently used address is cached in
  ~/.last_codereview_email_address and offered as the default; pressing
  enter accepts it.  A newly typed address replaces the cached one.
  """
  cache_path = os.path.expanduser("~/.last_codereview_email_address")
  cached = ""
  if os.path.exists(cache_path):
    try:
      cache_file = open(cache_path, "r")
      cached = cache_file.readline().strip("\n")
      cache_file.close()
      prompt += " [%s]" % cached
    except IOError:
      # Unreadable cache: just prompt without a default.
      pass
  entered = raw_input(prompt + ": ").strip()
  if not entered:
    return cached
  try:
    cache_file = open(cache_path, "w")
    cache_file.write(entered)
    cache_file.close()
  except IOError:
    # Best effort: failing to save the cache is not fatal.
    pass
  return entered
142 | |
143 | |
144 def StatusUpdate(msg): | |
145 """Print a status message to stdout. | |
146 | |
147 If 'verbosity' is greater than 0, print the message. | |
148 | |
149 Args: | |
150 msg: The string to print. | |
151 """ | |
152 if verbosity > 0: | |
153 print msg | |
154 | |
155 | |
def ErrorExit(msg):
  """Write an error message to stderr, then terminate with exit status 1."""
  sys.stderr.write("%s\n" % msg)
  sys.exit(1)
160 | |
161 | |
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    # args is the parsed key=value response body from ClientLogin, e.g.
    # {"Error": "BadAuthentication", "Info": "InvalidSecondFactor"}.
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # Stash the error code eagerly; see the reason property below.
    self._reason = args["Error"]
    self.info = args.get("Info", None)

  @property
  def reason(self):
    # reason is a property on python 2.7 but a member variable on <=2.6.
    # self.args is modified so it cannot be used as-is so save the value in
    # self._reason.
    return self._reason
177 | |
178 | |
179 class AbstractRpcServer(object): | |
180 """Provides a common interface for a simple RPC server.""" | |
181 | |
182 def __init__(self, host, auth_function, host_override=None, extra_headers={}, | |
183 save_cookies=False, account_type=AUTH_ACCOUNT_TYPE): | |
184 """Creates a new AbstractRpcServer. | |
185 | |
186 Args: | |
187 host: The host to send requests to. | |
188 auth_function: A function that takes no arguments and returns an | |
189 (email, password) tuple when called. Will be called if authentication | |
190 is required. | |
191 host_override: The host header to send to the server (defaults to host). | |
192 extra_headers: A dict of extra headers to append to every request. | |
193 save_cookies: If True, save the authentication cookies to local disk. | |
194 If False, use an in-memory cookiejar instead. Subclasses must | |
195 implement this functionality. Defaults to False. | |
196 account_type: Account type used for authentication. Defaults to | |
197 AUTH_ACCOUNT_TYPE. | |
198 """ | |
199 self.host = host | |
200 if (not self.host.startswith("http://") and | |
201 not self.host.startswith("https://")): | |
202 self.host = "http://" + self.host | |
203 self.host_override = host_override | |
204 self.auth_function = auth_function | |
205 self.authenticated = False | |
206 self.extra_headers = extra_headers | |
207 self.save_cookies = save_cookies | |
208 self.account_type = account_type | |
209 self.opener = self._GetOpener() | |
210 if self.host_override: | |
211 logging.info("Server: %s; Host: %s", self.host, self.host_override) | |
212 else: | |
213 logging.info("Server: %s", self.host) | |
214 | |
215 def _GetOpener(self): | |
216 """Returns an OpenerDirector for making HTTP requests. | |
217 | |
218 Returns: | |
219 A urllib2.OpenerDirector object. | |
220 """ | |
221 raise NotImplementedError() | |
222 | |
223 def _CreateRequest(self, url, data=None): | |
224 """Creates a new urllib request.""" | |
225 logging.debug("Creating request for: '%s' with payload:\n%s", url, data) | |
226 req = urllib2.Request(url, data=data, headers={"Accept": "text/plain"}) | |
227 if self.host_override: | |
228 req.add_header("Host", self.host_override) | |
229 for key, value in self.extra_headers.iteritems(): | |
230 req.add_header(key, value) | |
231 return req | |
232 | |
233 def _GetAuthToken(self, email, password): | |
234 """Uses ClientLogin to authenticate the user, returning an auth token. | |
235 | |
236 Args: | |
237 email: The user's email address | |
238 password: The user's password | |
239 | |
240 Raises: | |
241 ClientLoginError: If there was an error authenticating with ClientLogin. | |
242 HTTPError: If there was some other form of HTTP error. | |
243 | |
244 Returns: | |
245 The authentication token returned by ClientLogin. | |
246 """ | |
247 account_type = self.account_type | |
248 if self.host.endswith(".google.com"): | |
249 # Needed for use inside Google. | |
250 account_type = "HOSTED" | |
251 req = self._CreateRequest( | |
252 url="https://www.google.com/accounts/ClientLogin", | |
253 data=urllib.urlencode({ | |
254 "Email": email, | |
255 "Passwd": password, | |
256 "service": "ah", | |
257 "source": "rietveld-codereview-upload", | |
258 "accountType": account_type, | |
259 }), | |
260 ) | |
261 try: | |
262 response = self.opener.open(req) | |
263 response_body = response.read() | |
264 response_dict = dict(x.split("=") | |
265 for x in response_body.split("\n") if x) | |
266 return response_dict["Auth"] | |
267 except urllib2.HTTPError, e: | |
268 if e.code == 403: | |
269 body = e.read() | |
270 response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) | |
271 raise ClientLoginError(req.get_full_url(), e.code, e.msg, | |
272 e.headers, response_dict) | |
273 else: | |
274 raise | |
275 | |
276 def _GetAuthCookie(self, auth_token): | |
277 """Fetches authentication cookies for an authentication token. | |
278 | |
279 Args: | |
280 auth_token: The authentication token returned by ClientLogin. | |
281 | |
282 Raises: | |
283 HTTPError: If there was an error fetching the authentication cookies. | |
284 """ | |
285 # This is a dummy value to allow us to identify when we're successful. | |
286 continue_location = "http://localhost/" | |
287 args = {"continue": continue_location, "auth": auth_token} | |
288 req = self._CreateRequest("%s/_ah/login?%s" % | |
289 (self.host, urllib.urlencode(args))) | |
290 try: | |
291 response = self.opener.open(req) | |
292 except urllib2.HTTPError, e: | |
293 response = e | |
294 if (response.code != 302 or | |
295 response.info()["location"] != continue_location): | |
296 raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, | |
297 response.headers, response.fp) | |
298 self.authenticated = True | |
299 | |
300 def _Authenticate(self): | |
301 """Authenticates the user. | |
302 | |
303 The authentication process works as follows: | |
304 1) We get a username and password from the user | |
305 2) We use ClientLogin to obtain an AUTH token for the user | |
306 (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). | |
307 3) We pass the auth token to /_ah/login on the server to obtain an | |
308 authentication cookie. If login was successful, it tries to redirect | |
309 us to the URL we provided. | |
310 | |
311 If we attempt to access the upload API without first obtaining an | |
312 authentication cookie, it returns a 401 response (or a 302) and | |
313 directs us to authenticate ourselves with ClientLogin. | |
314 """ | |
315 for i in range(3): | |
316 credentials = self.auth_function() | |
317 try: | |
318 auth_token = self._GetAuthToken(credentials[0], credentials[1]) | |
319 except ClientLoginError, e: | |
320 print >>sys.stderr, '' | |
321 if e.reason == "BadAuthentication": | |
322 if e.info == "InvalidSecondFactor": | |
323 print >>sys.stderr, ( | |
324 "Use an application-specific password instead " | |
325 "of your regular account password.\n" | |
326 "See http://www.google.com/" | |
327 "support/accounts/bin/answer.py?answer=185833") | |
328 else: | |
329 print >>sys.stderr, "Invalid username or password." | |
330 elif e.reason == "CaptchaRequired": | |
331 print >>sys.stderr, ( | |
332 "Please go to\n" | |
333 "https://www.google.com/accounts/DisplayUnlockCaptcha\n" | |
334 "and verify you are a human. Then try again.\n" | |
335 "If you are using a Google Apps account the URL is:\n" | |
336 "https://www.google.com/a/yourdomain.com/UnlockCaptcha") | |
337 elif e.reason == "NotVerified": | |
338 print >>sys.stderr, "Account not verified." | |
339 elif e.reason == "TermsNotAgreed": | |
340 print >>sys.stderr, "User has not agreed to TOS." | |
341 elif e.reason == "AccountDeleted": | |
342 print >>sys.stderr, "The user account has been deleted." | |
343 elif e.reason == "AccountDisabled": | |
344 print >>sys.stderr, "The user account has been disabled." | |
345 break | |
346 elif e.reason == "ServiceDisabled": | |
347 print >>sys.stderr, ("The user's access to the service has been " | |
348 "disabled.") | |
349 elif e.reason == "ServiceUnavailable": | |
350 print >>sys.stderr, "The service is not available; try again later." | |
351 else: | |
352 # Unknown error. | |
353 raise | |
354 print >>sys.stderr, '' | |
355 continue | |
356 self._GetAuthCookie(auth_token) | |
357 return | |
358 | |
359 def Send(self, request_path, payload=None, | |
360 content_type="application/octet-stream", | |
361 timeout=None, | |
362 extra_headers=None, | |
363 **kwargs): | |
364 """Sends an RPC and returns the response. | |
365 | |
366 Args: | |
367 request_path: The path to send the request to, eg /api/appversion/create. | |
368 payload: The body of the request, or None to send an empty request. | |
369 content_type: The Content-Type header to use. | |
370 timeout: timeout in seconds; default None i.e. no timeout. | |
371 (Note: for large requests on OS X, the timeout doesn't work right.) | |
372 extra_headers: Dict containing additional HTTP headers that should be | |
373 included in the request (string header names mapped to their values), | |
374 or None to not include any additional headers. | |
375 kwargs: Any keyword arguments are converted into query string parameters. | |
376 | |
377 Returns: | |
378 The response body, as a string. | |
379 """ | |
380 # TODO: Don't require authentication. Let the server say | |
381 # whether it is necessary. | |
382 if not self.authenticated: | |
383 self._Authenticate() | |
384 | |
385 old_timeout = socket.getdefaulttimeout() | |
386 socket.setdefaulttimeout(timeout) | |
387 try: | |
388 tries = 0 | |
389 while True: | |
390 tries += 1 | |
391 args = dict(kwargs) | |
392 url = "%s%s" % (self.host, request_path) | |
393 if args: | |
394 url += "?" + urllib.urlencode(args) | |
395 req = self._CreateRequest(url=url, data=payload) | |
396 req.add_header("Content-Type", content_type) | |
397 if extra_headers: | |
398 for header, value in extra_headers.items(): | |
399 req.add_header(header, value) | |
400 try: | |
401 f = self.opener.open(req) | |
402 response = f.read() | |
403 f.close() | |
404 return response | |
405 except urllib2.HTTPError, e: | |
406 if tries > 3: | |
407 raise | |
408 elif e.code == 401 or e.code == 302: | |
409 self._Authenticate() | |
410 elif e.code == 301: | |
411 # Handle permanent redirect manually. | |
412 url = e.info()["location"] | |
413 url_loc = urlparse.urlparse(url) | |
414 self.host = '%s://%s' % (url_loc[0], url_loc[1]) | |
415 elif e.code >= 500: | |
416 ErrorExit(e.read()) | |
417 else: | |
418 raise | |
419 finally: | |
420 socket.setdefaulttimeout(old_timeout) | |
421 | |
422 | |
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    # The opener is assembled by hand so that HTTPRedirectHandler is NOT
    # installed: redirects must surface as HTTPError so the base class's
    # _GetAuthCookie can detect the expected 302.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      # Persistent jar in the user's home directory; a loadable cookie file
      # counts as already authenticated.
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
469 | |
470 | |
class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
  """Help formatter that reclaims horizontal space.

  Group options are rendered without the extra indentation level, and the
  short/long forms of an option are collapsed, e.g. "-o ARG, --opt=ARG"
  becomes "-o --opt ARG".
  """

  def format_heading(self, heading):
    # Group headings start at column zero, with no blank filler.
    return heading + ":\n"

  def format_option(self, option):
    # Undo one indentation level while the base class renders the option,
    # then restore it for whatever is formatted next.
    self.dedent()
    try:
      return optparse.HelpFormatter.format_option(self, option)
    finally:
      self.indent()

  def format_option_strings(self, option):
    self.set_long_opt_delimiter(" ")
    rendered = optparse.HelpFormatter.format_option_strings(self, option)
    variants = rendered.split(", ")
    if len(variants) < 2:
      return rendered
    if option.takes_value():
      # Keep the METAVAR only on the last (long) form.
      variants = [v.split()[0] for v in variants[:-1]] + variants[-1:]
    return " ".join(variants)
495 | |
496 | |
# Command-line option definitions.  The groups below mirror the stages of an
# upload: logging, review-server selection, issue metadata, patch generation,
# and VCS-specific tweaks.
parser = optparse.OptionParser(
    usage="%prog [options] [-- diff_options] [path...]",
    add_help_option=False,
    formatter=CondensedHelpFormatter()
)
# -h is registered manually (add_help_option=False above) as a plain
# store_true flag; NOTE(review): presumably the help text is printed
# elsewhere once options are parsed -- confirm in main().
parser.add_option("-h", "--help", action="store_true",
                  help="Show this help message and exit.")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs.")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
group.add_option("--print_diffs", dest="print_diffs", action="store_true",
                 help="Print full diffs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default=DEFAULT_REVIEW_SERVER,
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to '%default'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
group.add_option("--account_type", action="store", dest="account_type",
                 metavar="TYPE", default=AUTH_ACCOUNT_TYPE,
                 choices=["GOOGLE", "HOSTED"],
                 help=("Override the default account type "
                       "(defaults to '%default', "
                       "valid choices are 'GOOGLE' and 'HOSTED')."))
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-t", "--title", action="store", dest="title",
                 help="New issue subject or new patch set title")
group.add_option("-m", "--message", action="store", dest="message",
                 default=None,
                 help="New issue description or new patch set message")
group.add_option("-F", "--file", action="store", dest="file",
                 default=None, help="Read the message above from file.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
group.add_option("--private", action="store_true", dest="private",
                 default=False,
                 help="Make the issue restricted to reviewers and those CCed")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("--base_url", action="store", dest="base_url", default=None,
                 help="Base URL path for files (listed as \"Base URL\" when "
                      "viewing issue). If omitted, will be guessed automatically "
                      "for SVN repos and left blank for others.")
group.add_option("--download_base", action="store_true",
                 dest="download_base", default=False,
                 help="Base files will be downloaded by the server "
                      "(side-by-side diffs may not work on files with CRs).")
group.add_option("--rev", action="store", dest="revision",
                 metavar="REV", default=None,
                 help="Base revision/branch/tree to diff against. Use "
                      "rev1:rev2 range to review already committed changeset.")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
group.add_option("-p", "--send_patch", action="store_true",
                 dest="send_patch", default=False,
                 help="Same as --send_mail, but include diff as an "
                      "attachment, and prepend email subject with 'PATCH:'.")
group.add_option("--vcs", action="store", dest="vcs",
                 metavar="VCS", default=None,
                 help=("Version control system (optional, usually upload.py "
                       "already guesses the right VCS)."))
group.add_option("--emulate_svn_auto_props", action="store_true",
                 dest="emulate_svn_auto_props", default=False,
                 help=("Emulate Subversion's auto properties feature."))
# Git-specific
group = parser.add_option_group("Git-specific options")
group.add_option("--git_similarity", action="store", dest="git_similarity",
                 metavar="SIM", type="int", default=50,
                 help=("Set the minimum similarity index for detecting renames "
                       "and copies. See `git diff -C`. (default 50)."))
group.add_option("--git_no_find_copies", action="store_false", default=True,
                 dest="git_find_copies",
                 help=("Prevents git from looking for copies (default off)."))
# Perforce-specific
group = parser.add_option_group("Perforce-specific options "
                                "(overrides P4 environment variables)")
group.add_option("--p4_port", action="store", dest="p4_port",
                 metavar="P4_PORT", default=None,
                 help=("Perforce server and port (optional)"))
group.add_option("--p4_changelist", action="store", dest="p4_changelist",
                 metavar="P4_CHANGELIST", default=None,
                 help=("Perforce changelist id"))
group.add_option("--p4_client", action="store", dest="p4_client",
                 metavar="P4_CLIENT", default=None,
                 help=("Perforce client/workspace"))
group.add_option("--p4_user", action="store", dest="p4_user",
                 metavar="P4_USER", default=None,
                 help=("Perforce user"))
613 | |
614 | |
class KeyringCreds(object):
  """Credentials provider that consults the system keyring before prompting.

  GetUserCredentials is suitable as the auth_function callback of
  AbstractRpcServer: it returns an (email, password) tuple.
  """

  def __init__(self, server, host, email):
    self.server = server  # Review server name, used in the email prompt.
    self.host = host      # Keyring service key for stored passwords.
    self.email = email    # Preset email (e.g. from --email), or None.
    # Emails whose keyring password has already been tried/stored once;
    # later calls for these fall back to an interactive prompt.
    self.accounts_seen = set()

  def GetUserCredentials(self):
    """Prompts the user for a username and password.

    Only use keyring on the initial call. If the keyring contains the wrong
    password, we want to give the user a chance to enter another one.
    """
    # Create a local alias to the email variable to avoid Python's crazy
    # scoping rules.
    global keyring
    email = self.email
    if email is None:
      email = GetEmail("Email (login for uploading to %s)" % self.server)
    password = None
    if keyring and not email in self.accounts_seen:
      try:
        password = keyring.get_password(self.host, email)
      except:
        # Sadly, we have to trap all errors here as
        # gnomekeyring.IOError inherits from object. :/
        # After any failure, keyring is disabled for the rest of the run.
        print "Failed to get password from keyring"
        keyring = None
    if password is not None:
      print "Using password from system keyring."
      self.accounts_seen.add(email)
    else:
      password = getpass.getpass("Password for %s: " % email)
      if keyring:
        answer = raw_input("Store password in system keyring?(y/N) ").strip()
        if answer == "y":
          keyring.set_password(self.host, email, password)
          self.accounts_seen.add(email)
    return (email, password)
654 | |
655 | |
def GetRpcServer(server, email=None, host_override=None, save_cookies=True,
                 account_type=AUTH_ACCOUNT_TYPE):
  """Returns an instance of an AbstractRpcServer.

  Args:
    server: String containing the review server URL.
    email: String containing user's email address.
    host_override: If not None, string containing an alternate hostname to use
      in the host header.
    save_cookies: Whether authentication cookies should be saved to disk.
    account_type: Account type for authentication, either 'GOOGLE'
      or 'HOSTED'. Defaults to AUTH_ACCOUNT_TYPE.

  Returns:
    A new HttpRpcServer, on which RPC calls can be made.
  """
  host = (host_override or server).lower()
  if not re.match(r'(http://)?localhost([:/]|$)', host):
    # Talking to a real review server: credentials come from the system
    # keyring or an interactive prompt.
    return HttpRpcServer(server,
                         KeyringCreds(server, host, email).GetUserCredentials,
                         host_override=host_override,
                         save_cookies=save_cookies,
                         account_type=account_type)

  # This is the dev_appserver: fake the login cookie instead of going
  # through ClientLogin.
  if email is None:
    email = "test@example.com"
    logging.info("Using debug user %s. Override with --email" % email)
  rpc_server = HttpRpcServer(
      server,
      lambda: (email, "password"),
      host_override=host_override,
      extra_headers={"Cookie":
                     'dev_appserver_login="%s:False"' % email},
      save_cookies=save_cookies,
      account_type=account_type)
  # Don't try to talk to ClientLogin.
  rpc_server.authenticated = True
  return rpc_server
696 | |
697 | |
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
      uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'

  def _utf8(value):
    # Payload pieces must be byte strings; encode unicode text as UTF-8.
    if isinstance(value, unicode):
      return value.encode('utf-8')
    return value

  parts = []
  for (name, value) in fields:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        _utf8(value),
    ])
  for (name, filename, value) in files:
    parts.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
            (name, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        _utf8(value),
    ])
  parts.append('--' + BOUNDARY + '--')
  parts.append('')
  body = CRLF.join(parts)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body
735 | |
736 | |
def GetContentType(filename):
  """Return the MIME type guessed from filename, or a generic binary type."""
  guessed, _encoding = mimetypes.guess_type(filename)
  if guessed:
    return guessed
  return 'application/octet-stream'
740 | |
741 | |
# Use a shell for subcommands on Windows to get a PATH search.
# Passed as the shell= argument to subprocess.Popen in the Run* helpers below.
use_shell = sys.platform.startswith("win")
744 | |
745 def RunShellWithReturnCodeAndStderr(command, print_output=False, | |
746 universal_newlines=True, | |
747 env=os.environ): | |
748 """Executes a command and returns the output from stdout, stderr and the retur
n code. | |
749 | |
750 Args: | |
751 command: Command to execute. | |
752 print_output: If True, the output is printed to stdout. | |
753 If False, both stdout and stderr are ignored. | |
754 universal_newlines: Use universal_newlines flag (default: True). | |
755 | |
756 Returns: | |
757 Tuple (stdout, stderr, return code) | |
758 """ | |
759 logging.info("Running %s", command) | |
760 env = env.copy() | |
761 env['LC_MESSAGES'] = 'C' | |
762 p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, | |
763 shell=use_shell, universal_newlines=universal_newlines, | |
764 env=env) | |
765 if print_output: | |
766 output_array = [] | |
767 while True: | |
768 line = p.stdout.readline() | |
769 if not line: | |
770 break | |
771 print line.strip("\n") | |
772 output_array.append(line) | |
773 output = "".join(output_array) | |
774 else: | |
775 output = p.stdout.read() | |
776 p.wait() | |
777 errout = p.stderr.read() | |
778 if print_output and errout: | |
779 print >>sys.stderr, errout | |
780 p.stdout.close() | |
781 p.stderr.close() | |
782 return output, errout, p.returncode | |
783 | |
def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True,
                           env=os.environ):
  """Executes a command and returns (stdout, return code), discarding stderr."""
  stdout, _stderr, retcode = RunShellWithReturnCodeAndStderr(
      command, print_output, universal_newlines, env)
  return stdout, retcode
791 | |
def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False, env=os.environ):
  """Run command and return its stdout, exiting the program on failure.

  Exits via ErrorExit if the command returns a non-zero status, or if it
  produces no output and silent_ok is False.
  """
  data, retcode = RunShellWithReturnCode(command, print_output,
                                         universal_newlines, env)
  if retcode:
    ErrorExit("Got error status from %s:\n%s" % (command, data))
  if not data and not silent_ok:
    ErrorExit("No output from %s" % command)
  return data
801 | |
802 | |
803 class VersionControlSystem(object): | |
804 """Abstract base class providing an interface to the VCS.""" | |
805 | |
806 def __init__(self, options): | |
807 """Constructor. | |
808 | |
809 Args: | |
810 options: Command line options. | |
811 """ | |
812 self.options = options | |
813 | |
814 def GetGUID(self): | |
815 """Return string to distinguish the repository from others, for example to | |
816 query all opened review issues for it""" | |
817 raise NotImplementedError( | |
818 "abstract method -- subclass %s must override" % self.__class__) | |
819 | |
820 def PostProcessDiff(self, diff): | |
821 """Return the diff with any special post processing this VCS needs, e.g. | |
822 to include an svn-style "Index:".""" | |
823 return diff | |
824 | |
825 def GenerateDiff(self, args): | |
826 """Return the current diff as a string. | |
827 | |
828 Args: | |
829 args: Extra arguments to pass to the diff command. | |
830 """ | |
831 raise NotImplementedError( | |
832 "abstract method -- subclass %s must override" % self.__class__) | |
833 | |
834 def GetUnknownFiles(self): | |
835 """Return a list of files unknown to the VCS.""" | |
836 raise NotImplementedError( | |
837 "abstract method -- subclass %s must override" % self.__class__) | |
838 | |
839 def CheckForUnknownFiles(self): | |
840 """Show an "are you sure?" prompt if there are unknown files.""" | |
841 unknown_files = self.GetUnknownFiles() | |
842 if unknown_files: | |
843 print "The following files are not added to version control:" | |
844 for line in unknown_files: | |
845 print line | |
846 prompt = "Are you sure to continue?(y/N) " | |
847 answer = raw_input(prompt).strip() | |
848 if answer != "y": | |
849 ErrorExit("User aborted") | |
850 | |
851 def GetBaseFile(self, filename): | |
852 """Get the content of the upstream version of a file. | |
853 | |
854 Returns: | |
855 A tuple (base_content, new_content, is_binary, status) | |
856 base_content: The contents of the base file. | |
857 new_content: For text files, this is empty. For binary files, this is | |
858 the contents of the new file, since the diff output won't contain | |
859 information to reconstruct the current file. | |
860 is_binary: True iff the file is binary. | |
861 status: The status of the file. | |
862 """ | |
863 | |
864 raise NotImplementedError( | |
865 "abstract method -- subclass %s must override" % self.__class__) | |
866 | |
867 | |
868 def GetBaseFiles(self, diff): | |
869 """Helper that calls GetBase file for each file in the patch. | |
870 | |
871 Returns: | |
872 A dictionary that maps from filename to GetBaseFile's tuple. Filenames | |
873 are retrieved based on lines that start with "Index:" or | |
874 "Property changes on:". | |
875 """ | |
876 files = {} | |
877 for line in diff.splitlines(True): | |
878 if line.startswith('Index:') or line.startswith('Property changes on:'): | |
879 unused, filename = line.split(':', 1) | |
880 # On Windows if a file has property changes its filename uses '\' | |
881 # instead of '/'. | |
882 filename = filename.strip().replace('\\', '/') | |
883 files[filename] = self.GetBaseFile(filename) | |
884 return files | |
885 | |
886 | |
887 def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, | |
888 files): | |
889 """Uploads the base files (and if necessary, the current ones as well).""" | |
890 | |
891 def UploadFile(filename, file_id, content, is_binary, status, is_base): | |
892 """Uploads a file to the server.""" | |
893 file_too_large = False | |
894 if is_base: | |
895 type = "base" | |
896 else: | |
897 type = "current" | |
898 if len(content) > MAX_UPLOAD_SIZE: | |
899 print ("Not uploading the %s file for %s because it's too large." % | |
900 (type, filename)) | |
901 file_too_large = True | |
902 content = "" | |
903 checksum = md5(content).hexdigest() | |
904 if options.verbose > 0 and not file_too_large: | |
905 print "Uploading %s file for %s" % (type, filename) | |
906 url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) | |
907 form_fields = [("filename", filename), | |
908 ("status", status), | |
909 ("checksum", checksum), | |
910 ("is_binary", str(is_binary)), | |
911 ("is_current", str(not is_base)), | |
912 ] | |
913 if file_too_large: | |
914 form_fields.append(("file_too_large", "1")) | |
915 if options.email: | |
916 form_fields.append(("user", options.email)) | |
917 ctype, body = EncodeMultipartFormData(form_fields, | |
918 [("data", filename, content)]) | |
919 response_body = rpc_server.Send(url, body, | |
920 content_type=ctype) | |
921 if not response_body.startswith("OK"): | |
922 StatusUpdate(" --> %s" % response_body) | |
923 sys.exit(1) | |
924 | |
925 patches = dict() | |
926 [patches.setdefault(v, k) for k, v in patch_list] | |
927 for filename in patches.keys(): | |
928 base_content, new_content, is_binary, status = files[filename] | |
929 file_id_str = patches.get(filename) | |
930 if file_id_str.find("nobase") != -1: | |
931 base_content = None | |
932 file_id_str = file_id_str[file_id_str.rfind("_") + 1:] | |
933 file_id = int(file_id_str) | |
934 if base_content != None: | |
935 UploadFile(filename, file_id, base_content, is_binary, status, True) | |
936 if new_content != None: | |
937 UploadFile(filename, file_id, new_content, is_binary, status, False) | |
938 | |
939 def IsImage(self, filename): | |
940 """Returns true if the filename has an image extension.""" | |
941 mimetype = mimetypes.guess_type(filename)[0] | |
942 if not mimetype: | |
943 return False | |
944 return mimetype.startswith("image/") | |
945 | |
946 def IsBinaryData(self, data): | |
947 """Returns true if data contains a null byte.""" | |
948 # Derived from how Mercurial's heuristic, see | |
949 # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229 | |
950 return bool(data and "\0" in data) | |
951 | |
952 | |
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.  options.revision, when set, is either
        a single revision "N" or a range "N:M".
    """
    super(SubversionVCS, self).__init__(options)
    if self.options.revision:
      match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
      if not match:
        ErrorExit("Invalid Subversion revision %s." % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)  # None when no ":end" part was given.
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
    self.svnls_cache = {}
    # Base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)

  def GetGUID(self):
    """Returns the repository UUID reported by "svn info"."""
    return self._GetInfo("Repository UUID")

  def GuessBase(self, required):
    """Wrapper for _GuessBase; returns the base URL cached by __init__."""
    return self.svn_base

  def _GuessBase(self, required):
    """Returns base URL for current diff.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    url = self._GetInfo("URL")
    if url:
      scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
      guess = ""
      # TODO(anatoli) - repository specific hacks should be handled by server
      if netloc == "svn.python.org" and scheme == "svn+ssh":
        path = "projects" + path
        scheme = "http"
        guess = "Python "
      elif netloc.endswith(".googlecode.com"):
        scheme = "http"
        guess = "Google Code "
      path = path + "/"
      base = urlparse.urlunparse((scheme, netloc, path, params,
                                  query, fragment))
      logging.info("Guessed %sbase = %s", guess, base)
      return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def _GetInfo(self, key):
    """Parses 'svn info' for current dir. Returns value for key or None"""
    for line in RunShell(["svn", "info"]).splitlines():
      if line.startswith(key + ": "):
        return line.split(":", 1)[1].strip()

  def _EscapeFilename(self, filename):
    """Escapes filename for SVN commands.

    A trailing "@" stops svn from treating an embedded "@" as a peg-revision
    separator.
    """
    if "@" in filename and not filename.endswith("@"):
      filename = "%s@" % filename
    return filename

  def GenerateDiff(self, args):
    """Returns the output of "svn diff", exiting if it contains no patches.

    Args:
      args: Extra arguments to pass to "svn diff".
    """
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date': ['Date', 'LastChangedDate'],
      'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
      'Author': ['Author', 'LastChangedBy'],
      'HeadURL': ['HeadURL', 'URL'],
      'Id': ['Id'],

      # Aliases
      'LastChangedDate': ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy': ['LastChangedBy', 'Author'],
      'URL': ['URL', 'HeadURL'],
    }

    def repl(m):
      # Expanded keyword ("$Kw:: value $") -> collapse but keep the width;
      # plain keyword -> collapse to "$Kw$".
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)

  def GetUnknownFiles(self):
    """Returns the "svn status" lines for files unknown to svn ("?")."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def ReadFile(self, filename):
    """Returns the contents of a file."""
    # NOTE(review): "file" shadows the Python 2 builtin of the same name;
    # left unchanged here.
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result

  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status = RunShell(["svn", "status", "--ignore-externals",
                         self._EscapeFilename(filename)])
      if not status:
        ErrorExit("svn status returned no output for %s" % filename)
      status_lines = status.splitlines()
      # If file is in a cl, the output will begin with
      # "\n--- Changelist 'cl_name':\n". See
      # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
      if (len(status_lines) == 3 and
          not status_lines[0] and
          status_lines[1].startswith("--- Changelist")):
        status = status_lines[2]
      else:
        status = status_lines[0]
    # If we have a revision to diff against we need to run "svn list"
    # for the old and the new revision and compare the results to get
    # the correct status for a file.
    else:
      dirname, relfilename = os.path.split(filename)
      if dirname not in self.svnls_cache:
        cmd = ["svn", "list", "-r", self.rev_start,
               self._EscapeFilename(dirname) or "."]
        out, err, returncode = RunShellWithReturnCodeAndStderr(cmd)
        if returncode:
          # Directory might not yet exist at start revision
          # svn: Unable to find repository location for 'abc' in revision nnn
          if re.match('^svn: Unable to find repository location for .+ in revision \d+', err):
            old_files = ()
          else:
            ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
        else:
          old_files = out.splitlines()
        args = ["svn", "list"]
        if self.rev_end:
          args += ["-r", self.rev_end]
        cmd = args + [self._EscapeFilename(dirname) or "."]
        out, returncode = RunShellWithReturnCode(cmd)
        if returncode:
          ErrorExit("Failed to run command %s" % cmd)
        self.svnls_cache[dirname] = (old_files, out.splitlines())
      old_files, new_files = self.svnls_cache[dirname]
      # Present only in the old listing -> deleted; in both -> modified;
      # only in the new listing -> added.
      if relfilename in old_files and relfilename not in new_files:
        status = "D   "
      elif relfilename in old_files and relfilename in new_files:
        status = "M   "
      else:
        status = "A   "
    return status

  def GetBaseFile(self, filename):
    """See VersionControlSystem.GetBaseFile."""
    status = self.GetStatus(filename)
    base_content = None
    new_content = None

    # If a file is copied its status will be "A +", which signifies
    # "addition-with-history". See "svn st" for more information. We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if status[0] == "A" and status[3] != "+":
      # We'll need to upload the new content if we're adding a binary file
      # since diff's output won't contain it.
      mimetype = RunShell(["svn", "propget", "svn:mime-type",
                           self._EscapeFilename(filename)], silent_ok=True)
      base_content = ""
      is_binary = bool(mimetype) and not mimetype.startswith("text/")
      if is_binary:
        new_content = self.ReadFile(filename)
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+") or  # Copied file.
          (status[0] == " " and status[1] == "M")):  # Property change.
      args = []
      if self.options.revision:
        # filename must not be escaped.  We already append an "@" here.
        url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
      else:
        # Don't change filename, it's needed later.
        url = filename
        args += ["-r", "BASE"]
      cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
      mimetype, returncode = RunShellWithReturnCode(cmd)
      if returncode:
        # File does not exist in the requested revision.
        # Reset mimetype, it contains an error message.
        mimetype = ""
      else:
        mimetype = mimetype.strip()
      get_base = False
      # this test for binary is exactly the test prescribed by the
      # official SVN docs at
      # http://subversion.apache.org/faq.html#binary-files
      is_binary = (bool(mimetype) and
                   not mimetype.startswith("text/") and
                   mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
      if status[0] == " ":
        # Empty base content just to force an upload.
        base_content = ""
      elif is_binary:
        get_base = True
        if status[0] == "M":
          if not self.rev_end:
            new_content = self.ReadFile(filename)
          else:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end)
            new_content = RunShell(["svn", "cat", url],
                                   universal_newlines=True, silent_ok=True)
      else:
        get_base = True

      if get_base:
        # Binary content must not go through newline translation.
        if is_binary:
          universal_newlines = False
        else:
          universal_newlines = True
        if self.rev_start:
          # "svn cat -r REV delete_file.txt" doesn't work. cat requires
          # the full URL with "@REV" appended instead of using "-r" option.
          url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          base_content = RunShell(["svn", "cat", url],
                                  universal_newlines=universal_newlines,
                                  silent_ok=True)
        else:
          base_content, ret_code = RunShellWithReturnCode(
            ["svn", "cat", self._EscapeFilename(filename)],
            universal_newlines=universal_newlines)
          if ret_code and status[0] == "R":
            # It's a replaced file without local history (see issue208).
            # The base file needs to be fetched from the server.
            url = "%s/%s" % (self.svn_base, filename)
            base_content = RunShell(["svn", "cat", url],
                                    universal_newlines=universal_newlines,
                                    silent_ok=True)
          elif ret_code:
            ErrorExit("Got error status from 'svn cat %s'" % filename)
        if not is_binary:
          # Collapse expanded svn:keywords so the base matches diff output.
          args = []
          if self.rev_start:
            url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
          else:
            url = filename
            args += ["-r", "BASE"]
          cmd = ["svn"] + args + ["propget", "svn:keywords", url]
          keywords, returncode = RunShellWithReturnCode(cmd)
          if keywords and not returncode:
            base_content = self._CollapseKeywords(base_content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      sys.exit(1)
    return base_content, new_content, is_binary, status[0:5]
1236 | |
1237 | |
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    super(GitVCS, self).__init__(options)
    # Map of filename -> (hash before, hash after) of base file.
    # Hashes for "no such file" are represented as None.
    self.hashes = {}
    # Map of new filename -> old filename for renames.
    self.renames = {}

  def GetGUID(self):
    """Returns the hash of the repository's root commit as its identifier."""
    revlist = RunShell("git rev-list --parents HEAD".split()).splitlines()
    # M-A: Return the 1st root hash, there could be multiple when a
    # subtree is merged. In that case, more analysis would need to
    # be done to figure out which HEAD is the 'most representative'.
    for r in revlist:
      if ' ' not in r:
        return r

  def PostProcessDiff(self, gitdiff):
    """Converts the diff output to include an svn-style "Index:" line as well
    as record the hashes of the files, so we can upload them along with our
    diff."""
    # Special hash value used by git to indicate "no such content".
    NULL_HASH = "0"*40

    def IsFileNew(filename):
      # A file is new when its "before" hash was recorded as None.
      return filename in self.hashes and self.hashes[filename][0] is None

    def AddSubversionPropertyChange(filename):
      """Add svn's property change information into the patch if given file is
      new file.

      We use Subversion's auto-props setting to retrieve its property.
      See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for
      Subversion's [auto-props] setting.
      """
      if self.options.emulate_svn_auto_props and IsFileNew(filename):
        svnprops = GetSubversionPropertyChanges(filename)
        if svnprops:
          svndiff.append("\n" + svnprops + "\n")

    svndiff = []
    filecount = 0
    filename = None
    for line in gitdiff.splitlines():
      match = re.match(r"diff --git a/(.*) b/(.*)$", line)
      if match:
        # Add auto property here for previously seen file.
        if filename is not None:
          AddSubversionPropertyChange(filename)
        filecount += 1
        # Intentionally use the "after" filename so we can show renames.
        filename = match.group(2)
        svndiff.append("Index: %s\n" % filename)
        if match.group(1) != match.group(2):
          self.renames[match.group(2)] = match.group(1)
      else:
        # The "index" line in a git diff looks like this (long hashes elided):
        # index 82c0d44..b2cee3f 100755
        # We want to save the left hash, as that identifies the base file.
        match = re.match(r"index (\w+)\.\.(\w+)", line)
        if match:
          before, after = (match.group(1), match.group(2))
          if before == NULL_HASH:
            before = None
          if after == NULL_HASH:
            after = None
          self.hashes[filename] = (before, after)
      # Every original diff line is kept (after any inserted "Index:" line).
      svndiff.append(line + "\n")
    if not filecount:
      ErrorExit("No valid patches found in output from git diff")
    # Add auto property for the last seen file.
    assert filename is not None
    AddSubversionPropertyChange(filename)
    return "".join(svndiff)

  def GenerateDiff(self, extra_args):
    """Returns the git diff of the working tree, post-processable for upload.

    Args:
      extra_args: Extra arguments to pass to "git diff"; a "--rev" option
        of the form "a:b" becomes two positional revision arguments.
    """
    extra_args = extra_args[:]
    if self.options.revision:
      if ":" in self.options.revision:
        extra_args = self.options.revision.split(":", 1) + extra_args
      else:
        extra_args = [self.options.revision] + extra_args

    # --no-ext-diff is broken in some versions of Git, so try to work around
    # this by overriding the environment (but there is still a problem if the
    # git config key "diff.external" is used).
    env = os.environ.copy()
    if "GIT_EXTERNAL_DIFF" in env:
      del env["GIT_EXTERNAL_DIFF"]
    # -M/-C will not print the diff for the deleted file when a file is renamed.
    # This is confusing because the original file will not be shown on the
    # review when a file is renamed. So, get a diff with ONLY deletes, then
    # append a diff (with rename detection), without deletes.
    cmd = [
        "git", "diff", "--no-color", "--no-ext-diff", "--full-index",
        "--ignore-submodules",
    ]
    diff = RunShell(
        cmd + ["--no-renames", "--diff-filter=D"] + extra_args,
        env=env, silent_ok=True)
    if self.options.git_find_copies:
      similarity_options = ["--find-copies-harder", "-l100000",
                            "-C%s" % self.options.git_similarity ]
    else:
      similarity_options = ["-M%s" % self.options.git_similarity ]
    diff += RunShell(
        cmd + ["--diff-filter=AMCRT"] + similarity_options + extra_args,
        env=env, silent_ok=True)

    # The CL could be only file deletion or not. So accept silent diff for both
    # commands then check for an empty diff manually.
    if not diff:
      ErrorExit("No output from %s" % (cmd + extra_args))
    return diff

  def GetUnknownFiles(self):
    """Returns files unknown to git (untracked and not ignored)."""
    status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
                      silent_ok=True)
    return status.splitlines()

  def GetFileContent(self, file_hash, is_binary):
    """Returns the content of a file identified by its git hash.

    Args:
      file_hash: Blob hash recorded by PostProcessDiff.
      is_binary: If True, content is read without newline translation.
    """
    data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
                                           universal_newlines=not is_binary)
    if retcode:
      ErrorExit("Got error status from 'git show %s'" % file_hash)
    return data

  def GetBaseFile(self, filename):
    """See VersionControlSystem.GetBaseFile."""
    hash_before, hash_after = self.hashes.get(filename, (None,None))
    base_content = None
    new_content = None
    status = None

    if filename in self.renames:
      status = "A +"  # Match svn attribute name for renames.
      if filename not in self.hashes:
        # If a rename doesn't change the content, we never get a hash.
        base_content = RunShell(
            ["git", "show", "HEAD:" + filename], silent_ok=True)
    elif not hash_before:
      status = "A"
      base_content = ""
    elif not hash_after:
      status = "D"
    else:
      status = "M"

    # NOTE(review): base_content is still None here for plain edits/deletes
    # and IsBinaryData(None) is False, so this only detects binaryness for
    # the unchanged-rename fast path above.
    is_binary = self.IsBinaryData(base_content)
    is_image = self.IsImage(filename)

    # Grab the before/after content if we need it.
    # Grab the base content if we don't have it already.
    if base_content is None and hash_before:
      base_content = self.GetFileContent(hash_before, is_binary)
    # Only include the "after" file if it's an image; otherwise
    # it is reconstructed from the diff.
    if is_image and hash_after:
      new_content = self.GetFileContent(hash_after, is_binary)

    return (base_content, new_content, is_binary, status)
1402 | |
1403 | |
class CVSVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for CVS."""

  def __init__(self, options):
    super(CVSVCS, self).__init__(options)

  def GetGUID(self):
    """For now we don't know how to get repository ID for CVS"""
    return

  def GetOriginalContent_(self, filename):
    """Returns the base (checked-in) content of filename.

    Runs "cvs up" to restore the pristine copy, then reads it back with
    CRLF line endings converted to LF.
    """
    RunShell(["cvs", "up", filename], silent_ok=True)
    # TODO need detect file content encoding
    content_file = open(filename)
    try:
      content = content_file.read()
    finally:
      # Close explicitly instead of relying on refcounting.
      content_file.close()
    return content.replace("\r\n", "\n")

  def GetBaseFile(self, filename):
    """See VersionControlSystem.GetBaseFile."""
    base_content = None
    new_content = None
    status = "A"

    output, retcode = RunShellWithReturnCode(["cvs", "status", filename])
    if retcode:
      ErrorExit("Got error status from 'cvs status %s'" % filename)

    # Bug fix: the original elif branches used bare "output.find(...)" as the
    # condition; str.find returns -1 (truthy) when the substring is absent,
    # so those branches fired almost always.  Use "in" instead.
    if "Status: Locally Modified" in output:
      status = "M"
      # Move the modified file aside, let "cvs up" (inside
      # GetOriginalContent_) recreate the pristine copy, then restore it.
      temp_filename = "%s.tmp123" % filename
      os.rename(filename, temp_filename)
      try:
        base_content = self.GetOriginalContent_(filename)
      finally:
        # Always restore the locally modified file, even on failure.
        os.rename(temp_filename, filename)
    elif "Status: Locally Added" in output:
      status = "A"
      base_content = ""
    elif "Status: Needs Checkout" in output:
      status = "D"
      base_content = self.GetOriginalContent_(filename)

    return (base_content, new_content, self.IsBinaryData(base_content), status)

  def GenerateDiff(self, extra_args):
    """Returns the output of "cvs diff -u -N", exiting if it has no patches.

    Args:
      extra_args: Extra arguments to pass to "cvs diff".
    """
    cmd = ["cvs", "diff", "-u", "-N"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]

    cmd.extend(extra_args)
    data, retcode = RunShellWithReturnCode(cmd)
    count = 0
    # cvs diff exits 1 when differences were found; that is not an error.
    if retcode in [0, 1]:
      for line in data.splitlines():
        if line.startswith("Index:"):
          count += 1
          logging.info(line)

    if not count:
      ErrorExit("No valid patches found in output from cvs diff")

    return data

  def GetUnknownFiles(self):
    """Returns the "cvs diff" lines for files unknown to CVS ("?")."""
    data, retcode = RunShellWithReturnCode(["cvs", "diff"])
    if retcode not in [0, 1]:
      ErrorExit("Got error status from 'cvs diff':\n%s" % (data,))
    unknown_files = []
    for line in data.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files
1472 | |
class MercurialVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Mercurial."""

  def __init__(self, options, repo_dir):
    """Constructor.

    Args:
      options: Command line options.
      repo_dir: Path of the repository root (we may be in a subdir of it).
    """
    super(MercurialVCS, self).__init__(options)
    # Absolute path to repository (we can be in a subdir)
    self.repo_dir = os.path.normpath(repo_dir)
    # Compute the subdir
    cwd = os.path.normpath(os.getcwd())
    assert cwd.startswith(self.repo_dir)
    self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/")
    if self.options.revision:
      self.base_rev = self.options.revision
    else:
      # Default to the working directory's parent revision.
      self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip()

  def GetGUID(self):
    """Returns the node hash of revision 0, which identifies the repository.

    See chapter "Uniquely identifying a repository" in
    http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html
    """
    info = RunShell("hg log -r0 --template {node}".split())
    return info.strip()

  def _GetRelPath(self, filename):
    """Get relative path of a file according to the current directory,
    given its logical path in the repo."""
    absname = os.path.join(self.repo_dir, filename)
    return os.path.relpath(absname)

  def GenerateDiff(self, extra_args):
    """Returns an svn-style diff of the working copy against base_rev.

    Each "diff --git" header is replaced by an "Index:" line so the server
    can treat the patch like svn output.

    Args:
      extra_args: Extra arguments to pass to "hg diff".
    """
    cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args
    data = RunShell(cmd, silent_ok=True)
    svndiff = []
    filecount = 0
    for line in data.splitlines():
      m = re.match(r"diff --git a/(\S+) b/(\S+)", line)
      if m:
        # Modify line to make it look like as it comes from svn diff.
        # With this modification no changes on the server side are required
        # to make upload.py work with Mercurial repos.
        # NOTE: for proper handling of moved/copied files, we have to use
        # the second filename.
        filename = m.group(2)
        svndiff.append("Index: %s" % filename)
        svndiff.append("=" * 67)
        filecount += 1
        logging.info(line)
      else:
        svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # (An unused local "args" list from the original version was removed.)
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
                      silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """See VersionControlSystem.GetBaseFile."""
    # "hg status" and "hg cat" both take a path relative to the current subdir,
    # but "hg diff" has given us the path relative to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    status, _ = out[0].split(' ', 1)
    if len(out) > 1 and status == "A":
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    if ":" in self.base_rev:
      # For a range, the base content comes from the range's start revision.
      base_rev = self.base_rev.split(":", 1)[0]
    else:
      base_rev = self.base_rev
    if status != "A":
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True)
      is_binary = self.IsBinaryData(base_content)
    if status != "R":
      # Close the file handle explicitly instead of relying on refcounting.
      new_file = open(relpath, "rb")
      try:
        new_content = new_file.read()
      finally:
        new_file.close()
      is_binary = is_binary or self.IsBinaryData(new_content)
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath],
                              silent_ok=True, universal_newlines=False)
    if not is_binary:
      # For text files the new content is reconstructed from the diff.
      new_content = None
    return base_content, new_content, is_binary, status
1575 | |
1576 | |
class PerforceVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Perforce.

  Diffs are generated for a single pending changelist (--p4_changelist).
  All p4 invocations go through RunPerforceCommand*; "p4 -G" is used where
  structured (marshalled python dict) output is needed.
  """

  def __init__(self, options):
    # Calls ErrorExit() when no changelist id is given or --rev is used
    # (revisions are meaningless for a pending Perforce changelist).

    def ConfirmLogin():
      # Make sure we have a valid perforce session
      while True:
        data, retcode = self.RunPerforceCommandWithReturnCode(
            ["login", "-s"], marshal_output=True)
        if not data:
          ErrorExit("Error checking perforce login")
        if not retcode and (not "code" in data or data["code"] != "error"):
          break
        # Session missing/expired: prompt and retry until login succeeds.
        print "Enter perforce password: "
        self.RunPerforceCommandWithReturnCode(["login"])

    super(PerforceVCS, self).__init__(options)

    self.p4_changelist = options.p4_changelist
    if not self.p4_changelist:
      ErrorExit("A changelist id is required")
    if (options.revision):
      ErrorExit("--rev is not supported for perforce")

    self.p4_port = options.p4_port
    self.p4_client = options.p4_client
    self.p4_user = options.p4_user

    ConfirmLogin()

    # Default the issue title to the first line of the changelist
    # description when the user didn't pass --title.
    if not options.title:
      description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                            marshal_output=True)
      if description and "desc" in description:
        # Rietveld doesn't support multi-line descriptions
        raw_title = description["desc"].strip()
        lines = raw_title.splitlines()
        if len(lines):
          options.title = lines[0]

  def GetGUID(self):
    """For now we don't know how to get repository ID for Perforce"""
    return

  def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False,
                                       universal_newlines=True):
    """Runs "p4 <extra_args>" and returns (output, return code).

    Args:
      extra_args: List of arguments appended after the global p4 flags
        (-p/-c/-u are added from the corresponding command line options).
      marshal_output: If True, pass -G to p4 and unmarshal its output into
        a python dictionary.
      universal_newlines: Forwarded to RunShellWithReturnCode; pass False
        to get raw bytes (needed for binary file content).
    """
    args = ["p4"]
    if marshal_output:
      # -G makes perforce format its output as marshalled python objects
      args.extend(["-G"])
    if self.p4_port:
      args.extend(["-p", self.p4_port])
    if self.p4_client:
      args.extend(["-c", self.p4_client])
    if self.p4_user:
      args.extend(["-u", self.p4_user])
    args.extend(extra_args)

    data, retcode = RunShellWithReturnCode(
        args, print_output=False, universal_newlines=universal_newlines)
    if marshal_output and data:
      data = marshal.loads(data)
    return data, retcode

  def RunPerforceCommand(self, extra_args, marshal_output=False,
                         universal_newlines=True):
    """Like RunPerforceCommandWithReturnCode but ErrorExits on failure."""
    # This might be a good place to cache call results, since things like
    # describe or fstat might get called repeatedly.
    data, retcode = self.RunPerforceCommandWithReturnCode(
        extra_args, marshal_output, universal_newlines)
    if retcode:
      ErrorExit("Got error status from %s:\n%s" % (extra_args, data))
    return data

  def GetFileProperties(self, property_key_prefix = "", command = "describe"):
    """Maps each changed depot file to description[<prefix><index>].

    E.g. prefix "action" yields {depot path: p4 action},
    prefix "type" yields {depot path: p4 file type}.

    NOTE(review): the "command" parameter is accepted but never used --
    "p4 describe" is always run below; confirm whether callers passing
    "files" (IsBaseBinary) actually intended a different command.
    """
    description = self.RunPerforceCommand(["describe", self.p4_changelist],
                                          marshal_output=True)

    changed_files = {}
    file_index = 0
    # Try depotFile0, depotFile1, ... until we don't find a match
    while True:
      file_key = "depotFile%d" % file_index
      if file_key in description:
        filename = description[file_key]
        change_type = description[property_key_prefix + str(file_index)]
        changed_files[filename] = change_type
        file_index += 1
      else:
        break
    return changed_files

  def GetChangedFiles(self):
    """Returns a dict mapping changed depot file names to p4 actions."""
    return self.GetFileProperties("action")

  def GetUnknownFiles(self):
    # Perforce doesn't detect new files, they have to be explicitly added
    return []

  def IsBaseBinary(self, filename):
    """Returns True if the base (pre-change) file is a binary p4 type."""
    base_filename = self.GetBaseFilename(filename)
    return self.IsBinaryHelper(base_filename, "files")

  def IsPendingBinary(self, filename):
    """Returns True if the pending (new) file is a binary p4 type."""
    return self.IsBinaryHelper(filename, "describe")

  def IsBinaryHelper(self, filename, command):
    """Returns True unless the p4 file type of filename ends in "text"."""
    file_types = self.GetFileProperties("type", command)
    if not filename in file_types:
      ErrorExit("Trying to check binary status of unknown file %s." % filename)
    # This treats symlinks, macintosh resource files, temporary objects, and
    # unicode as binary. See the Perforce docs for more details:
    # http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html
    return not file_types[filename].endswith("text")

  def GetFileContent(self, filename, revision, is_binary):
    """Returns the content of filename, optionally pinned at #revision.

    For binary files universal newline translation is disabled so the
    bytes come back unmodified.
    """
    file_arg = filename
    if revision:
      file_arg += "#" + revision
    # -q suppresses the initial line that displays the filename and revision
    return self.RunPerforceCommand(["print", "-q", file_arg],
                                   universal_newlines=not is_binary)

  def GetBaseFilename(self, filename):
    """Returns the depot path the diff base should be taken from.

    For moved/branched files this is the integration source reported by
    "p4 fstat -Or"; otherwise it is filename itself.
    """
    actionsWithDifferentBases = [
        "move/add", # p4 move
        "branch", # p4 integrate (to a new file), similar to hg "add"
        "add", # p4 integrate (to a new file), after modifying the new file
    ]

    # We only see a different base for "add" if this is a downgraded branch
    # after a file was branched (integrated), then edited.
    if self.GetAction(filename) in actionsWithDifferentBases:
      # -Or shows information about pending integrations/moves
      fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename],
                                             marshal_output=True)

      baseFileKey = "resolveFromFile0" # I think it's safe to use only file0
      if baseFileKey in fstat_result:
        return fstat_result[baseFileKey]

    return filename

  def GetBaseRevision(self, filename):
    """Returns the workspace ("have") revision of filename, or None."""
    base_filename = self.GetBaseFilename(filename)

    have_result = self.RunPerforceCommand(["have", base_filename],
                                          marshal_output=True)
    if "haveRev" in have_result:
      return have_result["haveRev"]

  def GetLocalFilename(self, filename):
    """Maps a depot path to the local filesystem path via "p4 where"."""
    where = self.RunPerforceCommand(["where", filename], marshal_output=True)
    if "path" in where:
      return where["path"]

  def GenerateDiff(self, args):
    """Builds an svn-style unified diff for every file in the changelist.

    Args:
      args: Extra arguments forwarded to "p4 diff" for modified files.

    Returns:
      The full diff text. ErrorExits if no file produced a patch.
    """
    class DiffData:
      # Mutable record holding everything needed to emit one file's diff.
      def __init__(self, perforceVCS, filename, action):
        self.perforceVCS = perforceVCS
        self.filename = filename
        self.action = action
        self.base_filename = perforceVCS.GetBaseFilename(filename)

        self.file_body = None
        self.base_rev = None
        self.prefix = None
        self.working_copy = True
        self.change_summary = None

    def GenerateDiffHeader(diffData):
      # Emits the "Index:"/"---"/"+++" preamble in svn diff format,
      # including rename/copy lines for moved or branched files.
      header = []
      header.append("Index: %s" % diffData.filename)
      header.append("=" * 67)

      if diffData.base_filename != diffData.filename:
        if diffData.action.startswith("move"):
          verb = "rename"
        else:
          verb = "copy"
        header.append("%s from %s" % (verb, diffData.base_filename))
        header.append("%s to %s" % (verb, diffData.filename))

      suffix = "\t(revision %s)" % diffData.base_rev
      header.append("--- " + diffData.base_filename + suffix)
      if diffData.working_copy:
        suffix = "\t(working copy)"
      header.append("+++ " + diffData.filename + suffix)
      if diffData.change_summary:
        header.append(diffData.change_summary)
      return header

    def GenerateMergeDiff(diffData, args):
      # -du generates a unified diff, which is nearly svn format
      diffData.file_body = self.RunPerforceCommand(
          ["diff", "-du", diffData.filename] + args)
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      diffData.prefix = ""

      # We have to replace p4's file status output (the lines starting
      # with +++ or ---) to match svn's diff format
      lines = diffData.file_body.splitlines()
      first_good_line = 0
      while (first_good_line < len(lines) and
             not lines[first_good_line].startswith("@@")):
        first_good_line += 1
      diffData.file_body = "\n".join(lines[first_good_line:])
      return diffData

    def GenerateAddDiff(diffData):
      fstat = self.RunPerforceCommand(["fstat", diffData.filename],
                                      marshal_output=True)
      if "headRev" in fstat:
        diffData.base_rev = fstat["headRev"] # Re-adding a deleted file
      else:
        diffData.base_rev = "0" # Brand new file
      diffData.working_copy = False
      rel_path = self.GetLocalFilename(diffData.filename)
      diffData.file_body = open(rel_path, 'r').read()
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -0,0 +1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " @@"
      diffData.prefix = "+"
      return diffData

    def GenerateDeleteDiff(diffData):
      diffData.base_rev = self.GetBaseRevision(diffData.filename)
      is_base_binary = self.IsBaseBinary(diffData.filename)
      # For deletes, base_filename == filename
      diffData.file_body = self.GetFileContent(diffData.base_filename,
                                               None,
                                               is_base_binary)
      # Replicate svn's list of changed lines
      line_count = len(diffData.file_body.splitlines())
      diffData.change_summary = "@@ -1"
      if line_count > 1:
        diffData.change_summary += ",%d" % line_count
      diffData.change_summary += " +0,0 @@"
      diffData.prefix = "-"
      return diffData

    changed_files = self.GetChangedFiles()

    svndiff = []
    filecount = 0
    for (filename, action) in changed_files.items():
      svn_status = self.PerforceActionToSvnStatus(action)
      if svn_status == "SKIP":
        continue

      diffData = DiffData(self, filename, action)
      # Is it possible to diff a branched file? Stackoverflow says no:
      # http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add
      if svn_status == "M":
        diffData = GenerateMergeDiff(diffData, args)
      elif svn_status == "A":
        diffData = GenerateAddDiff(diffData)
      elif svn_status == "D":
        diffData = GenerateDeleteDiff(diffData)
      else:
        ErrorExit("Unknown file action %s (svn action %s)." % \
                  (action, svn_status))

      svndiff += GenerateDiffHeader(diffData)

      for line in diffData.file_body.splitlines():
        svndiff.append(diffData.prefix + line)
      filecount += 1
    if not filecount:
      ErrorExit("No valid patches found in output from p4 diff")
    return "\n".join(svndiff) + "\n"

  def PerforceActionToSvnStatus(self, status):
    """Maps a p4 action to a one-letter svn-like status.

    "SKIP" marks actions (move/delete) that produce no diff of their own.
    Raises KeyError for actions not in the table.
    """
    # Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717
    # Is there something more official?
    return {
            "add" : "A",
            "branch" : "A",
            "delete" : "D",
            "edit" : "M", # Also includes changing file types.
            "integrate" : "M",
            "move/add" : "M",
            "move/delete": "SKIP",
            "purge" : "D", # How does a file's status become "purge"?
           }[status]

  def GetAction(self, filename):
    """Returns the p4 action for filename; ErrorExits if it's unchanged."""
    changed_files = self.GetChangedFiles()
    if not filename in changed_files:
      ErrorExit("Trying to get base version of unknown file %s." % filename)

    return changed_files[filename]

  def GetBaseFile(self, filename):
    """Returns (base_content, new_content, is_binary, status) for filename.

    new_content is only read back for binary files that still exist in the
    pending change; status is the letter from PerforceActionToSvnStatus.
    """
    base_filename = self.GetBaseFilename(filename)
    base_content = ""
    new_content = None

    status = self.PerforceActionToSvnStatus(self.GetAction(filename))

    if status != "A":
      revision = self.GetBaseRevision(base_filename)
      if not revision:
        ErrorExit("Couldn't find base revision for file %s" % filename)
      is_base_binary = self.IsBaseBinary(base_filename)
      base_content = self.GetFileContent(base_filename,
                                         revision,
                                         is_base_binary)

    is_binary = self.IsPendingBinary(filename)
    if status != "D" and status != "SKIP":
      relpath = self.GetLocalFilename(filename)
      if is_binary:
        new_content = open(relpath, "rb").read()

    return base_content, new_content, is_binary, status
1897 | |
# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
    pertaining to filename.
  """
  patches = []
  filename = None
  diff = []
  # A new per-file section starts at an "Index:" line, or at a
  # "Property changes on:" line that names a *different* file
  # (property-only changes have no "Index:" line of their own).
  for line in data.splitlines(True):
    new_filename = None
    if line.startswith('Index:'):
      unused, new_filename = line.split(':', 1)
      new_filename = new_filename.strip()
    elif line.startswith('Property changes on:'):
      unused, temp_filename = line.split(':', 1)
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows. Make them the same
      # otherwise the file shows up twice.
      temp_filename = temp_filename.strip().replace('\\', '/')
      if temp_filename != filename:
        # File has property changes but no modifications, create a new diff.
        new_filename = temp_filename
    if new_filename:
      if filename and diff:
        # Flush the previous file's accumulated diff text.
        patches.append((filename, ''.join(diff)))
      filename = new_filename
      diff = [line]
      continue
    if diff is not None:
      diff.append(line)
  if filename and diff:
    # Flush the final file's diff.
    patches.append((filename, ''.join(diff)))
  return patches
1937 | |
1938 | |
def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
  """Uploads a separate patch for each file in the diff output.

  Args:
    issue: The issue id the patches belong to.
    rpc_server: Server used to POST each patch to /<issue>/upload_patch/.
    patchset: The patchset id.
    data: The full diff text; split per file with SplitPatch().
    options: Parsed command line options (download_base is consulted).

  Returns a list of [patch_key, filename] for each file.
  """
  patches = SplitPatch(data)
  rv = []
  for patch in patches:
    # Skip (with a warning) any single-file patch over the upload limit.
    if len(patch[1]) > MAX_UPLOAD_SIZE:
      print ("Not uploading the patch for " + patch[0] +
             " because the file is too large.")
      continue
    form_fields = [("filename", patch[0])]
    if not options.download_base:
      form_fields.append(("content_upload", "1"))
    files = [("data", "data.diff", patch[1])]
    ctype, body = EncodeMultipartFormData(form_fields, files)
    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
    print "Uploading patch for " + patch[0]
    response_body = rpc_server.Send(url, body, content_type=ctype)
    # On success the server replies "OK" followed by the patch key.
    lines = response_body.splitlines()
    if not lines or lines[0] != "OK":
      StatusUpdate(" --> %s" % response_body)
      sys.exit(1)
    rv.append([lines[1], patch[0]])
  return rv
1965 | |
1966 | |
def GuessVCSName(options):
  """Helper to guess the version control system.

  This examines the current directory, guesses which VersionControlSystem
  we're using, and returns an string indicating which VCS is detected.

  Returns:
    A pair (vcs, output). vcs is a string indicating which VCS was detected
    and is one of VCS_GIT, VCS_MERCURIAL, VCS_SUBVERSION, VCS_PERFORCE,
    VCS_CVS, or VCS_UNKNOWN.
    Since local perforce repositories can't be easily detected, this method
    will only guess VCS_PERFORCE if any perforce options have been specified.
    output is a string containing any interesting output from the vcs
    detection routine, or None if there is nothing interesting.
  """
  # Any explicitly-set p4_* option short-circuits detection to Perforce.
  for attribute, value in options.__dict__.iteritems():
    if attribute.startswith("p4") and value != None:
      return (VCS_PERFORCE, None)

  def RunDetectCommand(vcs_type, command):
    """Helper to detect VCS by executing command.

    Returns:
      A pair (vcs, output) or None. Throws exception on error.
    """
    try:
      out, returncode = RunShellWithReturnCode(command)
      if returncode == 0:
        return (vcs_type, out.strip())
    except OSError, (errcode, message):
      if errcode != errno.ENOENT: # command not found code
        raise

  # Mercurial has a command to get the base directory of a repository
  # Try running it, but don't die if we don't have hg installed.
  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.
  res = RunDetectCommand(VCS_MERCURIAL, ["hg", "root"])
  if res != None:
    return res

  # Subversion from 1.7 has a single centralized .svn folder
  # ( see http://subversion.apache.org/docs/release-notes/1.7.html#wc-ng )
  # That's why we use 'svn info' instead of checking for .svn dir
  res = RunDetectCommand(VCS_SUBVERSION, ["svn", "info"])
  if res != None:
    return res

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  res = RunDetectCommand(VCS_GIT, ["git", "rev-parse",
                                   "--is-inside-work-tree"])
  if res != None:
    return res

  # detect CVS repos use `cvs status && $? == 0` rules
  res = RunDetectCommand(VCS_CVS, ["cvs", "status"])
  if res != None:
    return res

  return (VCS_UNKNOWN, None)
2027 | |
2028 | |
def GuessVCS(options):
  """Helper to guess the version control system.

  This verifies any user-specified VersionControlSystem (by command line
  or environment variable). If the user didn't specify one, this examines
  the current directory, guesses which VersionControlSystem we're using,
  and returns an instance of the appropriate class. Exit with an error
  if we can't figure it out.

  Returns:
    A VersionControlSystem instance. Exits if the VCS can't be guessed.
  """
  # An explicit choice (command line first, then CODEREVIEW_VCS) wins
  # over auto-detection.
  chosen = options.vcs or os.environ.get("CODEREVIEW_VCS")
  if chosen:
    vcs = VCS_ABBREVIATIONS.get(chosen.lower())
    if vcs is None:
      ErrorExit("Unknown version control system %r specified." % chosen)
    extra_output = None
  else:
    vcs, extra_output = GuessVCSName(options)

  if vcs == VCS_MERCURIAL:
    # Mercurial additionally needs the repository root, which detection
    # may already have produced as its output.
    if extra_output is None:
      extra_output = RunShell(["hg", "root"]).strip()
    return MercurialVCS(options, extra_output)

  # Every other supported implementation is constructed from options alone.
  constructors = {
      VCS_SUBVERSION: SubversionVCS,
      VCS_PERFORCE: PerforceVCS,
      VCS_GIT: GitVCS,
      VCS_CVS: CVSVCS,
  }
  if vcs in constructors:
    return constructors[vcs](options)

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))
2067 | |
2068 | |
def CheckReviewer(reviewer):
  """Validate a reviewer -- either a nickname or an email address.

  Args:
    reviewer: A nickname or an email address.

  Calls ErrorExit() if it is an invalid email address.
  """
  if "@" not in reviewer:
    # No "@" at all: treat it as a nickname, which needs no validation.
    return
  # More than one "@" cannot be a valid address.
  if reviewer.count("@") > 1:
    ErrorExit("Invalid email address: %r" % reviewer)
  # The domain part must contain at least one dot.
  domain = reviewer.split("@")[1]
  if "." not in domain:
    ErrorExit("Invalid email address: %r" % reviewer)
2085 | |
2086 | |
def LoadSubversionAutoProperties():
  """Returns the content of [auto-props] section of Subversion's config file as
  a dictionary.

  Returns:
    A dictionary whose key-value pair corresponds the [auto-props] section's
    key-value pair.
    In following cases, returns empty dictionary:
    - config file doesn't exist, or
    - 'enable-auto-props' is not set to 'true-like-value' in [miscellany].
  """
  # Subversion keeps its per-user config in a platform-dependent location.
  if os.name == 'nt':
    subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config"
  else:
    subversion_config = os.path.expanduser("~/.subversion/config")
  if not os.path.exists(subversion_config):
    return {}
  config = ConfigParser.ConfigParser()
  config.read(subversion_config)
  # auto-props are only honored when explicitly enabled in [miscellany].
  enabled = (config.has_section("miscellany") and
             config.has_option("miscellany", "enable-auto-props") and
             config.getboolean("miscellany", "enable-auto-props") and
             config.has_section("auto-props"))
  if not enabled:
    return {}
  return dict(
      (file_pattern,
       ParseSubversionPropertyValues(config.get("auto-props", file_pattern)))
      for file_pattern in config.options("auto-props"))
2117 | |
def ParseSubversionPropertyValues(props):
  """Parse the given property value which comes from [auto-props] section and
  returns a list whose element is a (svn_prop_key, svn_prop_value) pair.

  See the following doctest for example.

  >>> ParseSubversionPropertyValues('svn:eol-style=LF')
  [('svn:eol-style', 'LF')]
  >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
  [('svn:mime-type', 'image/jpeg')]
  >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
  [('svn:eol-style', 'LF'), ('svn:executable', '*')]
  """
  key_value_pairs = []
  for prop in props.split(";"):
    # Split on the first "=" only: property values may themselves contain
    # "=" (e.g. svn:keywords definitions).  The previous implementation
    # asserted len <= 2 and crashed on such values.
    key_value = prop.split("=", 1)
    if len(key_value) == 1:
      # If value is not given, use '*' as a Subversion's convention.
      key_value_pairs.append((key_value[0], "*"))
    else:
      key_value_pairs.append((key_value[0], key_value[1]))
  return key_value_pairs
2141 | |
2142 | |
def GetSubversionPropertyChanges(filename):
  """Return a Subversion's 'Property changes on ...' string, which is used in
  the patch file.

  Args:
    filename: filename whose property might be set by [auto-props] config.

  Returns:
    A string like 'Property changes on |filename| ...' if given |filename|
    matches any entries in [auto-props] section. None, otherwise.
  """
  global svn_auto_props_map
  # Lazily load and cache the [auto-props] configuration on first use.
  if svn_auto_props_map is None:
    svn_auto_props_map = LoadSubversionAutoProperties()

  # Collect the properties of every pattern matching this filename,
  # preserving each pattern's own property order.
  matched_props = [prop
                   for file_pattern, props in svn_auto_props_map.items()
                   if fnmatch.fnmatch(filename, file_pattern)
                   for prop in props]
  if not matched_props:
    return None
  return FormatSubversionPropertyChanges(filename, matched_props)
2165 | |
2166 | |
def FormatSubversionPropertyChanges(filename, props):
  """Returns Subversion's 'Property changes on ...' strings using given filename
  and properties.

  Args:
    filename: filename
    props: A list whose element is a (svn_prop_key, svn_prop_value) pair.

  Returns:
    A string which can be used in the patch file for Subversion: a
    'Property changes on: <filename>' header, an underscore rule, then an
    'Added:' line and indented value line per property, ending in a newline.
  """
  out = ["Property changes on: %s" % filename,
         "___________________________________________________________________"]
  for prop_key, prop_value in props:
    out.extend(["Added: %s" % prop_key, "   + %s" % prop_value])
  return "\n".join(out) + "\n"
2194 | |
2195 | |
def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by
      the VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).
    The patchset id is None if the base files are not uploaded by this
    script (applies only to SVN checkouts).
  """
  options, args = parser.parse_args(argv[1:])
  if options.help:
    if options.verbose < 2:
      # hide Perforce options
      parser.epilog = "Use '--help -v' to show additional Perforce options."
      parser.option_groups.remove(parser.get_option_group('--p4_port'))
    parser.print_help()
    sys.exit(0)

  # Verbosity also controls the logging level for the rest of the run.
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)

  vcs = GuessVCS(options)

  base = options.base_url
  if isinstance(vcs, SubversionVCS):
    # Guessing the base field is only supported for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    guessed_base = vcs.GuessBase(options.download_base)
    if base:
      if guessed_base and base != guessed_base:
        print "Using base URL \"%s\" from --base_url instead of \"%s\"" % \
            (base, guessed_base)
    else:
      base = guessed_base

  if not base and options.download_base:
    # NOTE(review): this branch only runs when download_base is already
    # true, so the assignment below is a no-op -- presumably it was meant
    # to force-enable base download when no base URL is known; confirm.
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  data = vcs.PostProcessDiff(data)
  if options.print_diffs:
    print "Rietveld diff start:*****"
    print data
    print "Rietveld diff end:*****"
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  rpc_server = GetRpcServer(options.server,
                            options.email,
                            options.host,
                            options.save_cookies,
                            options.account_type)
  form_fields = []

  repo_guid = vcs.GetGUID()
  if repo_guid:
    form_fields.append(("repo_guid", repo_guid))
  if base:
    # Strip any credentials out of the base URL before sending it on.
    b = urlparse.urlparse(base)
    username, netloc = urllib.splituser(b.netloc)
    if username:
      logging.info("Removed username from base URL")
    base = urlparse.urlunparse((b.scheme, netloc, b.path, b.params,
                                b.query, b.fragment))
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    for reviewer in options.reviewers.split(','):
      CheckReviewer(reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      CheckReviewer(cc)
    form_fields.append(("cc", options.cc))

  # Process --message, --title and --file.
  message = options.message or ""
  title = options.title or ""
  if options.file:
    if options.message:
      ErrorExit("Can't specify both message and message file options")
    file = open(options.file, 'r')
    message = file.read()
    file.close()
  if options.issue:
    prompt = "Title describing this patch set: "
  else:
    prompt = "New issue subject: "
  # Fall back from --title to the message's first line, then to a prompt.
  title = (
      title or message.split('\n', 1)[0].strip() or raw_input(prompt).strip())
  if not title and not options.issue:
    ErrorExit("A non-empty title is required for a new issue")
  # For existing issues, it's fine to give a patchset an empty name. Rietveld
  # doesn't accept that so use a whitespace.
  title = title or " "
  if len(title) > 100:
    # Truncate long titles, marking the cut with an ellipsis.
    title = title[:99] + '…'
  if title and not options.issue:
    message = message or title

  form_fields.append(("subject", title))
  # If it's a new issue send message as description. Otherwise a new
  # message is created below on upload_complete.
  if message and not options.issue:
    form_fields.append(("description", message))

  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  if options.private:
    if options.issue:
      print "Warning: Private flag ignored when updating an existing issue."
    else:
      form_fields.append(("private", "1"))
  if options.send_patch:
    # --send_patch implies --send_mail.
    options.send_mail = True
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Response format: status line, patchset id, then "key filename" pairs.
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # The issue id is the last path component of the URL in the status line.
  issue = msg[msg.rfind("/")+1:]

  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result

  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)

  payload = {} # payload for final request
  if options.send_mail:
    payload["send_mail"] = "yes"
  if options.send_patch:
    payload["attach_patch"] = "yes"
  if options.issue and message:
    payload["message"] = message
  payload = urllib.urlencode(payload)
  rpc_server.Send("/" + issue + "/upload_complete/" + (patchset or ""),
                  payload=payload)
  return issue, patchset
2379 | |
2380 | |
def main():
  """Command-line entry point: sets up logging/locale and runs RealMain."""
  try:
    logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                                "%(lineno)s %(message)s "))
    # Force a predictable locale so the VCS command output is parseable.
    os.environ['LC_ALL'] = 'C'
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)
2391 | |
2392 | |
# Standard script entry-point guard.
if __name__ == "__main__":
  main()
OLD | NEW |