OLD | NEW |
(Empty) | |
| 1 # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ |
| 2 # Copyright (c) 2010, Eucalyptus Systems, Inc. |
| 3 # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. |
| 4 # All rights reserved. |
| 5 # |
| 6 # Permission is hereby granted, free of charge, to any person obtaining a |
| 7 # copy of this software and associated documentation files (the |
| 8 # "Software"), to deal in the Software without restriction, including |
| 9 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 10 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 11 # persons to whom the Software is furnished to do so, subject to the fol- |
| 12 # lowing conditions: |
| 13 # |
| 14 # The above copyright notice and this permission notice shall be included |
| 15 # in all copies or substantial portions of the Software. |
| 16 # |
| 17 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 18 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- |
| 19 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
| 20 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
| 21 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 22 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 23 # IN THE SOFTWARE. |
| 24 |
| 25 # |
| 26 # Parts of this code were copied or derived from sample code supplied by AWS. |
| 27 # The following notice applies to that code. |
| 28 # |
| 29 # This software code is made available "AS IS" without warranties of any |
| 30 # kind. You may copy, display, modify and redistribute the software |
| 31 # code either by itself or as incorporated into your code; provided that |
| 32 # you do not remove any proprietary notices. Your use of this software |
| 33 # code is at your own risk and you waive any claim against Amazon |
| 34 # Digital Services, Inc. or its affiliates with respect to your use of |
| 35 # this software code. (c) 2006 Amazon Digital Services, Inc. or its |
| 36 # affiliates. |
| 37 |
| 38 """ |
| 39 Some handy utility functions used by several classes. |
| 40 """ |
| 41 |
| 42 import socket |
| 43 import urllib |
| 44 import urllib2 |
| 45 import imp |
| 46 import subprocess |
| 47 import StringIO |
| 48 import time |
| 49 import logging.handlers |
| 50 import boto |
| 51 import boto.provider |
| 52 import tempfile |
| 53 import smtplib |
| 54 import datetime |
| 55 import re |
| 56 import email.mime.multipart |
| 57 import email.mime.base |
| 58 import email.mime.text |
| 59 import email.utils |
| 60 import email.encoders |
| 61 import gzip |
| 62 import base64 |
| 63 try: |
| 64 from hashlib import md5 |
| 65 except ImportError: |
| 66 from md5 import md5 |
| 67 |
| 68 |
| 69 try: |
| 70 import hashlib |
| 71 _hashfn = hashlib.sha512 |
| 72 except ImportError: |
| 73 import md5 |
| 74 _hashfn = md5.md5 |
| 75 |
| 76 from boto.compat import json |
| 77 |
| 78 # List of Query String Arguments of Interest |
| 79 qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging', |
| 80 'partNumber', 'policy', 'requestPayment', 'torrent', |
| 81 'versioning', 'versionId', 'versions', 'website', |
| 82 'uploads', 'uploadId', 'response-content-type', |
| 83 'response-content-language', 'response-expires', |
| 84 'response-cache-control', 'response-content-disposition', |
| 85 'response-content-encoding', 'delete', 'lifecycle', |
| 86 'tagging', 'restore', |
| 87 # storageClass is a QSA for buckets in Google Cloud Storage. |
| 88                    # (StorageClass is associated with individual keys in S3, but |
| 89 # having it listed here should cause no problems because |
| 90 # GET bucket?storageClass is not part of the S3 API.) |
| 91 'storageClass'] |
| 92 |
| 93 |
| 94 _first_cap_regex = re.compile('(.)([A-Z][a-z]+)') |
| 95 _number_cap_regex = re.compile('([a-z])([0-9]+)') |
| 96 _end_cap_regex = re.compile('([a-z0-9])([A-Z])') |
| 97 |
| 98 |
| 99 def unquote_v(nv): |
| 100 if len(nv) == 1: |
| 101 return nv |
| 102 else: |
| 103 return (nv[0], urllib.unquote(nv[1])) |
| 104 |
| 105 |
| 106 def canonical_string(method, path, headers, expires=None, |
| 107 provider=None): |
| 108 """ |
| 109 Generates the aws canonical string for the given parameters |
| 110 """ |
| 111 if not provider: |
| 112 provider = boto.provider.get_default() |
| 113 interesting_headers = {} |
| 114 for key in headers: |
| 115 lk = key.lower() |
| 116         if headers[key] != None and (lk in ['content-md5', 'content-type', 'date'] or |
| 117 lk.startswith(provider.header_prefix)): |
| 118 interesting_headers[lk] = headers[key].strip() |
| 119 |
| 120 # these keys get empty strings if they don't exist |
| 121 if 'content-type' not in interesting_headers: |
| 122 interesting_headers['content-type'] = '' |
| 123 if 'content-md5' not in interesting_headers: |
| 124 interesting_headers['content-md5'] = '' |
| 125 |
| 126 # just in case someone used this. it's not necessary in this lib. |
| 127 if provider.date_header in interesting_headers: |
| 128 interesting_headers['date'] = '' |
| 129 |
| 130 # if you're using expires for query string auth, then it trumps date |
| 131 # (and provider.date_header) |
| 132 if expires: |
| 133 interesting_headers['date'] = str(expires) |
| 134 |
| 135 sorted_header_keys = sorted(interesting_headers.keys()) |
| 136 |
| 137 buf = "%s\n" % method |
| 138 for key in sorted_header_keys: |
| 139 val = interesting_headers[key] |
| 140 if key.startswith(provider.header_prefix): |
| 141 buf += "%s:%s\n" % (key, val) |
| 142 else: |
| 143 buf += "%s\n" % val |
| 144 |
| 145 # don't include anything after the first ? in the resource... |
| 146 # unless it is one of the QSA of interest, defined above |
| 147 t = path.split('?') |
| 148 buf += t[0] |
| 149 |
| 150 if len(t) > 1: |
| 151 qsa = t[1].split('&') |
| 152 qsa = [a.split('=', 1) for a in qsa] |
| 153 qsa = [unquote_v(a) for a in qsa if a[0] in qsa_of_interest] |
| 154 if len(qsa) > 0: |
| 155 qsa.sort(cmp=lambda x, y:cmp(x[0], y[0])) |
| 156 qsa = ['='.join(a) for a in qsa] |
| 157 buf += '?' |
| 158 buf += '&'.join(qsa) |
| 159 |
| 160 return buf |
| 161 |
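| # Usage sketch (values are illustrative; assumes the default AWS provider, whose header prefix is 'x-amz-'): |
| #     canonical_string('GET', '/mybucket/mykey.txt', |
| #                      {'Date': 'Tue, 27 Mar 2007 19:36:42 +0000'}) |
| # returns 'GET\n\n\nTue, 27 Mar 2007 19:36:42 +0000\n/mybucket/mykey.txt' |
| # (method, blank content-md5, blank content-type, date, then the resource path). |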
| 162 |
| 163 def merge_meta(headers, metadata, provider=None): |
| 164 if not provider: |
| 165 provider = boto.provider.get_default() |
| 166 metadata_prefix = provider.metadata_prefix |
| 167 final_headers = headers.copy() |
| 168 for k in metadata.keys(): |
| 169 if k.lower() in ['cache-control', 'content-md5', 'content-type', |
| 170 'content-encoding', 'content-disposition', |
| 171 'date', 'expires']: |
| 172 final_headers[k] = metadata[k] |
| 173 else: |
| 174 final_headers[metadata_prefix + k] = metadata[k] |
| 175 |
| 176 return final_headers |
| 177 |
| 178 |
| 179 def get_aws_metadata(headers, provider=None): |
| 180 if not provider: |
| 181 provider = boto.provider.get_default() |
| 182 metadata_prefix = provider.metadata_prefix |
| 183 metadata = {} |
| 184 for hkey in headers.keys(): |
| 185 if hkey.lower().startswith(metadata_prefix): |
| 186 val = urllib.unquote_plus(headers[hkey]) |
| 187 try: |
| 188 metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8') |
| 189 except UnicodeDecodeError: |
| 190 metadata[hkey[len(metadata_prefix):]] = val |
| 191 del headers[hkey] |
| 192 return metadata |
| 193 |
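| # Sketch of the round trip between merge_meta and get_aws_metadata, assuming the |
| # default AWS provider (metadata prefix 'x-amz-meta-'): |
| #     merge_meta({}, {'color': 'blue'})               -> {'x-amz-meta-color': 'blue'} |
| #     get_aws_metadata({'x-amz-meta-color': 'blue'})  -> {'color': 'blue'} |
| # get_aws_metadata also removes the matched headers from the dict passed in. |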
| 194 |
| 195 def retry_url(url, retry_on_404=True, num_retries=10): |
| 196 """ |
| 197 Retry a url. This is specifically used for accessing the metadata |
| 198 service on an instance. Since this address should never be proxied |
| 199 (for security reasons), we create a ProxyHandler with a NULL |
| 200 dictionary to override any proxy settings in the environment. |
| 201 """ |
| 202 for i in range(0, num_retries): |
| 203 try: |
| 204 proxy_handler = urllib2.ProxyHandler({}) |
| 205 opener = urllib2.build_opener(proxy_handler) |
| 206 req = urllib2.Request(url) |
| 207 r = opener.open(req) |
| 208 result = r.read() |
| 209             # return the proxy-free opener's response without re-opening the URL |
| 210             return result |
| 211 except urllib2.HTTPError, e: |
| 212 # in 2.6 you use getcode(), in 2.5 and earlier you use code |
| 213 if hasattr(e, 'getcode'): |
| 214 code = e.getcode() |
| 215 else: |
| 216 code = e.code |
| 217 if code == 404 and not retry_on_404: |
| 218 return '' |
| 219 except urllib2.URLError, e: |
| 220 raise e |
| 221 except Exception, e: |
| 222 pass |
| 223 boto.log.exception('Caught exception reading instance data') |
| 224 time.sleep(2 ** i) |
| 225 boto.log.error('Unable to read instance data, giving up') |
| 226 return '' |
| 227 |
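| # Usage sketch (only meaningful from inside an EC2 instance): |
| #     retry_url('http://169.254.169.254/latest/meta-data/instance-id', num_retries=3) |
| # returns the instance id string, or '' once the retries are exhausted. |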
| 228 |
| 229 def _get_instance_metadata(url, num_retries): |
| 230 return LazyLoadMetadata(url, num_retries) |
| 231 |
| 232 |
| 233 class LazyLoadMetadata(dict): |
| 234 def __init__(self, url, num_retries): |
| 235 self._url = url |
| 236 self._num_retries = num_retries |
| 237 self._leaves = {} |
| 238 self._dicts = [] |
| 239 data = boto.utils.retry_url(self._url, num_retries=self._num_retries) |
| 240 if data: |
| 241 fields = data.split('\n') |
| 242 for field in fields: |
| 243 if field.endswith('/'): |
| 244 key = field[0:-1] |
| 245 self._dicts.append(key) |
| 246 else: |
| 247 p = field.find('=') |
| 248 if p > 0: |
| 249 key = field[p + 1:] |
| 250 resource = field[0:p] + '/openssh-key' |
| 251 else: |
| 252 key = resource = field |
| 253 self._leaves[key] = resource |
| 254 self[key] = None |
| 255 |
| 256 def _materialize(self): |
| 257 for key in self: |
| 258 self[key] |
| 259 |
| 260 def __getitem__(self, key): |
| 261 if key not in self: |
| 262 # allow dict to throw the KeyError |
| 263 return super(LazyLoadMetadata, self).__getitem__(key) |
| 264 |
| 265 # already loaded |
| 266 val = super(LazyLoadMetadata, self).__getitem__(key) |
| 267 if val is not None: |
| 268 return val |
| 269 |
| 270 if key in self._leaves: |
| 271 resource = self._leaves[key] |
| 272 val = boto.utils.retry_url(self._url + urllib.quote(resource, |
| 273 safe="/:"), |
| 274 num_retries=self._num_retries) |
| 275 if val and val[0] == '{': |
| 276 val = json.loads(val) |
| 277 else: |
| 278 p = val.find('\n') |
| 279 if p > 0: |
| 280 val = val.split('\n') |
| 281 self[key] = val |
| 282 elif key in self._dicts: |
| 283 self[key] = LazyLoadMetadata(self._url + key + '/', |
| 284 self._num_retries) |
| 285 |
| 286 return super(LazyLoadMetadata, self).__getitem__(key) |
| 287 |
| 288 def get(self, key, default=None): |
| 289 try: |
| 290 return self[key] |
| 291 except KeyError: |
| 292 return default |
| 293 |
| 294 def values(self): |
| 295 self._materialize() |
| 296 return super(LazyLoadMetadata, self).values() |
| 297 |
| 298 def items(self): |
| 299 self._materialize() |
| 300 return super(LazyLoadMetadata, self).items() |
| 301 |
| 302 def __str__(self): |
| 303 self._materialize() |
| 304 return super(LazyLoadMetadata, self).__str__() |
| 305 |
| 306 def __repr__(self): |
| 307 self._materialize() |
| 308 return super(LazyLoadMetadata, self).__repr__() |
| 309 |
| 310 |
| 311 def get_instance_metadata(version='latest', url='http://169.254.169.254', |
| 312 timeout=None, num_retries=5): |
| 313 """ |
| 314 Returns the instance metadata as a nested Python dictionary. |
| 315 Simple values (e.g. local_hostname, hostname, etc.) will be |
| 316 stored as string values. Values such as ancestor-ami-ids will |
| 317 be stored in the dict as a list of string values. More complex |
| 318     fields such as public-keys will be stored as nested dicts. |
| 319 |
| 320 If the timeout is specified, the connection to the specified url |
| 321 will time out after the specified number of seconds. |
| 322 |
| 323 """ |
| 324 if timeout is not None: |
| 325 original = socket.getdefaulttimeout() |
| 326 socket.setdefaulttimeout(timeout) |
| 327 try: |
| 328 return _get_instance_metadata('%s/%s/meta-data/' % (url, version), |
| 329 num_retries=num_retries) |
| 330 except urllib2.URLError, e: |
| 331 return None |
| 332 finally: |
| 333 if timeout is not None: |
| 334 socket.setdefaulttimeout(original) |
| 335 |
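| # Usage sketch (EC2 only; values are illustrative): |
| #     md = get_instance_metadata(timeout=1, num_retries=2) |
| #     md.get('instance-id')      -> e.g. 'i-12345678' |
| #     md['public-keys']          -> a nested LazyLoadMetadata, fetched lazily on access |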
| 336 |
| 337 def get_instance_identity(version='latest', url='http://169.254.169.254', |
| 338 timeout=None, num_retries=5): |
| 339 """ |
| 340 Returns the instance identity as a nested Python dictionary. |
| 341 """ |
| 342 iid = {} |
| 343     base_url = '%s/%s/dynamic/instance-identity' % (url, version) |
| 344 if timeout is not None: |
| 345 original = socket.getdefaulttimeout() |
| 346 socket.setdefaulttimeout(timeout) |
| 347 try: |
| 348 data = retry_url(base_url, num_retries=num_retries) |
| 349 fields = data.split('\n') |
| 350 for field in fields: |
| 351 val = retry_url(base_url + '/' + field + '/') |
| 352 if val[0] == '{': |
| 353 val = json.loads(val) |
| 354 if field: |
| 355 iid[field] = val |
| 356 return iid |
| 357 except urllib2.URLError, e: |
| 358 return None |
| 359 finally: |
| 360 if timeout is not None: |
| 361 socket.setdefaulttimeout(original) |
| 362 |
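| # Usage sketch (EC2 only; field names are those exposed by the instance-identity |
| # endpoint, e.g. 'document' and 'signature'): |
| #     iid = get_instance_identity(timeout=1) |
| #     iid['document']            -> dict parsed from the JSON identity document |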
| 363 |
| 364 def get_instance_userdata(version='latest', sep=None, |
| 365 url='http://169.254.169.254'): |
| 366 ud_url = '%s/%s/user-data' % (url, version) |
| 367 user_data = retry_url(ud_url, retry_on_404=False) |
| 368 if user_data: |
| 369 if sep: |
| 370 l = user_data.split(sep) |
| 371 user_data = {} |
| 372 for nvpair in l: |
| 373 t = nvpair.split('=') |
| 374 user_data[t[0].strip()] = t[1].strip() |
| 375 return user_data |
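| # Usage sketch: user data of 'foo=1|bar=2' retrieved with sep='|' becomes |
| #     {'foo': '1', 'bar': '2'} |
| # while sep=None returns the raw user-data string unchanged. |
| |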
| 376 |
| 377 ISO8601 = '%Y-%m-%dT%H:%M:%SZ' |
| 378 ISO8601_MS = '%Y-%m-%dT%H:%M:%S.%fZ' |
| 379 |
| 380 |
| 381 def get_ts(ts=None): |
| 382 if not ts: |
| 383 ts = time.gmtime() |
| 384 return time.strftime(ISO8601, ts) |
| 385 |
| 386 |
| 387 def parse_ts(ts): |
| 388 ts = ts.strip() |
| 389 try: |
| 390 dt = datetime.datetime.strptime(ts, ISO8601) |
| 391 return dt |
| 392 except ValueError: |
| 393 dt = datetime.datetime.strptime(ts, ISO8601_MS) |
| 394 return dt |
| 395 |
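| # Example: both accepted timestamp shapes parse to the same naive UTC datetime: |
| #     parse_ts('2012-06-01T09:30:00Z')     -> datetime.datetime(2012, 6, 1, 9, 30) |
| #     parse_ts('2012-06-01T09:30:00.000Z') -> datetime.datetime(2012, 6, 1, 9, 30) |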
| 396 |
| 397 def find_class(module_name, class_name=None): |
| 398 if class_name: |
| 399 module_name = "%s.%s" % (module_name, class_name) |
| 400 modules = module_name.split('.') |
| 401 c = None |
| 402 |
| 403 try: |
| 404 for m in modules[1:]: |
| 405 if c: |
| 406 c = getattr(c, m) |
| 407 else: |
| 408 c = getattr(__import__(".".join(modules[0:-1])), m) |
| 409 return c |
| 410 except: |
| 411 return None |
| 412 |
| 413 |
| 414 def update_dme(username, password, dme_id, ip_address): |
| 415 """ |
| 416 Update your Dynamic DNS record with DNSMadeEasy.com |
| 417 """ |
| 418 dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip' |
| 419 dme_url += '?username=%s&password=%s&id=%s&ip=%s' |
| 420 s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address)) |
| 421 return s.read() |
| 422 |
| 423 |
| 424 def fetch_file(uri, file=None, username=None, password=None): |
| 425 """ |
| 426     Fetch a file based on the URI provided. If a file object is not passed |
| 427     in, a tempfile.NamedTemporaryFile is used; None is returned if the file |
| 428     could not be retrieved. |
| 429 The URI can be either an HTTP url, or "s3://bucket_name/key_name" |
| 430 """ |
| 431 boto.log.info('Fetching %s' % uri) |
| 432 if file == None: |
| 433 file = tempfile.NamedTemporaryFile() |
| 434 try: |
| 435 if uri.startswith('s3://'): |
| 436 bucket_name, key_name = uri[len('s3://'):].split('/', 1) |
| 437 c = boto.connect_s3(aws_access_key_id=username, |
| 438 aws_secret_access_key=password) |
| 439 bucket = c.get_bucket(bucket_name) |
| 440 key = bucket.get_key(key_name) |
| 441 key.get_contents_to_file(file) |
| 442 else: |
| 443 if username and password: |
| 444 passman = urllib2.HTTPPasswordMgrWithDefaultRealm() |
| 445 passman.add_password(None, uri, username, password) |
| 446 authhandler = urllib2.HTTPBasicAuthHandler(passman) |
| 447 opener = urllib2.build_opener(authhandler) |
| 448 urllib2.install_opener(opener) |
| 449 s = urllib2.urlopen(uri) |
| 450 file.write(s.read()) |
| 451 file.seek(0) |
| 452 except: |
| 453         # log the failure and fall through so that None is returned |
| 454         boto.log.exception('Problem Retrieving file: %s' % uri) |
| 455         file = None |
| 456 return file |
| 457 |
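| # Usage sketch (bucket and key names are placeholders): |
| #     fp = fetch_file('s3://mybucket/scripts/boot.sh', username=access_key, password=secret_key) |
| # returns a tempfile.NamedTemporaryFile rewound to offset 0, or None if retrieval failed. |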
| 458 |
| 459 class ShellCommand(object): |
| 460 |
| 461 def __init__(self, command, wait=True, fail_fast=False, cwd=None): |
| 462 self.exit_code = 0 |
| 463 self.command = command |
| 464 self.log_fp = StringIO.StringIO() |
| 465 self.wait = wait |
| 466 self.fail_fast = fail_fast |
| 467 self.run(cwd=cwd) |
| 468 |
| 469 def run(self, cwd=None): |
| 470 boto.log.info('running:%s' % self.command) |
| 471 self.process = subprocess.Popen(self.command, shell=True, |
| 472 stdin=subprocess.PIPE, |
| 473 stdout=subprocess.PIPE, |
| 474 stderr=subprocess.PIPE, |
| 475 cwd=cwd) |
| 476         if self.wait: |
| 477             while self.process.poll() is None: |
| 478 time.sleep(1) |
| 479 t = self.process.communicate() |
| 480 self.log_fp.write(t[0]) |
| 481 self.log_fp.write(t[1]) |
| 482 boto.log.info(self.log_fp.getvalue()) |
| 483 self.exit_code = self.process.returncode |
| 484 |
| 485 if self.fail_fast and self.exit_code != 0: |
| 486                 raise Exception("Command %s failed with status %s" % (self.command, self.exit_code)) |
| 487 |
| 488 return self.exit_code |
| 489 |
| 490 def setReadOnly(self, value): |
| 491 raise AttributeError |
| 492 |
| 493 def getStatus(self): |
| 494 return self.exit_code |
| 495 |
| 496     status = property(getStatus, setReadOnly, None, 'The exit code for the command') |
| 497 |
| 498 def getOutput(self): |
| 499 return self.log_fp.getvalue() |
| 500 |
| 501     output = property(getOutput, setReadOnly, None, 'The STDOUT and STDERR output of the command') |
| 502 |
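| # Usage sketch: the command runs from the constructor, so by the time the object |
| # exists the (blocking) command has already finished: |
| #     cmd = ShellCommand('ls -l /tmp') |
| #     cmd.status   -> exit code of the command |
| #     cmd.output   -> captured stdout and stderr |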
| 503 |
| 504 class AuthSMTPHandler(logging.handlers.SMTPHandler): |
| 505 """ |
| 506 This class extends the SMTPHandler in the standard Python logging module |
| 507 to accept a username and password on the constructor and to then use those |
| 508 credentials to authenticate with the SMTP server. To use this, you could |
| 509 add something like this in your boto config file: |
| 510 |
| 511 [handler_hand07] |
| 512 class=boto.utils.AuthSMTPHandler |
| 513 level=WARN |
| 514 formatter=form07 |
| 515     args=('localhost', 'username', 'password', 'from@abc', ['user1@abc', 'user2@xyz'], 'Logger Subject') |
| 516 """ |
| 517 |
| 518 def __init__(self, mailhost, username, password, |
| 519 fromaddr, toaddrs, subject): |
| 520 """ |
| 521 Initialize the handler. |
| 522 |
| 523 We have extended the constructor to accept a username/password |
| 524 for SMTP authentication. |
| 525 """ |
| 526 logging.handlers.SMTPHandler.__init__(self, mailhost, fromaddr, |
| 527 toaddrs, subject) |
| 528 self.username = username |
| 529 self.password = password |
| 530 |
| 531 def emit(self, record): |
| 532 """ |
| 533 Emit a record. |
| 534 |
| 535 Format the record and send it to the specified addressees. |
| 536 It would be really nice if I could add authorization to this class |
| 537 without having to resort to cut and paste inheritance but, no. |
| 538 """ |
| 539 try: |
| 540 port = self.mailport |
| 541 if not port: |
| 542 port = smtplib.SMTP_PORT |
| 543 smtp = smtplib.SMTP(self.mailhost, port) |
| 544 smtp.login(self.username, self.password) |
| 545 msg = self.format(record) |
| 546 msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % ( |
| 547 self.fromaddr, |
| 548 ','.join(self.toaddrs), |
| 549 self.getSubject(record), |
| 550 email.utils.formatdate(), msg) |
| 551 smtp.sendmail(self.fromaddr, self.toaddrs, msg) |
| 552 smtp.quit() |
| 553 except (KeyboardInterrupt, SystemExit): |
| 554 raise |
| 555 except: |
| 556 self.handleError(record) |
| 557 |
| 558 |
| 559 class LRUCache(dict): |
| 560 """A dictionary-like object that stores only a certain number of items, and |
| 561 discards its least recently used item when full. |
| 562 |
| 563 >>> cache = LRUCache(3) |
| 564 >>> cache['A'] = 0 |
| 565 >>> cache['B'] = 1 |
| 566 >>> cache['C'] = 2 |
| 567 >>> len(cache) |
| 568 3 |
| 569 |
| 570 >>> cache['A'] |
| 571 0 |
| 572 |
| 573 Adding new items to the cache does not increase its size. Instead, the least |
| 574 recently used item is dropped: |
| 575 |
| 576 >>> cache['D'] = 3 |
| 577 >>> len(cache) |
| 578 3 |
| 579 >>> 'B' in cache |
| 580 False |
| 581 |
| 582 Iterating over the cache returns the keys, starting with the most recently |
| 583 used: |
| 584 |
| 585 >>> for key in cache: |
| 586 ... print key |
| 587 D |
| 588 A |
| 589 C |
| 590 |
| 591 This code is based on the LRUCache class from Genshi which is based on |
| 592     Myghty's LRUCache from ``myghtyutils.util``, written |
| 593 by Mike Bayer and released under the MIT license (Genshi uses the |
| 594 BSD License). See: |
| 595 |
| 596 http://svn.myghty.org/myghtyutils/trunk/lib/myghtyutils/util.py |
| 597 """ |
| 598 |
| 599 class _Item(object): |
| 600 def __init__(self, key, value): |
| 601 self.previous = self.next = None |
| 602 self.key = key |
| 603 self.value = value |
| 604 |
| 605 def __repr__(self): |
| 606 return repr(self.value) |
| 607 |
| 608 def __init__(self, capacity): |
| 609 self._dict = dict() |
| 610 self.capacity = capacity |
| 611 self.head = None |
| 612 self.tail = None |
| 613 |
| 614 def __contains__(self, key): |
| 615 return key in self._dict |
| 616 |
| 617 def __iter__(self): |
| 618 cur = self.head |
| 619 while cur: |
| 620 yield cur.key |
| 621 cur = cur.next |
| 622 |
| 623 def __len__(self): |
| 624 return len(self._dict) |
| 625 |
| 626 def __getitem__(self, key): |
| 627 item = self._dict[key] |
| 628 self._update_item(item) |
| 629 return item.value |
| 630 |
| 631 def __setitem__(self, key, value): |
| 632 item = self._dict.get(key) |
| 633 if item is None: |
| 634 item = self._Item(key, value) |
| 635 self._dict[key] = item |
| 636 self._insert_item(item) |
| 637 else: |
| 638 item.value = value |
| 639 self._update_item(item) |
| 640 self._manage_size() |
| 641 |
| 642 def __repr__(self): |
| 643 return repr(self._dict) |
| 644 |
| 645 def _insert_item(self, item): |
| 646 item.previous = None |
| 647 item.next = self.head |
| 648 if self.head is not None: |
| 649 self.head.previous = item |
| 650 else: |
| 651 self.tail = item |
| 652 self.head = item |
| 653 self._manage_size() |
| 654 |
| 655 def _manage_size(self): |
| 656 while len(self._dict) > self.capacity: |
| 657 del self._dict[self.tail.key] |
| 658 if self.tail != self.head: |
| 659 self.tail = self.tail.previous |
| 660 self.tail.next = None |
| 661 else: |
| 662 self.head = self.tail = None |
| 663 |
| 664 def _update_item(self, item): |
| 665 if self.head == item: |
| 666 return |
| 667 |
| 668 previous = item.previous |
| 669 previous.next = item.next |
| 670 if item.next is not None: |
| 671 item.next.previous = previous |
| 672 else: |
| 673 self.tail = previous |
| 674 |
| 675 item.previous = None |
| 676 item.next = self.head |
| 677 self.head.previous = self.head = item |
| 678 |
| 679 |
| 680 class Password(object): |
| 681 """ |
| 682 Password object that stores itself as hashed. |
| 683 Hash defaults to SHA512 if available, MD5 otherwise. |
| 684 """ |
| 685 hashfunc = _hashfn |
| 686 |
| 687 def __init__(self, str=None, hashfunc=None): |
| 688 """ |
| 689         Load the string from an initial value; this should be the |
| 690 raw hashed password. |
| 691 """ |
| 692 self.str = str |
| 693 if hashfunc: |
| 694 self.hashfunc = hashfunc |
| 695 |
| 696 def set(self, value): |
| 697 self.str = self.hashfunc(value).hexdigest() |
| 698 |
| 699 def __str__(self): |
| 700 return str(self.str) |
| 701 |
| 702 def __eq__(self, other): |
| 703 if other == None: |
| 704 return False |
| 705 return str(self.hashfunc(other).hexdigest()) == str(self.str) |
| 706 |
| 707 def __len__(self): |
| 708 if self.str: |
| 709 return len(self.str) |
| 710 else: |
| 711 return 0 |
| 712 |
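| # Usage sketch: |
| #     p = Password()            # uses SHA512 when hashlib is available |
| #     p.set('s3cr3t')           # stores only the hex digest |
| #     p == 's3cr3t'             -> True |
| #     str(p)                    -> the stored digest, suitable for persisting |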
| 713 |
| 714 def notify(subject, body=None, html_body=None, to_string=None, |
| 715 attachments=None, append_instance_id=True): |
| 716 attachments = attachments or [] |
| 717 if append_instance_id: |
| 718         subject = "[%s] %s" % (boto.config.get_value("Instance", "instance-id"), subject) |
| 719 if not to_string: |
| 720 to_string = boto.config.get_value('Notification', 'smtp_to', None) |
| 721 if to_string: |
| 722 try: |
| 723             from_string = boto.config.get_value('Notification', 'smtp_from', 'boto') |
| 724 msg = email.mime.multipart.MIMEMultipart() |
| 725 msg['From'] = from_string |
| 726 msg['Reply-To'] = from_string |
| 727 msg['To'] = to_string |
| 728 msg['Date'] = email.utils.formatdate(localtime=True) |
| 729 msg['Subject'] = subject |
| 730 |
| 731 if body: |
| 732 msg.attach(email.mime.text.MIMEText(body)) |
| 733 |
| 734 if html_body: |
| 735 part = email.mime.base.MIMEBase('text', 'html') |
| 736 part.set_payload(html_body) |
| 737 email.encoders.encode_base64(part) |
| 738 msg.attach(part) |
| 739 |
| 740 for part in attachments: |
| 741 msg.attach(part) |
| 742 |
| 743             smtp_host = boto.config.get_value('Notification', 'smtp_host', 'localhost') |
| 744 |
| 745 # Alternate port support |
| 746 if boto.config.get_value("Notification", "smtp_port"): |
| 747                 server = smtplib.SMTP(smtp_host, int(boto.config.get_value("Notification", "smtp_port"))) |
| 748 else: |
| 749 server = smtplib.SMTP(smtp_host) |
| 750 |
| 751 # TLS support |
| 752 if boto.config.getbool("Notification", "smtp_tls"): |
| 753 server.ehlo() |
| 754 server.starttls() |
| 755 server.ehlo() |
| 756 smtp_user = boto.config.get_value('Notification', 'smtp_user', '') |
| 757 smtp_pass = boto.config.get_value('Notification', 'smtp_pass', '') |
| 758 if smtp_user: |
| 759 server.login(smtp_user, smtp_pass) |
| 760 server.sendmail(from_string, to_string, msg.as_string()) |
| 761 server.quit() |
| 762 except: |
| 763 boto.log.exception('notify failed') |
| 764 |
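| # Usage sketch: with [Notification] smtp_host / smtp_to (and optionally smtp_user, |
| # smtp_pass, smtp_port, smtp_tls) set in the boto config, a call such as |
| #     notify('disk almost full', body='details...', to_string='ops@example.com') |
| # sends the message; failures are logged rather than raised. |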
| 765 |
| 766 def get_utf8_value(value): |
| 767 if not isinstance(value, str) and not isinstance(value, unicode): |
| 768 value = str(value) |
| 769 if isinstance(value, unicode): |
| 770 return value.encode('utf-8') |
| 771 else: |
| 772 return value |
| 773 |
| 774 |
| 775 def mklist(value): |
| 776 if not isinstance(value, list): |
| 777 if isinstance(value, tuple): |
| 778 value = list(value) |
| 779 else: |
| 780 value = [value] |
| 781 return value |
| 782 |
| 783 |
| 784 def pythonize_name(name): |
| 785 """Convert camel case to a "pythonic" name. |
| 786 |
| 787 Examples:: |
| 788 |
| 789 pythonize_name('CamelCase') -> 'camel_case' |
| 790 pythonize_name('already_pythonized') -> 'already_pythonized' |
| 791 pythonize_name('HTTPRequest') -> 'http_request' |
| 792 pythonize_name('HTTPStatus200Ok') -> 'http_status_200_ok' |
| 793 pythonize_name('UPPER') -> 'upper' |
| 794 pythonize_name('') -> '' |
| 795 |
| 796 """ |
| 797 s1 = _first_cap_regex.sub(r'\1_\2', name) |
| 798 s2 = _number_cap_regex.sub(r'\1_\2', s1) |
| 799 return _end_cap_regex.sub(r'\1_\2', s2).lower() |
| 800 |
| 801 |
| 802 def write_mime_multipart(content, compress=False, deftype='text/plain', delimiter=':'): |
| 803 """Description: |
| 804     :type content: list of tuples |
| 805     :param content: A list of (name, content) tuples. A list is used instead |
| 806                     of a dict to ensure that the scripts run in order. |
| 807 |
| 808     :type compress: bool |
| 809     :param compress: Use gzip to compress the scripts; defaults to no compression. |
| 810 |
| 811     :type deftype: str |
| 812     :param deftype: The mime type assumed if nothing else can be figured out. |
| 813 |
| 814     :type delimiter: str |
| 815     :param delimiter: MIME delimiter |
| 816 |
| 817     :rtype: str |
| 818     :return: Final mime multipart document as a string. |
| 819 """ |
| 820 wrapper = email.mime.multipart.MIMEMultipart() |
| 821 for name, con in content: |
| 822 definite_type = guess_mime_type(con, deftype) |
| 823 maintype, subtype = definite_type.split('/', 1) |
| 824 if maintype == 'text': |
| 825 mime_con = email.mime.text.MIMEText(con, _subtype=subtype) |
| 826 else: |
| 827 mime_con = email.mime.base.MIMEBase(maintype, subtype) |
| 828 mime_con.set_payload(con) |
| 829 # Encode the payload using Base64 |
| 830 email.encoders.encode_base64(mime_con) |
| 831 mime_con.add_header('Content-Disposition', 'attachment', filename=name) |
| 832 wrapper.attach(mime_con) |
| 833 rcontent = wrapper.as_string() |
| 834 |
| 835 if compress: |
| 836 buf = StringIO.StringIO() |
| 837 gz = gzip.GzipFile(mode='wb', fileobj=buf) |
| 838 try: |
| 839 gz.write(rcontent) |
| 840 finally: |
| 841 gz.close() |
| 842 rcontent = buf.getvalue() |
| 843 |
| 844 return rcontent |
| 845 |
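| # Usage sketch (part names are illustrative); the result is typically handed to |
| # cloud-init as EC2 user data: |
| #     parts = [('boot.sh', '#!/bin/bash\necho hello'), ('notes.txt', 'plain text')] |
| #     blob = write_mime_multipart(parts)      # or compress=True for gzipped output |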
| 846 |
| 847 def guess_mime_type(content, deftype): |
| 848 """Description: Guess the mime type of a block of text |
| 849     :type content: str |
| 850     :param content: The content whose mime type we are guessing |
| 851 |
| 852     :type deftype: str |
| 853     :param deftype: Default mime type returned when no prefix matches |
| 854 |
| 855     :rtype: str |
| 856     :return: The guessed mime type |
| 857 """ |
| 858     # Mappings recognized by cloud-init |
| 859 starts_with_mappings = { |
| 860 '#include': 'text/x-include-url', |
| 861 '#!': 'text/x-shellscript', |
| 862 '#cloud-config': 'text/cloud-config', |
| 863 '#upstart-job': 'text/upstart-job', |
| 864 '#part-handler': 'text/part-handler', |
| 865 '#cloud-boothook': 'text/cloud-boothook' |
| 866 } |
| 867 rtype = deftype |
| 868 for possible_type, mimetype in starts_with_mappings.items(): |
| 869 if content.startswith(possible_type): |
| 870 rtype = mimetype |
| 871 break |
| 872 return(rtype) |
| 873 |
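| # Example: the prefix table above maps cloud-init sentinels to mime types, e.g. |
| #     guess_mime_type('#!/bin/bash\necho hi', 'text/plain')   -> 'text/x-shellscript' |
| #     guess_mime_type('just some text', 'text/plain')         -> 'text/plain' |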
| 874 |
| 875 def compute_md5(fp, buf_size=8192, size=None): |
| 876 """ |
| 877 Compute MD5 hash on passed file and return results in a tuple of values. |
| 878 |
| 879 :type fp: file |
| 880 :param fp: File pointer to the file to MD5 hash. The file pointer |
| 881 will be reset to its current location before the |
| 882 method returns. |
| 883 |
| 884 :type buf_size: integer |
| 885 :param buf_size: Number of bytes per read request. |
| 886 |
| 887 :type size: int |
| 888     :param size: (optional) The maximum number of bytes to read from |
| 889         the file pointer (fp). This is useful when uploading |
| 890         a file in multiple parts where the file is being |
| 891         split in place into different parts. Fewer bytes may |
| 892         be available. |
| 893 |
| 894 :rtype: tuple |
| 895 :return: A tuple containing the hex digest version of the MD5 hash |
| 896 as the first element, the base64 encoded version of the |
| 897 plain digest as the second element and the data size as |
| 898 the third element. |
| 899 """ |
| 900 return compute_hash(fp, buf_size, size, hash_algorithm=md5) |
| 901 |
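| # Usage sketch ('payload.bin' is a placeholder file name): |
| #     fp = open('payload.bin', 'rb') |
| #     hex_md5, b64_md5, size = compute_md5(fp)    # fp is left at its original offset |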
| 902 |
| 903 def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5): |
| 904 hash_obj = hash_algorithm() |
| 905 spos = fp.tell() |
| 906 if size and size < buf_size: |
| 907 s = fp.read(size) |
| 908 else: |
| 909 s = fp.read(buf_size) |
| 910 while s: |
| 911 hash_obj.update(s) |
| 912 if size: |
| 913 size -= len(s) |
| 914 if size <= 0: |
| 915 break |
| 916 if size and size < buf_size: |
| 917 s = fp.read(size) |
| 918 else: |
| 919 s = fp.read(buf_size) |
| 920 hex_digest = hash_obj.hexdigest() |
| 921 base64_digest = base64.encodestring(hash_obj.digest()) |
| 922 if base64_digest[-1] == '\n': |
| 923 base64_digest = base64_digest[0:-1] |
| 924 # data_size based on bytes read. |
| 925 data_size = fp.tell() - spos |
| 926 fp.seek(spos) |
| 927 return (hex_digest, base64_digest, data_size) |