| OLD | NEW |
| 1 #!/usr/bin/env python | |
| 2 # Copyright 2013 The Chromium Authors. All rights reserved. | |
| 3 # Use of this source code is governed by a BSD-style license that can be | |
| 4 # found in the LICENSE file. | |
| 5 | |
| 6 """This is a simple HTTP/FTP/TCP/UDP/BASIC_AUTH_PROXY/WEBSOCKET server used for | |
| 7 testing Chrome. | |
| 8 | |
| 9 It supports several test URLs, as specified by the handlers in TestPageHandler. | |
| 10 By default, it listens on an ephemeral port and sends the port number back to | |
| 11 the originating process over a pipe. The originating process can specify an | |
| 12 explicit port if necessary. | |
| 13 It can use https if you specify the flag --https=CERT where CERT is the path | |
| 14 to a pem file containing the certificate and private key that should be used. | |
| 15 """ | |
| 16 | |
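| # Example invocation (a sketch; the --https flag is documented above, while | |
| # --port and --data-dir are assumed to come from testserver_base): | |
| #   python testserver.py --port=8000 --data-dir=chrome/test/data | |
| #   python testserver.py --https=cert-and-key.pem | |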
| 17 import base64 | |
| 18 import BaseHTTPServer | |
| 19 import cgi | |
| 20 import hashlib | |
| 21 import logging | |
| 22 import minica | |
| 23 import os | |
| 24 import json | |
| 25 import random | |
| 26 import re | |
| 27 import select | |
| 28 import socket | |
| 29 import SocketServer | |
| 30 import ssl | |
| 31 import struct | |
| 32 import sys | |
| 33 import threading | |
| 34 import time | |
| 35 import urllib | |
| 36 import urlparse | |
| 37 import zlib | |
| 38 | |
| 39 BASE_DIR = os.path.dirname(os.path.abspath(__file__)) | |
| 40 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(BASE_DIR))) | |
| 41 | |
| 42 # Temporary hack to deal with tlslite 0.3.8 -> 0.4.6 upgrade. | |
| 43 # | |
| 44 # TODO(davidben): Remove this when it has cycled through all the bots and | |
| 45 # developer checkouts or when http://crbug.com/356276 is resolved. | |
| 46 try: | |
| 47 os.remove(os.path.join(ROOT_DIR, 'third_party', 'tlslite', | |
| 48 'tlslite', 'utils', 'hmac.pyc')) | |
| 49 except Exception: | |
| 50 pass | |
| 51 | |
| 52 # Append at the end of sys.path; it's fine to use the system library. | |
| 53 sys.path.append(os.path.join(ROOT_DIR, 'third_party', 'pyftpdlib', 'src')) | |
| 54 | |
| 55 # Insert at the beginning of the path; we want to use our copies of these | |
| 56 # libraries unconditionally. | |
| 57 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'pywebsocket', 'src')) | |
| 58 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party', 'tlslite')) | |
| 59 | |
| 60 import mod_pywebsocket.standalone | |
| 61 from mod_pywebsocket.standalone import WebSocketServer | |
| 62 # Patch the standard ssl module into mod_pywebsocket.standalone manually. | |
| 63 mod_pywebsocket.standalone.ssl = ssl | |
| 64 | |
| 65 import pyftpdlib.ftpserver | |
| 66 | |
| 67 import tlslite | |
| 68 import tlslite.api | |
| 69 | |
| 70 import echo_message | |
| 71 import testserver_base | |
| 72 | |
| 73 SERVER_HTTP = 0 | |
| 74 SERVER_FTP = 1 | |
| 75 SERVER_TCP_ECHO = 2 | |
| 76 SERVER_UDP_ECHO = 3 | |
| 77 SERVER_BASIC_AUTH_PROXY = 4 | |
| 78 SERVER_WEBSOCKET = 5 | |
| 79 | |
| 80 # Default request queue size for WebSocketServer. | |
| 81 _DEFAULT_REQUEST_QUEUE_SIZE = 128 | |
| 82 | |
| 83 class WebSocketOptions: | |
| 84 """Holds options for WebSocketServer.""" | |
| 85 | |
| 86 def __init__(self, host, port, data_dir): | |
| 87 self.request_queue_size = _DEFAULT_REQUEST_QUEUE_SIZE | |
| 88 self.server_host = host | |
| 89 self.port = port | |
| 90 self.websock_handlers = data_dir | |
| 91 self.scan_dir = None | |
| 92 self.allow_handlers_outside_root_dir = False | |
| 93 self.websock_handlers_map_file = None | |
| 94 self.cgi_directories = [] | |
| 95 self.is_executable_method = None | |
| 96 self.allow_draft75 = False | |
| 97 self.strict = True | |
| 98 | |
| 99 self.use_tls = False | |
| 100 self.private_key = None | |
| 101 self.certificate = None | |
| 102 self.tls_client_auth = False | |
| 103 self.tls_client_ca = None | |
| 104 self.tls_module = 'ssl' | |
| 105 self.use_basic_auth = False | |
| 106 self.basic_auth_credential = 'Basic ' + base64.b64encode('test:test') | |
| 107 | |
| 108 | |
| 109 class RecordingSSLSessionCache(object): | |
| 110 """RecordingSSLSessionCache acts as a TLS session cache and maintains a log of | |
| 111 lookups and inserts in order to test session cache behaviours.""" | |
| 112 | |
| 113 def __init__(self): | |
| 114 self.log = [] | |
| 115 | |
| 116 def __getitem__(self, sessionID): | |
| 117 self.log.append(('lookup', sessionID)) | |
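|     # Always report a miss; only the sequence of cache operations is recorded. | |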
| 118 raise KeyError() | |
| 119 | |
| 120 def __setitem__(self, sessionID, session): | |
| 121 self.log.append(('insert', sessionID)) | |
| 122 | |
| 123 | |
| 124 class HTTPServer(testserver_base.ClientRestrictingServerMixIn, | |
| 125 testserver_base.BrokenPipeHandlerMixIn, | |
| 126 testserver_base.StoppableHTTPServer): | |
| 127 """This is a specialization of StoppableHTTPServer that adds client | |
| 128 verification.""" | |
| 129 | |
| 130 pass | |
| 131 | |
| 132 class OCSPServer(testserver_base.ClientRestrictingServerMixIn, | |
| 133 testserver_base.BrokenPipeHandlerMixIn, | |
| 134 BaseHTTPServer.HTTPServer): | |
| 135 """This is a specialization of HTTPServer that serves an | |
| 136 OCSP response""" | |
| 137 | |
| 138 def serve_forever_on_thread(self): | |
| 139 self.thread = threading.Thread(target = self.serve_forever, | |
| 140 name = "OCSPServerThread") | |
| 141 self.thread.start() | |
| 142 | |
| 143 def stop_serving(self): | |
| 144 self.shutdown() | |
| 145 self.thread.join() | |
| 146 | |
| 147 | |
| 148 class HTTPSServer(tlslite.api.TLSSocketServerMixIn, | |
| 149 testserver_base.ClientRestrictingServerMixIn, | |
| 150 testserver_base.BrokenPipeHandlerMixIn, | |
| 151 testserver_base.StoppableHTTPServer): | |
| 152 """This is a specialization of StoppableHTTPServer that add https support and | |
| 153 client verification.""" | |
| 154 | |
| 155   def __init__(self, server_address, request_handler_class, pem_cert_and_key, | |
| 156 ssl_client_auth, ssl_client_cas, ssl_client_cert_types, | |
| 157 ssl_bulk_ciphers, ssl_key_exchanges, enable_npn, | |
| 158 record_resume_info, tls_intolerant, | |
| 159 tls_intolerance_type, signed_cert_timestamps, | |
| 160 fallback_scsv_enabled, ocsp_response, disable_session_cache): | |
| 161 self.cert_chain = tlslite.api.X509CertChain() | |
| 162 self.cert_chain.parsePemList(pem_cert_and_key) | |
| 163     # Force the pure-Python implementation; otherwise behavior differs | |
| 164     # depending on whether the m2crypto Python module is present (an error is | |
| 165     # thrown when it is). m2crypto uses a C (OpenSSL-based) implementation | |
| 166     # under the hood. | |
| 167 self.private_key = tlslite.api.parsePEMKey(pem_cert_and_key, | |
| 168 private=True, | |
| 169 implementations=['python']) | |
| 170 self.ssl_client_auth = ssl_client_auth | |
| 171 self.ssl_client_cas = [] | |
| 172 self.ssl_client_cert_types = [] | |
| 173 if enable_npn: | |
| 174 self.next_protos = ['http/1.1'] | |
| 175 else: | |
| 176 self.next_protos = None | |
| 177 self.signed_cert_timestamps = signed_cert_timestamps | |
| 178 self.fallback_scsv_enabled = fallback_scsv_enabled | |
| 179 self.ocsp_response = ocsp_response | |
| 180 | |
| 181 if ssl_client_auth: | |
| 182 for ca_file in ssl_client_cas: | |
| 183 s = open(ca_file).read() | |
| 184 x509 = tlslite.api.X509() | |
| 185 x509.parse(s) | |
| 186 self.ssl_client_cas.append(x509.subject) | |
| 187 | |
| 188 for cert_type in ssl_client_cert_types: | |
| 189 self.ssl_client_cert_types.append({ | |
| 190 "rsa_sign": tlslite.api.ClientCertificateType.rsa_sign, | |
| 191 "dss_sign": tlslite.api.ClientCertificateType.dss_sign, | |
| 192 "ecdsa_sign": tlslite.api.ClientCertificateType.ecdsa_sign, | |
| 193 }[cert_type]) | |
| 194 | |
| 195 self.ssl_handshake_settings = tlslite.api.HandshakeSettings() | |
| 196 # Enable SSLv3 for testing purposes. | |
| 197 self.ssl_handshake_settings.minVersion = (3, 0) | |
| 198 if ssl_bulk_ciphers is not None: | |
| 199 self.ssl_handshake_settings.cipherNames = ssl_bulk_ciphers | |
| 200 if ssl_key_exchanges is not None: | |
| 201 self.ssl_handshake_settings.keyExchangeNames = ssl_key_exchanges | |
| 202 if tls_intolerant != 0: | |
| 203 self.ssl_handshake_settings.tlsIntolerant = (3, tls_intolerant) | |
| 204 self.ssl_handshake_settings.tlsIntoleranceType = tls_intolerance_type | |
| 205 | |
| 206 | |
| 207 if disable_session_cache: | |
| 208 self.session_cache = None | |
| 209 elif record_resume_info: | |
| 210 # If record_resume_info is true then we'll replace the session cache with | |
| 211 # an object that records the lookups and inserts that it sees. | |
| 212 self.session_cache = RecordingSSLSessionCache() | |
| 213 else: | |
| 214 self.session_cache = tlslite.api.SessionCache() | |
| 215 testserver_base.StoppableHTTPServer.__init__(self, | |
| 216 server_address, | |
| 217                                                  request_handler_class) | |
| 218 | |
| 219 def handshake(self, tlsConnection): | |
| 220 """Creates the SSL connection.""" | |
| 221 | |
| 222 try: | |
| 223 self.tlsConnection = tlsConnection | |
| 224 tlsConnection.handshakeServer(certChain=self.cert_chain, | |
| 225 privateKey=self.private_key, | |
| 226 sessionCache=self.session_cache, | |
| 227 reqCert=self.ssl_client_auth, | |
| 228 settings=self.ssl_handshake_settings, | |
| 229 reqCAs=self.ssl_client_cas, | |
| 230 reqCertTypes=self.ssl_client_cert_types, | |
| 231 nextProtos=self.next_protos, | |
| 232 signedCertTimestamps= | |
| 233 self.signed_cert_timestamps, | |
| 234 fallbackSCSV=self.fallback_scsv_enabled, | |
| 235 ocspResponse = self.ocsp_response) | |
| 236 tlsConnection.ignoreAbruptClose = True | |
| 237 return True | |
| 238 except tlslite.api.TLSAbruptCloseError: | |
| 239 # Ignore abrupt close. | |
| 240 return True | |
| 241 except tlslite.api.TLSError, error: | |
| 242 print "Handshake failure:", str(error) | |
| 243 return False | |
| 244 | |
| 245 | |
| 246 class FTPServer(testserver_base.ClientRestrictingServerMixIn, | |
| 247 pyftpdlib.ftpserver.FTPServer): | |
| 248 """This is a specialization of FTPServer that adds client verification.""" | |
| 249 | |
| 250 pass | |
| 251 | |
| 252 | |
| 253 class TCPEchoServer(testserver_base.ClientRestrictingServerMixIn, | |
| 254 SocketServer.TCPServer): | |
| 255 """A TCP echo server that echoes back what it has received.""" | |
| 256 | |
| 257 def server_bind(self): | |
| 258 """Override server_bind to store the server name.""" | |
| 259 | |
| 260 SocketServer.TCPServer.server_bind(self) | |
| 261 host, port = self.socket.getsockname()[:2] | |
| 262 self.server_name = socket.getfqdn(host) | |
| 263 self.server_port = port | |
| 264 | |
| 265 def serve_forever(self): | |
| 266 self.stop = False | |
| 267 self.nonce_time = None | |
| 268 while not self.stop: | |
| 269 self.handle_request() | |
| 270 self.socket.close() | |
| 271 | |
| 272 | |
| 273 class UDPEchoServer(testserver_base.ClientRestrictingServerMixIn, | |
| 274 SocketServer.UDPServer): | |
| 275 """A UDP echo server that echoes back what it has received.""" | |
| 276 | |
| 277 def server_bind(self): | |
| 278 """Override server_bind to store the server name.""" | |
| 279 | |
| 280 SocketServer.UDPServer.server_bind(self) | |
| 281 host, port = self.socket.getsockname()[:2] | |
| 282 self.server_name = socket.getfqdn(host) | |
| 283 self.server_port = port | |
| 284 | |
| 285 def serve_forever(self): | |
| 286 self.stop = False | |
| 287 self.nonce_time = None | |
| 288 while not self.stop: | |
| 289 self.handle_request() | |
| 290 self.socket.close() | |
| 291 | |
| 292 | |
| 293 class TestPageHandler(testserver_base.BasePageHandler): | |
| 294   # Class variables that persist state between page handler | |
| 295   # invocations. | |
| 296 rst_limits = {} | |
| 297 fail_precondition = {} | |
| 298 | |
| 299 def __init__(self, request, client_address, socket_server): | |
| 300 connect_handlers = [ | |
| 301 self.RedirectConnectHandler, | |
| 302 self.ServerAuthConnectHandler, | |
| 303 self.DefaultConnectResponseHandler] | |
| 304 get_handlers = [ | |
| 305 self.NoCacheMaxAgeTimeHandler, | |
| 306 self.NoCacheTimeHandler, | |
| 307 self.CacheTimeHandler, | |
| 308 self.CacheExpiresHandler, | |
| 309 self.CacheProxyRevalidateHandler, | |
| 310 self.CachePrivateHandler, | |
| 311 self.CachePublicHandler, | |
| 312 self.CacheSMaxAgeHandler, | |
| 313 self.CacheMustRevalidateHandler, | |
| 314 self.CacheMustRevalidateMaxAgeHandler, | |
| 315 self.CacheNoStoreHandler, | |
| 316 self.CacheNoStoreMaxAgeHandler, | |
| 317 self.CacheNoTransformHandler, | |
| 318 self.DownloadHandler, | |
| 319 self.DownloadFinishHandler, | |
| 320 self.EchoHeader, | |
| 321 self.EchoHeaderCache, | |
| 322 self.EchoAllHandler, | |
| 323 self.ZipFileHandler, | |
| 324 self.FileHandler, | |
| 325 self.SetCookieHandler, | |
| 326 self.SetManyCookiesHandler, | |
| 327 self.ExpectAndSetCookieHandler, | |
| 328 self.SetHeaderHandler, | |
| 329 self.AuthBasicHandler, | |
| 330 self.AuthDigestHandler, | |
| 331 self.SlowServerHandler, | |
| 332 self.ChunkedServerHandler, | |
| 333 self.ContentTypeHandler, | |
| 334 self.NoContentHandler, | |
| 335 self.ServerRedirectHandler, | |
| 336 self.CrossSiteRedirectHandler, | |
| 337 self.ClientRedirectHandler, | |
| 338 self.GetSSLSessionCacheHandler, | |
| 339 self.SSLManySmallRecords, | |
| 340 self.GetChannelID, | |
| 341 self.ClientCipherListHandler, | |
| 342 self.CloseSocketHandler, | |
| 343 self.RangeResetHandler, | |
| 344 self.DefaultResponseHandler] | |
| 345 post_handlers = [ | |
| 346 self.EchoTitleHandler, | |
| 347 self.EchoHandler, | |
| 348 self.PostOnlyFileHandler, | |
| 349 self.EchoMultipartPostHandler] + get_handlers | |
| 350 put_handlers = [ | |
| 351 self.EchoTitleHandler, | |
| 352 self.EchoHandler] + get_handlers | |
| 353 head_handlers = [ | |
| 354 self.FileHandler, | |
| 355 self.DefaultResponseHandler] | |
| 356 | |
| 357 self._mime_types = { | |
| 358 'crx' : 'application/x-chrome-extension', | |
| 359 'exe' : 'application/octet-stream', | |
| 360 'gif': 'image/gif', | |
| 361 'jpeg' : 'image/jpeg', | |
| 362 'jpg' : 'image/jpeg', | |
| 363 'js' : 'application/javascript', | |
| 364 'json': 'application/json', | |
| 365 'pdf' : 'application/pdf', | |
| 366 'txt' : 'text/plain', | |
| 367 'wav' : 'audio/wav', | |
| 368 'xml' : 'text/xml' | |
| 369 } | |
| 370 self._default_mime_type = 'text/html' | |
| 371 | |
| 372 testserver_base.BasePageHandler.__init__(self, request, client_address, | |
| 373 socket_server, connect_handlers, | |
| 374 get_handlers, head_handlers, | |
| 375 post_handlers, put_handlers) | |
| 376 | |
| 377 def GetMIMETypeFromName(self, file_name): | |
| 378 """Returns the mime type for the specified file_name. So far it only looks | |
| 379 at the file extension.""" | |
| 380 | |
| 381 (_shortname, extension) = os.path.splitext(file_name.split("?")[0]) | |
| 382 if len(extension) == 0: | |
| 383 # no extension. | |
| 384 return self._default_mime_type | |
| 385 | |
| 386 # extension starts with a dot, so we need to remove it | |
| 387 return self._mime_types.get(extension[1:], self._default_mime_type) | |
| 388 | |
| 389 def NoCacheMaxAgeTimeHandler(self): | |
| 390 """This request handler yields a page with the title set to the current | |
| 391 system time, and no caching requested.""" | |
| 392 | |
| 393 if not self._ShouldHandleRequest("/nocachetime/maxage"): | |
| 394 return False | |
| 395 | |
| 396 self.send_response(200) | |
| 397 self.send_header('Cache-Control', 'max-age=0') | |
| 398 self.send_header('Content-Type', 'text/html') | |
| 399 self.end_headers() | |
| 400 | |
| 401 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 402 time.time()) | |
| 403 | |
| 404 return True | |
| 405 | |
| 406 def NoCacheTimeHandler(self): | |
| 407 """This request handler yields a page with the title set to the current | |
| 408 system time, and no caching requested.""" | |
| 409 | |
| 410 if not self._ShouldHandleRequest("/nocachetime"): | |
| 411 return False | |
| 412 | |
| 413 self.send_response(200) | |
| 414 self.send_header('Cache-Control', 'no-cache') | |
| 415 self.send_header('Content-Type', 'text/html') | |
| 416 self.end_headers() | |
| 417 | |
| 418 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 419 time.time()) | |
| 420 | |
| 421 return True | |
| 422 | |
| 423 def CacheTimeHandler(self): | |
| 424 """This request handler yields a page with the title set to the current | |
| 425 system time, and allows caching for one minute.""" | |
| 426 | |
| 427 if not self._ShouldHandleRequest("/cachetime"): | |
| 428 return False | |
| 429 | |
| 430 self.send_response(200) | |
| 431 self.send_header('Cache-Control', 'max-age=60') | |
| 432 self.send_header('Content-Type', 'text/html') | |
| 433 self.end_headers() | |
| 434 | |
| 435 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 436 time.time()) | |
| 437 | |
| 438 return True | |
| 439 | |
| 440 def CacheExpiresHandler(self): | |
| 441 """This request handler yields a page with the title set to the current | |
| 442     system time, and sets the page to expire on 1 Jan 2099.""" | |
| 443 | |
| 444 if not self._ShouldHandleRequest("/cache/expires"): | |
| 445 return False | |
| 446 | |
| 447 self.send_response(200) | |
| 448 self.send_header('Expires', 'Thu, 1 Jan 2099 00:00:00 GMT') | |
| 449 self.send_header('Content-Type', 'text/html') | |
| 450 self.end_headers() | |
| 451 | |
| 452 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 453 time.time()) | |
| 454 | |
| 455 return True | |
| 456 | |
| 457 def CacheProxyRevalidateHandler(self): | |
| 458 """This request handler yields a page with the title set to the current | |
| 459     system time, and allows caching for 60 seconds.""" | |
| 460 | |
| 461 if not self._ShouldHandleRequest("/cache/proxy-revalidate"): | |
| 462 return False | |
| 463 | |
| 464 self.send_response(200) | |
| 465 self.send_header('Content-Type', 'text/html') | |
| 466 self.send_header('Cache-Control', 'max-age=60, proxy-revalidate') | |
| 467 self.end_headers() | |
| 468 | |
| 469 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 470 time.time()) | |
| 471 | |
| 472 return True | |
| 473 | |
| 474 def CachePrivateHandler(self): | |
| 475 """This request handler yields a page with the title set to the current | |
| 476     system time, and allows private caching for 3 seconds.""" | |
| 477 | |
| 478 if not self._ShouldHandleRequest("/cache/private"): | |
| 479 return False | |
| 480 | |
| 481 self.send_response(200) | |
| 482 self.send_header('Content-Type', 'text/html') | |
| 483 self.send_header('Cache-Control', 'max-age=3, private') | |
| 484 self.end_headers() | |
| 485 | |
| 486 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 487 time.time()) | |
| 488 | |
| 489 return True | |
| 490 | |
| 491 def CachePublicHandler(self): | |
| 492 """This request handler yields a page with the title set to the current | |
| 493     system time, and allows public caching for 3 seconds.""" | |
| 494 | |
| 495 if not self._ShouldHandleRequest("/cache/public"): | |
| 496 return False | |
| 497 | |
| 498 self.send_response(200) | |
| 499 self.send_header('Content-Type', 'text/html') | |
| 500 self.send_header('Cache-Control', 'max-age=3, public') | |
| 501 self.end_headers() | |
| 502 | |
| 503 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 504 time.time()) | |
| 505 | |
| 506 return True | |
| 507 | |
| 508 def CacheSMaxAgeHandler(self): | |
| 509 """This request handler yields a page with the title set to the current | |
| 510     system time, and allows shared (proxy) caching but not browser caching.""" | |
| 511 | |
| 512 if not self._ShouldHandleRequest("/cache/s-maxage"): | |
| 513 return False | |
| 514 | |
| 515 self.send_response(200) | |
| 516 self.send_header('Content-Type', 'text/html') | |
| 517 self.send_header('Cache-Control', 'public, s-maxage = 60, max-age = 0') | |
| 518 self.end_headers() | |
| 519 | |
| 520 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 521 time.time()) | |
| 522 | |
| 523 return True | |
| 524 | |
| 525 def CacheMustRevalidateHandler(self): | |
| 526 """This request handler yields a page with the title set to the current | |
| 527 system time, and does not allow caching.""" | |
| 528 | |
| 529 if not self._ShouldHandleRequest("/cache/must-revalidate"): | |
| 530 return False | |
| 531 | |
| 532 self.send_response(200) | |
| 533 self.send_header('Content-Type', 'text/html') | |
| 534 self.send_header('Cache-Control', 'must-revalidate') | |
| 535 self.end_headers() | |
| 536 | |
| 537 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 538 time.time()) | |
| 539 | |
| 540 return True | |
| 541 | |
| 542 def CacheMustRevalidateMaxAgeHandler(self): | |
| 543 """This request handler yields a page with the title set to the current | |
| 544     system time, and does not allow caching even though a max-age of 60 | |
| 545     seconds is specified.""" | |
| 546 | |
| 547 if not self._ShouldHandleRequest("/cache/must-revalidate/max-age"): | |
| 548 return False | |
| 549 | |
| 550 self.send_response(200) | |
| 551 self.send_header('Content-Type', 'text/html') | |
| 552 self.send_header('Cache-Control', 'max-age=60, must-revalidate') | |
| 553 self.end_headers() | |
| 554 | |
| 555 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 556 time.time()) | |
| 557 | |
| 558 return True | |
| 559 | |
| 560 def CacheNoStoreHandler(self): | |
| 561 """This request handler yields a page with the title set to the current | |
| 562 system time, and does not allow the page to be stored.""" | |
| 563 | |
| 564 if not self._ShouldHandleRequest("/cache/no-store"): | |
| 565 return False | |
| 566 | |
| 567 self.send_response(200) | |
| 568 self.send_header('Content-Type', 'text/html') | |
| 569 self.send_header('Cache-Control', 'no-store') | |
| 570 self.end_headers() | |
| 571 | |
| 572 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 573 time.time()) | |
| 574 | |
| 575 return True | |
| 576 | |
| 577 def CacheNoStoreMaxAgeHandler(self): | |
| 578 """This request handler yields a page with the title set to the current | |
| 579 system time, and does not allow the page to be stored even though max-age | |
| 580 of 60 seconds is specified.""" | |
| 581 | |
| 582 if not self._ShouldHandleRequest("/cache/no-store/max-age"): | |
| 583 return False | |
| 584 | |
| 585 self.send_response(200) | |
| 586 self.send_header('Content-Type', 'text/html') | |
| 587 self.send_header('Cache-Control', 'max-age=60, no-store') | |
| 588 self.end_headers() | |
| 589 | |
| 590 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 591 time.time()) | |
| 592 | |
| 593 return True | |
| 594 | |
| 595 | |
| 596 def CacheNoTransformHandler(self): | |
| 597 """This request handler yields a page with the title set to the current | |
| 598     system time, and does not allow the content to be transformed during | |
| 599     user-agent caching.""" | |
| 600 | |
| 601 if not self._ShouldHandleRequest("/cache/no-transform"): | |
| 602 return False | |
| 603 | |
| 604 self.send_response(200) | |
| 605 self.send_header('Content-Type', 'text/html') | |
| 606 self.send_header('Cache-Control', 'no-transform') | |
| 607 self.end_headers() | |
| 608 | |
| 609 self.wfile.write('<html><head><title>%s</title></head></html>' % | |
| 610 time.time()) | |
| 611 | |
| 612 return True | |
| 613 | |
| 614 def EchoHeader(self): | |
| 615 """This handler echoes back the value of a specific request header.""" | |
| 616 | |
| 617 return self.EchoHeaderHelper("/echoheader") | |
| 618 | |
| 619 def EchoHeaderCache(self): | |
| 620 """This function echoes back the value of a specific request header while | |
| 621 allowing caching for 16 hours.""" | |
| 622 | |
| 623 return self.EchoHeaderHelper("/echoheadercache") | |
| 624 | |
| 625 def EchoHeaderHelper(self, echo_header): | |
| 626 """This function echoes back the value of the request header passed in.""" | |
| 627 | |
| 628 if not self._ShouldHandleRequest(echo_header): | |
| 629 return False | |
| 630 | |
| 631     query_char = self.path.find('?') | |
| 632     # Default to '' so a request without a query string does not crash below. | |
| 633     header_name = self.path[query_char+1:] if query_char != -1 else '' | |
| 634 | |
| 635 self.send_response(200) | |
| 636 self.send_header('Content-Type', 'text/plain') | |
| 637 if echo_header == '/echoheadercache': | |
| 638 self.send_header('Cache-control', 'max-age=60000') | |
| 639 else: | |
| 640 self.send_header('Cache-control', 'no-cache') | |
| 641     # Insert a Vary header to properly indicate that the cacheability of this | |
| 642     # response depends on the value of the request header being echoed. | |
| 643 if len(header_name) > 0: | |
| 644 self.send_header('Vary', header_name) | |
| 645 self.end_headers() | |
| 646 | |
| 647 if len(header_name) > 0: | |
| 648 self.wfile.write(self.headers.getheader(header_name)) | |
| 649 | |
| 650 return True | |
| 651 | |
| 652 def ReadRequestBody(self): | |
| 653 """This function reads the body of the current HTTP request, handling | |
| 654 both plain and chunked transfer encoded requests.""" | |
| 655 | |
| 656 if self.headers.getheader('transfer-encoding') != 'chunked': | |
| 657 length = int(self.headers.getheader('content-length')) | |
| 658 return self.rfile.read(length) | |
| 659 | |
| 660 # Read the request body as chunks. | |
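|     # Each chunk arrives as "<hex length>\r\n<data>\r\n"; a zero-length chunk | |
|     # followed by a final CRLF terminates the body (trailers are not handled). | |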
| 661 body = "" | |
| 662 while True: | |
| 663 line = self.rfile.readline() | |
| 664 length = int(line, 16) | |
| 665 if length == 0: | |
| 666 self.rfile.readline() | |
| 667 break | |
| 668 body += self.rfile.read(length) | |
| 669 self.rfile.read(2) | |
| 670 return body | |
| 671 | |
| 672 def EchoHandler(self): | |
| 673 """This handler just echoes back the payload of the request, for testing | |
| 674 form submission.""" | |
| 675 | |
| 676 if not self._ShouldHandleRequest("/echo"): | |
| 677 return False | |
| 678 | |
| 679 _, _, _, _, query, _ = urlparse.urlparse(self.path) | |
| 680 query_params = cgi.parse_qs(query, True) | |
| 681 if 'status' in query_params: | |
| 682 self.send_response(int(query_params['status'][0])) | |
| 683 else: | |
| 684 self.send_response(200) | |
| 685 self.send_header('Content-Type', 'text/html') | |
| 686 self.end_headers() | |
| 687 self.wfile.write(self.ReadRequestBody()) | |
| 688 return True | |
| 689 | |
| 690 def EchoTitleHandler(self): | |
| 691 """This handler is like Echo, but sets the page title to the request.""" | |
| 692 | |
| 693 if not self._ShouldHandleRequest("/echotitle"): | |
| 694 return False | |
| 695 | |
| 696 self.send_response(200) | |
| 697 self.send_header('Content-Type', 'text/html') | |
| 698 self.end_headers() | |
| 699 request = self.ReadRequestBody() | |
| 700 self.wfile.write('<html><head><title>') | |
| 701 self.wfile.write(request) | |
| 702 self.wfile.write('</title></head></html>') | |
| 703 return True | |
| 704 | |
| 705 def EchoAllHandler(self): | |
| 706 """This handler yields a (more) human-readable page listing information | |
| 707 about the request header & contents.""" | |
| 708 | |
| 709 if not self._ShouldHandleRequest("/echoall"): | |
| 710 return False | |
| 711 | |
| 712 self.send_response(200) | |
| 713 self.send_header('Content-Type', 'text/html') | |
| 714 self.end_headers() | |
| 715 self.wfile.write('<html><head><style>' | |
| 716 'pre { border: 1px solid black; margin: 5px; padding: 5px }' | |
| 717 '</style></head><body>' | |
| 718 '<div style="float: right">' | |
| 719 '<a href="/echo">back to referring page</a></div>' | |
| 720 '<h1>Request Body:</h1><pre>') | |
| 721 | |
| 722 if self.command == 'POST' or self.command == 'PUT': | |
| 723 qs = self.ReadRequestBody() | |
| 724 params = cgi.parse_qs(qs, keep_blank_values=1) | |
| 725 | |
| 726 for param in params: | |
| 727 self.wfile.write('%s=%s\n' % (param, params[param][0])) | |
| 728 | |
| 729 self.wfile.write('</pre>') | |
| 730 | |
| 731 self.wfile.write('<h1>Request Headers:</h1><pre>%s</pre>' % self.headers) | |
| 732 | |
| 733 self.wfile.write('</body></html>') | |
| 734 return True | |
| 735 | |
| 736 def EchoMultipartPostHandler(self): | |
| 737 """This handler echoes received multipart post data as json format.""" | |
| 738 | |
| 739 if not (self._ShouldHandleRequest("/echomultipartpost") or | |
| 740 self._ShouldHandleRequest("/searchbyimage")): | |
| 741 return False | |
| 742 | |
| 743 content_type, parameters = cgi.parse_header( | |
| 744 self.headers.getheader('content-type')) | |
| 745 if content_type == 'multipart/form-data': | |
| 746 post_multipart = cgi.parse_multipart(self.rfile, parameters) | |
| 747 elif content_type == 'application/x-www-form-urlencoded': | |
| 748 raise Exception('POST by application/x-www-form-urlencoded is ' | |
| 749 'not implemented.') | |
| 750 else: | |
| 751 post_multipart = {} | |
| 752 | |
| 753     # Since the data can be binary, we base64-encode it. | |
| 754 post_multipart_base64_encoded = {} | |
| 755 for field, values in post_multipart.items(): | |
| 756 post_multipart_base64_encoded[field] = [base64.b64encode(value) | |
| 757 for value in values] | |
| 758 | |
| 759 result = {'POST_multipart' : post_multipart_base64_encoded} | |
| 760 | |
| 761 self.send_response(200) | |
| 762 self.send_header("Content-type", "text/plain") | |
| 763 self.end_headers() | |
| 764 self.wfile.write(json.dumps(result, indent=2, sort_keys=False)) | |
| 765 return True | |
| 766 | |
| 767 def DownloadHandler(self): | |
| 768 """This handler sends a downloadable file with or without reporting | |
| 769 the size (6K).""" | |
| 770 | |
| 771 if self.path.startswith("/download-unknown-size"): | |
| 772 send_length = False | |
| 773 elif self.path.startswith("/download-known-size"): | |
| 774 send_length = True | |
| 775 else: | |
| 776 return False | |
| 777 | |
| 778 # | |
| 779 # The test which uses this functionality is attempting to send | |
| 780 # small chunks of data to the client. Use a fairly large buffer | |
| 781 # so that we'll fill chrome's IO buffer enough to force it to | |
| 782 # actually write the data. | |
| 783 # See also the comments in the client-side of this test in | |
| 784 # download_uitest.cc | |
| 785 # | |
| 786 size_chunk1 = 35*1024 | |
| 787 size_chunk2 = 10*1024 | |
| 788 | |
| 789 self.send_response(200) | |
| 790 self.send_header('Content-Type', 'application/octet-stream') | |
| 791 self.send_header('Cache-Control', 'max-age=0') | |
| 792 if send_length: | |
| 793 self.send_header('Content-Length', size_chunk1 + size_chunk2) | |
| 794 self.end_headers() | |
| 795 | |
| 796 # First chunk of data: | |
| 797 self.wfile.write("*" * size_chunk1) | |
| 798 self.wfile.flush() | |
| 799 | |
| 800 # handle requests until one of them clears this flag. | |
| 801 self.server.wait_for_download = True | |
| 802 while self.server.wait_for_download: | |
| 803 self.server.handle_request() | |
| 804 | |
| 805 # Second chunk of data: | |
| 806 self.wfile.write("*" * size_chunk2) | |
| 807 return True | |
| 808 | |
| 809 def DownloadFinishHandler(self): | |
| 810 """This handler just tells the server to finish the current download.""" | |
| 811 | |
| 812 if not self._ShouldHandleRequest("/download-finish"): | |
| 813 return False | |
| 814 | |
| 815 self.server.wait_for_download = False | |
| 816 self.send_response(200) | |
| 817 self.send_header('Content-Type', 'text/html') | |
| 818 self.send_header('Cache-Control', 'max-age=0') | |
| 819 self.end_headers() | |
| 820 return True | |
| 821 | |
| 822 def _ReplaceFileData(self, data, query_parameters): | |
| 823 """Replaces matching substrings in a file. | |
| 824 | |
| 825 If the 'replace_text' URL query parameter is present, it is expected to be | |
| 826     of the form old_text:new_text, where both pieces are URL-safe base64 | |
| 827     encoded; any occurrences of old_text in the file are replaced with | |
| 828     new_text. Multiple 'replace_text' parameters may be specified. | |
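|     For example, replace_text=Zm9v:YmFy (URL-safe base64 for "foo" and "bar") | |
|     replaces every occurrence of "foo" in the file with "bar". | |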
| 829 | |
| 830 If the parameters are not present, |data| is returned. | |
| 831 """ | |
| 832 | |
| 833 query_dict = cgi.parse_qs(query_parameters) | |
| 834 replace_text_values = query_dict.get('replace_text', []) | |
| 835 for replace_text_value in replace_text_values: | |
| 836 replace_text_args = replace_text_value.split(':') | |
| 837 if len(replace_text_args) != 2: | |
| 838 raise ValueError( | |
| 839 'replace_text must be of form old_text:new_text. Actual value: %s' % | |
| 840 replace_text_value) | |
| 841 old_text_b64, new_text_b64 = replace_text_args | |
| 842 old_text = base64.urlsafe_b64decode(old_text_b64) | |
| 843 new_text = base64.urlsafe_b64decode(new_text_b64) | |
| 844 data = data.replace(old_text, new_text) | |
| 845 return data | |
| 846 | |
| 847 def ZipFileHandler(self): | |
| 848 """This handler sends the contents of the requested file in compressed form. | |
| 849 Can pass in a parameter that specifies that the content length be | |
| 850 C - the compressed size (OK), | |
| 851 U - the uncompressed size (Non-standard, but handled), | |
| 852 S - less than compressed (OK because we keep going), | |
| 853 M - larger than compressed but less than uncompressed (an error), | |
| 854 L - larger than uncompressed (an error) | |
| 855 Example: compressedfiles/Picture_1.doc?C | |
| 856 """ | |
| 857 | |
| 858 prefix = "/compressedfiles/" | |
| 859 if not self.path.startswith(prefix): | |
| 860 return False | |
| 861 | |
| 862 # Consume a request body if present. | |
| 863 if self.command == 'POST' or self.command == 'PUT' : | |
| 864 self.ReadRequestBody() | |
| 865 | |
| 866 _, _, url_path, _, query, _ = urlparse.urlparse(self.path) | |
| 867 | |
| 868 if not query in ('C', 'U', 'S', 'M', 'L'): | |
| 869 return False | |
| 870 | |
| 871 sub_path = url_path[len(prefix):] | |
| 872 entries = sub_path.split('/') | |
| 873 file_path = os.path.join(self.server.data_dir, *entries) | |
| 874 if os.path.isdir(file_path): | |
| 875 file_path = os.path.join(file_path, 'index.html') | |
| 876 | |
| 877 if not os.path.isfile(file_path): | |
| 878 print "File not found " + sub_path + " full path:" + file_path | |
| 879 self.send_error(404) | |
| 880 return True | |
| 881 | |
| 882 f = open(file_path, "rb") | |
| 883 data = f.read() | |
| 884 uncompressed_len = len(data) | |
| 885 f.close() | |
| 886 | |
| 887 # Compress the data. | |
| 888 data = zlib.compress(data) | |
| 889 compressed_len = len(data) | |
| 890 | |
| 891 content_length = compressed_len | |
| 892 if query == 'U': | |
| 893 content_length = uncompressed_len | |
| 894 elif query == 'S': | |
| 895 content_length = compressed_len / 2 | |
| 896 elif query == 'M': | |
| 897 content_length = (compressed_len + uncompressed_len) / 2 | |
| 898 elif query == 'L': | |
| 899 content_length = compressed_len + uncompressed_len | |
| 900 | |
| 901 self.send_response(200) | |
| 902 self.send_header('Content-Type', 'application/msword') | |
| 903 self.send_header('Content-encoding', 'deflate') | |
| 904 self.send_header('Connection', 'close') | |
| 905 self.send_header('Content-Length', content_length) | |
| 906 self.send_header('ETag', '\'' + file_path + '\'') | |
| 907 self.end_headers() | |
| 908 | |
| 909 self.wfile.write(data) | |
| 910 | |
| 911 return True | |
| 912 | |
| 913 def FileHandler(self): | |
| 914 """This handler sends the contents of the requested file. Wow, it's like | |
| 915 a real webserver!""" | |
| 916 | |
| 917 prefix = self.server.file_root_url | |
| 918 if not self.path.startswith(prefix): | |
| 919 return False | |
| 920 return self._FileHandlerHelper(prefix) | |
| 921 | |
| 922 def PostOnlyFileHandler(self): | |
| 923 """This handler sends the contents of the requested file on a POST.""" | |
| 924 | |
| 925 prefix = urlparse.urljoin(self.server.file_root_url, 'post/') | |
| 926 if not self.path.startswith(prefix): | |
| 927 return False | |
| 928 return self._FileHandlerHelper(prefix) | |
| 929 | |
| 930 def _FileHandlerHelper(self, prefix): | |
| 931 request_body = '' | |
| 932 if self.command == 'POST' or self.command == 'PUT': | |
| 933 # Consume a request body if present. | |
| 934 request_body = self.ReadRequestBody() | |
| 935 | |
| 936 _, _, url_path, _, query, _ = urlparse.urlparse(self.path) | |
| 937 query_dict = cgi.parse_qs(query) | |
| 938 | |
| 939 expected_body = query_dict.get('expected_body', []) | |
| 940 if expected_body and request_body not in expected_body: | |
| 941 self.send_response(404) | |
| 942 self.end_headers() | |
| 943 self.wfile.write('') | |
| 944 return True | |
| 945 | |
| 946 expected_headers = query_dict.get('expected_headers', []) | |
| 947 for expected_header in expected_headers: | |
| 948 header_name, expected_value = expected_header.split(':') | |
| 949 if self.headers.getheader(header_name) != expected_value: | |
| 950 self.send_response(404) | |
| 951 self.end_headers() | |
| 952 self.wfile.write('') | |
| 953 return True | |
| 954 | |
| 955 sub_path = url_path[len(prefix):] | |
| 956 entries = sub_path.split('/') | |
| 957 file_path = os.path.join(self.server.data_dir, *entries) | |
| 958 if os.path.isdir(file_path): | |
| 959 file_path = os.path.join(file_path, 'index.html') | |
| 960 | |
| 961 if not os.path.isfile(file_path): | |
| 962 print "File not found " + sub_path + " full path:" + file_path | |
| 963 self.send_error(404) | |
| 964 return True | |
| 965 | |
| 966 f = open(file_path, "rb") | |
| 967 data = f.read() | |
| 968 f.close() | |
| 969 | |
| 970 data = self._ReplaceFileData(data, query) | |
| 971 | |
| 972 old_protocol_version = self.protocol_version | |
| 973 | |
| 974 # If file.mock-http-headers exists, it contains the headers we | |
| 975 # should send. Read them in and parse them. | |
| 976 headers_path = file_path + '.mock-http-headers' | |
| 977 if os.path.isfile(headers_path): | |
| 978 f = open(headers_path, "r") | |
| 979 | |
| 980 # "HTTP/1.1 200 OK" | |
| 981 response = f.readline() | |
| 982 http_major, http_minor, status_code = re.findall( | |
| 983           r'HTTP/(\d+)\.(\d+) (\d+)', response)[0] | |
| 984 self.protocol_version = "HTTP/%s.%s" % (http_major, http_minor) | |
| 985 self.send_response(int(status_code)) | |
| 986 | |
| 987 for line in f: | |
| 988 header_values = re.findall('(\S+):\s*(.*)', line) | |
| 989 if len(header_values) > 0: | |
| 990 # "name: value" | |
| 991 name, value = header_values[0] | |
| 992 self.send_header(name, value) | |
| 993 f.close() | |
| 994 else: | |
| 995 # Could be more generic once we support mime-type sniffing, but for | |
| 996 # now we need to set it explicitly. | |
| 997 | |
| 998 range_header = self.headers.get('Range') | |
| 999 if range_header and range_header.startswith('bytes='): | |
| 1000         # Note this doesn't handle all valid byte-range values (e.g. ranges | |
| 1001         # with an open start), just enough for what we needed so far. | |
| 1002 range_header = range_header[6:].split('-') | |
| 1003 start = int(range_header[0]) | |
| 1004 if range_header[1]: | |
| 1005 end = int(range_header[1]) | |
| 1006 else: | |
| 1007 end = len(data) - 1 | |
| 1008 | |
| 1009 self.send_response(206) | |
| 1010 content_range = ('bytes ' + str(start) + '-' + str(end) + '/' + | |
| 1011 str(len(data))) | |
| 1012 self.send_header('Content-Range', content_range) | |
| 1013 data = data[start: end + 1] | |
| 1014 else: | |
| 1015 self.send_response(200) | |
| 1016 | |
| 1017 self.send_header('Content-Type', self.GetMIMETypeFromName(file_path)) | |
| 1018 self.send_header('Accept-Ranges', 'bytes') | |
| 1019 self.send_header('Content-Length', len(data)) | |
| 1020 self.send_header('ETag', '\'' + file_path + '\'') | |
| 1021 self.end_headers() | |
| 1022 | |
| 1023 if (self.command != 'HEAD'): | |
| 1024 self.wfile.write(data) | |
| 1025 | |
| 1026 self.protocol_version = old_protocol_version | |
| 1027 return True | |
| 1028 | |
| 1029 def SetCookieHandler(self): | |
| 1030 """This handler just sets a cookie, for testing cookie handling.""" | |
| 1031 | |
| 1032 if not self._ShouldHandleRequest("/set-cookie"): | |
| 1033 return False | |
| 1034 | |
| 1035 query_char = self.path.find('?') | |
| 1036 if query_char != -1: | |
| 1037 cookie_values = self.path[query_char + 1:].split('&') | |
| 1038 else: | |
| 1039 cookie_values = ("",) | |
| 1040 self.send_response(200) | |
| 1041 self.send_header('Content-Type', 'text/html') | |
| 1042 for cookie_value in cookie_values: | |
| 1043 self.send_header('Set-Cookie', '%s' % cookie_value) | |
| 1044 self.end_headers() | |
| 1045 for cookie_value in cookie_values: | |
| 1046 self.wfile.write('%s' % cookie_value) | |
| 1047 return True | |
| 1048 | |
| 1049 def SetManyCookiesHandler(self): | |
| 1050 """This handler just sets a given number of cookies, for testing handling | |
| 1051 of large numbers of cookies.""" | |
| 1052 | |
| 1053 if not self._ShouldHandleRequest("/set-many-cookies"): | |
| 1054 return False | |
| 1055 | |
| 1056 query_char = self.path.find('?') | |
| 1057 if query_char != -1: | |
| 1058 num_cookies = int(self.path[query_char + 1:]) | |
| 1059 else: | |
| 1060 num_cookies = 0 | |
| 1061 self.send_response(200) | |
| 1062     self.send_header('Content-Type', 'text/html') | |
| 1063 for _i in range(0, num_cookies): | |
| 1064 self.send_header('Set-Cookie', 'a=') | |
| 1065 self.end_headers() | |
| 1066 self.wfile.write('%d cookies were sent' % num_cookies) | |
| 1067 return True | |
| 1068 | |
| 1069 def ExpectAndSetCookieHandler(self): | |
| 1070 """Expects some cookies to be sent, and if they are, sets more cookies. | |
| 1071 | |
| 1072 The expect parameter specifies a required cookie. May be specified multiple | |
| 1073 times. | |
| 1074 The set parameter specifies a cookie to set if all required cookies are | |
| 1075     present. May be specified multiple times. | |
| 1076 The data parameter specifies the response body data to be returned.""" | |
| 1077 | |
| 1078 if not self._ShouldHandleRequest("/expect-and-set-cookie"): | |
| 1079 return False | |
| 1080 | |
| 1081 _, _, _, _, query, _ = urlparse.urlparse(self.path) | |
| 1082 query_dict = cgi.parse_qs(query) | |
| 1083 cookies = set() | |
| 1084 if 'Cookie' in self.headers: | |
| 1085 cookie_header = self.headers.getheader('Cookie') | |
| 1086 cookies.update([s.strip() for s in cookie_header.split(';')]) | |
| 1087 got_all_expected_cookies = True | |
| 1088 for expected_cookie in query_dict.get('expect', []): | |
| 1089 if expected_cookie not in cookies: | |
| 1090 got_all_expected_cookies = False | |
| 1091 self.send_response(200) | |
| 1092 self.send_header('Content-Type', 'text/html') | |
| 1093 if got_all_expected_cookies: | |
| 1094 for cookie_value in query_dict.get('set', []): | |
| 1095 self.send_header('Set-Cookie', '%s' % cookie_value) | |
| 1096 self.end_headers() | |
| 1097 for data_value in query_dict.get('data', []): | |
| 1098 self.wfile.write(data_value) | |
| 1099 return True | |
| 1100 | |
| 1101 def SetHeaderHandler(self): | |
| 1102 """This handler sets a response header. Parameters are in the | |
| 1103 key%3A%20value&key2%3A%20value2 format.""" | |
| 1104 | |
| 1105 if not self._ShouldHandleRequest("/set-header"): | |
| 1106 return False | |
| 1107 | |
| 1108 query_char = self.path.find('?') | |
| 1109 if query_char != -1: | |
| 1110 headers_values = self.path[query_char + 1:].split('&') | |
| 1111 else: | |
| 1112 headers_values = ("",) | |
| 1113 self.send_response(200) | |
| 1114 self.send_header('Content-Type', 'text/html') | |
| 1115 for header_value in headers_values: | |
| 1116 header_value = urllib.unquote(header_value) | |
| 1117 (key, value) = header_value.split(': ', 1) | |
| 1118 self.send_header(key, value) | |
| 1119 self.end_headers() | |
| 1120 for header_value in headers_values: | |
| 1121 self.wfile.write('%s' % header_value) | |
| 1122 return True | |
| 1123 | |
| 1124 def AuthBasicHandler(self): | |
| 1125 """This handler tests 'Basic' authentication. It just sends a page with | |
| 1126 title 'user/pass' if you succeed.""" | |
| 1127 | |
| 1128 if not self._ShouldHandleRequest("/auth-basic"): | |
| 1129 return False | |
| 1130 | |
| 1131 username = userpass = password = b64str = "" | |
| 1132 expected_password = 'secret' | |
| 1133 realm = 'testrealm' | |
| 1134 set_cookie_if_challenged = False | |
| 1135 | |
| 1136 _, _, url_path, _, query, _ = urlparse.urlparse(self.path) | |
| 1137 query_params = cgi.parse_qs(query, True) | |
| 1138 if 'set-cookie-if-challenged' in query_params: | |
| 1139 set_cookie_if_challenged = True | |
| 1140 if 'password' in query_params: | |
| 1141 expected_password = query_params['password'][0] | |
| 1142 if 'realm' in query_params: | |
| 1143 realm = query_params['realm'][0] | |
| 1144 | |
| 1145 auth = self.headers.getheader('authorization') | |
| 1146 try: | |
| 1147 if not auth: | |
| 1148 raise Exception('no auth') | |
| 1149 b64str = re.findall(r'Basic (\S+)', auth)[0] | |
| 1150 userpass = base64.b64decode(b64str) | |
| 1151 username, password = re.findall(r'([^:]+):(\S+)', userpass)[0] | |
| 1152 if password != expected_password: | |
| 1153 raise Exception('wrong password') | |
| 1154 except Exception, e: | |
| 1155 # Authentication failed. | |
| 1156 self.send_response(401) | |
| 1157 self.send_header('WWW-Authenticate', 'Basic realm="%s"' % realm) | |
| 1158 self.send_header('Content-Type', 'text/html') | |
| 1159 if set_cookie_if_challenged: | |
| 1160 self.send_header('Set-Cookie', 'got_challenged=true') | |
| 1161 self.end_headers() | |
| 1162 self.wfile.write('<html><head>') | |
| 1163 self.wfile.write('<title>Denied: %s</title>' % e) | |
| 1164 self.wfile.write('</head><body>') | |
| 1165 self.wfile.write('auth=%s<p>' % auth) | |
| 1166 self.wfile.write('b64str=%s<p>' % b64str) | |
| 1167 self.wfile.write('username: %s<p>' % username) | |
| 1168 self.wfile.write('userpass: %s<p>' % userpass) | |
| 1169 self.wfile.write('password: %s<p>' % password) | |
| 1170 self.wfile.write('You sent:<br>%s<p>' % self.headers) | |
| 1171 self.wfile.write('</body></html>') | |
| 1172 return True | |
| 1173 | |
| 1174 # Authentication successful. (Return a cachable response to allow for | |
| 1175 # testing cached pages that require authentication.) | |
| 1176 old_protocol_version = self.protocol_version | |
| 1177 self.protocol_version = "HTTP/1.1" | |
| 1178 | |
| 1179 if_none_match = self.headers.getheader('if-none-match') | |
| 1180 if if_none_match == "abc": | |
| 1181 self.send_response(304) | |
| 1182 self.end_headers() | |
| 1183 elif url_path.endswith(".gif"): | |
| 1184 # Using chrome/test/data/google/logo.gif as the test image | |
| 1185 test_image_path = ['google', 'logo.gif'] | |
| 1186 gif_path = os.path.join(self.server.data_dir, *test_image_path) | |
| 1187 if not os.path.isfile(gif_path): | |
| 1188 self.send_error(404) | |
| 1189 self.protocol_version = old_protocol_version | |
| 1190 return True | |
| 1191 | |
| 1192 f = open(gif_path, "rb") | |
| 1193 data = f.read() | |
| 1194 f.close() | |
| 1195 | |
| 1196 self.send_response(200) | |
| 1197 self.send_header('Content-Type', 'image/gif') | |
| 1198 self.send_header('Cache-control', 'max-age=60000') | |
| 1199 self.send_header('Etag', 'abc') | |
| 1200 self.end_headers() | |
| 1201 self.wfile.write(data) | |
| 1202 else: | |
| 1203 self.send_response(200) | |
| 1204 self.send_header('Content-Type', 'text/html') | |
| 1205 self.send_header('Cache-control', 'max-age=60000') | |
| 1206 self.send_header('Etag', 'abc') | |
| 1207 self.end_headers() | |
| 1208 self.wfile.write('<html><head>') | |
| 1209 self.wfile.write('<title>%s/%s</title>' % (username, password)) | |
| 1210 self.wfile.write('</head><body>') | |
| 1211 self.wfile.write('auth=%s<p>' % auth) | |
| 1212 self.wfile.write('You sent:<br>%s<p>' % self.headers) | |
| 1213 self.wfile.write('</body></html>') | |
| 1214 | |
| 1215 self.protocol_version = old_protocol_version | |
| 1216 return True | |
| 1217 | |
| 1218 def GetNonce(self, force_reset=False): | |
| 1219 """Returns a nonce that's stable per request path for the server's lifetime. | |
| 1220 This is a fake implementation. A real implementation would only use a given | |
| 1221 nonce a single time (hence the name n-once). However, for the purposes of | |
| 1222 unittesting, we don't care about the security of the nonce. | |
| 1223 | |
| 1224 Args: | |
| 1225 force_reset: Iff set, the nonce will be changed. Useful for testing the | |
| 1226 "stale" response. | |
| 1227 """ | |
| 1228 | |
| 1229 if force_reset or not self.server.nonce_time: | |
| 1230 self.server.nonce_time = time.time() | |
| 1231 return hashlib.md5('privatekey%s%d' % | |
| 1232 (self.path, self.server.nonce_time)).hexdigest() | |
| 1233 | |
| 1234 def AuthDigestHandler(self): | |
| 1235 """This handler tests 'Digest' authentication. | |
| 1236 | |
| 1237 It just sends a page with title 'user/pass' if you succeed. | |
| 1238 | |
| 1239 A stale response is sent iff "stale" is present in the request path. | |
| 1240 """ | |
| 1241 | |
| 1242 if not self._ShouldHandleRequest("/auth-digest"): | |
| 1243 return False | |
| 1244 | |
| 1245 stale = 'stale' in self.path | |
| 1246 nonce = self.GetNonce(force_reset=stale) | |
| 1247 opaque = hashlib.md5('opaque').hexdigest() | |
| 1248 password = 'secret' | |
| 1249 realm = 'testrealm' | |
| 1250 | |
| 1251 auth = self.headers.getheader('authorization') | |
| 1252 pairs = {} | |
| 1253 try: | |
| 1254 if not auth: | |
| 1255 raise Exception('no auth') | |
| 1256 if not auth.startswith('Digest'): | |
| 1257 raise Exception('not digest') | |
| 1258 # Pull out all the name="value" pairs as a dictionary. | |
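|       # A typical client header looks like: | |
|       #   Authorization: Digest username="user", realm="testrealm", | |
|       #     nonce="...", uri="/auth-digest", response="...", opaque="..." | |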
| 1259 pairs = dict(re.findall(r'(\b[^ ,=]+)="?([^",]+)"?', auth)) | |
| 1260 | |
| 1261 # Make sure it's all valid. | |
| 1262 if pairs['nonce'] != nonce: | |
| 1263 raise Exception('wrong nonce') | |
| 1264 if pairs['opaque'] != opaque: | |
| 1265 raise Exception('wrong opaque') | |
| 1266 | |
| 1267 # Check the 'response' value and make sure it matches our magic hash. | |
| 1268 # See http://www.ietf.org/rfc/rfc2617.txt | |
| 1269 hash_a1 = hashlib.md5( | |
| 1270 ':'.join([pairs['username'], realm, password])).hexdigest() | |
| 1271 hash_a2 = hashlib.md5(':'.join([self.command, pairs['uri']])).hexdigest() | |
| 1272 if 'qop' in pairs and 'nc' in pairs and 'cnonce' in pairs: | |
| 1273 response = hashlib.md5(':'.join([hash_a1, nonce, pairs['nc'], | |
| 1274 pairs['cnonce'], pairs['qop'], hash_a2])).hexdigest() | |
| 1275 else: | |
| 1276 response = hashlib.md5(':'.join([hash_a1, nonce, hash_a2])).hexdigest() | |
| 1277 | |
| 1278 if pairs['response'] != response: | |
| 1279 raise Exception('wrong password') | |
| 1280 except Exception, e: | |
| 1281 # Authentication failed. | |
| 1282 self.send_response(401) | |
| 1283 hdr = ('Digest ' | |
| 1284 'realm="%s", ' | |
| 1285 'domain="/", ' | |
| 1286 'qop="auth", ' | |
| 1287 'algorithm=MD5, ' | |
| 1288 'nonce="%s", ' | |
| 1289 'opaque="%s"') % (realm, nonce, opaque) | |
| 1290 if stale: | |
| 1291 hdr += ', stale="TRUE"' | |
| 1292 self.send_header('WWW-Authenticate', hdr) | |
| 1293 self.send_header('Content-Type', 'text/html') | |
| 1294 self.end_headers() | |
| 1295 self.wfile.write('<html><head>') | |
| 1296 self.wfile.write('<title>Denied: %s</title>' % e) | |
| 1297 self.wfile.write('</head><body>') | |
| 1298 self.wfile.write('auth=%s<p>' % auth) | |
| 1299 self.wfile.write('pairs=%s<p>' % pairs) | |
| 1300 self.wfile.write('You sent:<br>%s<p>' % self.headers) | |
| 1301 self.wfile.write('We are replying:<br>%s<p>' % hdr) | |
| 1302 self.wfile.write('</body></html>') | |
| 1303 return True | |
| 1304 | |
| 1305 # Authentication successful. | |
| 1306 self.send_response(200) | |
| 1307 self.send_header('Content-Type', 'text/html') | |
| 1308 self.end_headers() | |
| 1309 self.wfile.write('<html><head>') | |
| 1310 self.wfile.write('<title>%s/%s</title>' % (pairs['username'], password)) | |
| 1311 self.wfile.write('</head><body>') | |
| 1312 self.wfile.write('auth=%s<p>' % auth) | |
| 1313 self.wfile.write('pairs=%s<p>' % pairs) | |
| 1314 self.wfile.write('</body></html>') | |
| 1315 | |
| 1316 return True | |
| 1317 | |
| 1318 def SlowServerHandler(self): | |
| 1319 """Wait for the user suggested time before responding. The syntax is | |
| 1320 /slow?0.5 to wait for half a second.""" | |
| 1321 | |
| 1322 if not self._ShouldHandleRequest("/slow"): | |
| 1323 return False | |
| 1324 query_char = self.path.find('?') | |
| 1325 wait_sec = 1.0 | |
| 1326 if query_char >= 0: | |
| 1327 try: | |
| 1328         wait_sec = float(self.path[query_char + 1:]) | |
| 1329 except ValueError: | |
| 1330 pass | |
| 1331 time.sleep(wait_sec) | |
| 1332 self.send_response(200) | |
| 1333 self.send_header('Content-Type', 'text/plain') | |
| 1334 self.end_headers() | |
| 1335 self.wfile.write("waited %d seconds" % wait_sec) | |
| 1336 return True | |
| 1337 | |
| 1338 def ChunkedServerHandler(self): | |
| 1339 """Send chunked response. Allows to specify chunks parameters: | |
| 1340 - waitBeforeHeaders - ms to wait before sending headers | |
| 1341 - waitBetweenChunks - ms to wait between chunks | |
| 1342 - chunkSize - size of each chunk in bytes | |
| 1343 - chunksNumber - number of chunks | |
| 1344 Example: /chunked?waitBeforeHeaders=1000&chunkSize=5&chunksNumber=5 | |
| 1345     waits one second, then sends headers and five chunks of five bytes each.""" | |
| 1346 | |
| 1347 if not self._ShouldHandleRequest("/chunked"): | |
| 1348 return False | |
| 1349 query_char = self.path.find('?') | |
| 1350 chunkedSettings = {'waitBeforeHeaders' : 0, | |
| 1351 'waitBetweenChunks' : 0, | |
| 1352 'chunkSize' : 5, | |
| 1353 'chunksNumber' : 5} | |
| 1354 if query_char >= 0: | |
| 1355 params = self.path[query_char + 1:].split('&') | |
| 1356 for param in params: | |
| 1357 keyValue = param.split('=') | |
| 1358 if len(keyValue) == 2: | |
| 1359 try: | |
| 1360 chunkedSettings[keyValue[0]] = int(keyValue[1]) | |
| 1361 except ValueError: | |
| 1362 pass | |
| 1363 time.sleep(0.001 * chunkedSettings['waitBeforeHeaders']) | |
| 1364 self.protocol_version = 'HTTP/1.1' # Needed for chunked encoding | |
| 1365 self.send_response(200) | |
| 1366 self.send_header('Content-Type', 'text/plain') | |
| 1367 self.send_header('Connection', 'close') | |
| 1368 self.send_header('Transfer-Encoding', 'chunked') | |
| 1369 self.end_headers() | |
| 1370 # Chunked encoding: sending all chunks, then final zero-length chunk and | |
| 1371 # then final CRLF. | |
| 1372 for i in range(0, chunkedSettings['chunksNumber']): | |
| 1373 if i > 0: | |
| 1374 time.sleep(0.001 * chunkedSettings['waitBetweenChunks']) | |
| 1375 self.sendChunkHelp('*' * chunkedSettings['chunkSize']) | |
| 1376 self.wfile.flush() # Keep in mind that we start flushing only after 1kb. | |
| 1377 self.sendChunkHelp('') | |
| 1378 return True | |
| 1379 | |
| 1380 def ContentTypeHandler(self): | |
| 1381 """Returns a string of html with the given content type. E.g., | |
| 1382 /contenttype?text/css returns an html file with the Content-Type | |
| 1383 header set to text/css.""" | |
| 1384 | |
| 1385 if not self._ShouldHandleRequest("/contenttype"): | |
| 1386 return False | |
| 1387 query_char = self.path.find('?') | |
| 1388 content_type = self.path[query_char + 1:].strip() | |
| 1389 if not content_type: | |
| 1390 content_type = 'text/html' | |
| 1391 self.send_response(200) | |
| 1392 self.send_header('Content-Type', content_type) | |
| 1393 self.end_headers() | |
| 1394 self.wfile.write("<html>\n<body>\n<p>HTML text</p>\n</body>\n</html>\n") | |
| 1395 return True | |
| 1396 | |
| 1397 def NoContentHandler(self): | |
| 1398 """Returns a 204 No Content response.""" | |
| 1399 | |
| 1400 if not self._ShouldHandleRequest("/nocontent"): | |
| 1401 return False | |
| 1402 self.send_response(204) | |
| 1403 self.end_headers() | |
| 1404 return True | |
| 1405 | |
| 1406 def ServerRedirectHandler(self): | |
| 1407 """Sends a server redirect to the given URL. The syntax is | |
| 1408 '/server-redirect?http://foo.bar/asdf' to redirect to | |
| 1409 'http://foo.bar/asdf'""" | |
| 1410 | |
| 1411 test_name = "/server-redirect" | |
| 1412 if not self._ShouldHandleRequest(test_name): | |
| 1413 return False | |
| 1414 | |
| 1415 query_char = self.path.find('?') | |
| 1416 if query_char < 0 or len(self.path) <= query_char + 1: | |
| 1417 self.sendRedirectHelp(test_name) | |
| 1418 return True | |
| 1419 dest = urllib.unquote(self.path[query_char + 1:]) | |
| 1420 | |
| 1421 self.send_response(301) # moved permanently | |
| 1422 self.send_header('Location', dest) | |
| 1423 self.send_header('Content-Type', 'text/html') | |
| 1424 self.end_headers() | |
| 1425 self.wfile.write('<html><head>') | |
| 1426 self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest) | |
| 1427 | |
| 1428 return True | |
| 1429 | |
| 1430 def CrossSiteRedirectHandler(self): | |
| 1431 """Sends a server redirect to the given site. The syntax is | |
| 1432 '/cross-site/hostname/...' to redirect to //hostname/... | |
| 1433 It is used to navigate between different Sites, causing | |
| 1434 cross-site/cross-process navigations in the browser.""" | |
| 1435 | |
| 1436 test_name = "/cross-site" | |
| 1437 if not self._ShouldHandleRequest(test_name): | |
| 1438 return False | |
| 1439 | |
| 1440 params = urllib.unquote(self.path[(len(test_name) + 1):]) | |
| 1441 slash = params.find('/') | |
| 1442 if slash < 0: | |
| 1443 self.sendRedirectHelp(test_name) | |
| 1444 return True | |
| 1445 | |
| 1446 host = params[:slash] | |
| 1447 path = params[(slash+1):] | |
| 1448 dest = "//%s:%s/%s" % (host, str(self.server.server_port), path) | |
| 1449 | |
| 1450 self.send_response(301) # moved permanently | |
| 1451 self.send_header('Location', dest) | |
| 1452 self.send_header('Content-Type', 'text/html') | |
| 1453 self.end_headers() | |
| 1454 self.wfile.write('<html><head>') | |
| 1455 self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest) | |
| 1456 | |
| 1457 return True | |
| 1458 | |
| 1459 def ClientRedirectHandler(self): | |
| 1460 """Sends a client redirect to the given URL. The syntax is | |
| 1461 '/client-redirect?http://foo.bar/asdf' to redirect to | |
| 1462 'http://foo.bar/asdf'""" | |
| 1463 | |
| 1464 test_name = "/client-redirect" | |
| 1465 if not self._ShouldHandleRequest(test_name): | |
| 1466 return False | |
| 1467 | |
| 1468 query_char = self.path.find('?') | |
| 1469 if query_char < 0 or len(self.path) <= query_char + 1: | |
| 1470 self.sendRedirectHelp(test_name) | |
| 1471 return True | |
| 1472 dest = urllib.unquote(self.path[query_char + 1:]) | |
| 1473 | |
| 1474 self.send_response(200) | |
| 1475 self.send_header('Content-Type', 'text/html') | |
| 1476 self.end_headers() | |
| 1477 self.wfile.write('<html><head>') | |
| 1478 self.wfile.write('<meta http-equiv="refresh" content="0;url=%s">' % dest) | |
| 1479 self.wfile.write('</head><body>Redirecting to %s</body></html>' % dest) | |
| 1480 | |
| 1481 return True | |
| 1482 | |
| 1483 def GetSSLSessionCacheHandler(self): | |
| 1484 """Send a reply containing a log of the session cache operations.""" | |
| 1485 | |
| 1486 if not self._ShouldHandleRequest('/ssl-session-cache'): | |
| 1487 return False | |
| 1488 | |
| 1489 self.send_response(200) | |
| 1490 self.send_header('Content-Type', 'text/plain') | |
| 1491 self.end_headers() | |
| 1492 try: | |
| 1493 log = self.server.session_cache.log | |
| 1494 except AttributeError: | |
| 1495 self.wfile.write('Pass --https-record-resume in order to use' + | |
| 1496 ' this request') | |
| 1497 return True | |
| 1498 | |
| 1499 for (action, sessionID) in log: | |
| 1500 self.wfile.write('%s\t%s\n' % (action, bytes(sessionID).encode('hex'))) | |
| 1501 return True | |
| 1502 | |
| 1503 def SSLManySmallRecords(self): | |
| 1504 """Sends a reply consisting of a variety of small writes. These will be | |
| 1505 translated into a series of small SSL records when used over an HTTPS | |
| 1506 server.""" | |
| 1507 | |
| 1508 if not self._ShouldHandleRequest('/ssl-many-small-records'): | |
| 1509 return False | |
| 1510 | |
| 1511 self.send_response(200) | |
| 1512 self.send_header('Content-Type', 'text/plain') | |
| 1513 self.end_headers() | |
| 1514 | |
| 1515 # Write ~26K of data, in 1350 byte chunks | |
| 1516 for i in xrange(20): | |
| 1517 self.wfile.write('*' * 1350) | |
| 1518 self.wfile.flush() | |
| 1519 return True | |
| 1520 | |
| 1521 def GetChannelID(self): | |
| 1522 """Send a reply containing the hashed ChannelID that the client provided.""" | |
| 1523 | |
| 1524 if not self._ShouldHandleRequest('/channel-id'): | |
| 1525 return False | |
| 1526 | |
| 1527 self.send_response(200) | |
| 1528 self.send_header('Content-Type', 'text/plain') | |
| 1529 self.end_headers() | |
| 1530 channel_id = bytes(self.server.tlsConnection.channel_id) | |
| 1531 self.wfile.write(hashlib.sha256(channel_id).digest().encode('base64')) | |
| 1532 return True | |
| 1533 | |
| 1534 def ClientCipherListHandler(self): | |
| 1535 """Send a reply containing the cipher suite list that the client | |
| 1536 provided. Each cipher suite value is serialized in decimal, followed by a | |
| 1537 newline.""" | |
| 1538 | |
| 1539 if not self._ShouldHandleRequest('/client-cipher-list'): | |
| 1540 return False | |
| 1541 | |
| 1542 self.send_response(200) | |
| 1543 self.send_header('Content-Type', 'text/plain') | |
| 1544 self.end_headers() | |
| 1545 | |
| 1546 cipher_suites = self.server.tlsConnection.clientHello.cipher_suites | |
| 1547 self.wfile.write('\n'.join(str(c) for c in cipher_suites)) | |
| 1548 return True | |
| 1549 | |
| 1550 def CloseSocketHandler(self): | |
| 1551 """Closes the socket without sending anything.""" | |
| 1552 | |
| 1553 if not self._ShouldHandleRequest('/close-socket'): | |
| 1554 return False | |
| 1555 | |
| 1556 self.wfile.close() | |
| 1557 return True | |
| 1558 | |
| 1559 def RangeResetHandler(self): | |
| 1560 """Send data broken up by connection resets every N (default 4K) bytes. | |
| 1561 Support range requests. If the data requested doesn't straddle a reset | |
| 1562 boundary, it will all be sent. Used for testing resuming downloads.""" | |
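|     # For example (illustrative query), ?size=8000&rst_boundary=4000&token=foo | |
|     # asks for 8000 bytes with a reset scheduled before byte 4000 for 'foo'. | |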
| 1563 | |
| 1564 def DataForRange(start, end): | |
| 1565 """Data to be provided for a particular range of bytes.""" | |
| 1566 # Offset and scale to avoid too obvious (and hence potentially | |
| 1567 # collidable) data. | |
| 1568 return ''.join([chr(y % 256) | |
| 1569 for y in range(start * 2 + 15, end * 2 + 15, 2)]) | |
| 1570 | |
| 1571 if not self._ShouldHandleRequest('/rangereset'): | |
| 1572 return False | |
| 1573 | |
| 1574 # HTTP/1.1 is required for ETag and range support. | |
| 1575 self.protocol_version = 'HTTP/1.1' | |
| 1576 _, _, url_path, _, query, _ = urlparse.urlparse(self.path) | |
| 1577 | |
| 1578 # Defaults | |
| 1579 size = 8000 | |
| 1580 # Note that the rst is sent just before sending the rst_boundary byte. | |
| 1581 rst_boundary = 4000 | |
| 1582 respond_to_range = True | |
| 1583 hold_for_signal = False | |
| 1584 rst_limit = -1 | |
| 1585 token = 'DEFAULT' | |
| 1586 fail_precondition = 0 | |
| 1587 send_verifiers = True | |
| 1588 | |
| 1589 # Parse the query | |
| 1590 qdict = urlparse.parse_qs(query, True) | |
| 1591 if 'size' in qdict: | |
| 1592 size = int(qdict['size'][0]) | |
| 1593 if 'rst_boundary' in qdict: | |
| 1594 rst_boundary = int(qdict['rst_boundary'][0]) | |
| 1595 if 'token' in qdict: | |
| 1596 # Identifying token for stateful tests. | |
| 1597 token = qdict['token'][0] | |
| 1598 if 'rst_limit' in qdict: | |
| 1599 # Max number of rsts for a given token. | |
| 1600 rst_limit = int(qdict['rst_limit'][0]) | |
| 1601 if 'bounce_range' in qdict: | |
| 1602 respond_to_range = False | |
| 1603 if 'hold' in qdict: | |
| 1604 # Note that hold_for_signal will not work with null range requests; | |
| 1605 # see TODO below. | |
| 1606 hold_for_signal = True | |
| 1607 if 'no_verifiers' in qdict: | |
| 1608 send_verifiers = False | |
| 1609 if 'fail_precondition' in qdict: | |
| 1610 fail_precondition = int(qdict['fail_precondition'][0]) | |
| 1611 | |
| 1612 # Record already set information, or set it. | |
| 1613 rst_limit = TestPageHandler.rst_limits.setdefault(token, rst_limit) | |
| 1614 if rst_limit != 0: | |
| 1615 TestPageHandler.rst_limits[token] -= 1 | |
| 1616 fail_precondition = TestPageHandler.fail_precondition.setdefault( | |
| 1617 token, fail_precondition) | |
| 1618 if fail_precondition != 0: | |
| 1619 TestPageHandler.fail_precondition[token] -= 1 | |
| 1620 | |
| 1621 first_byte = 0 | |
| 1622 last_byte = size - 1 | |
| 1623 | |
| 1624 # Does that define what we want to return, or do we need to apply | |
| 1625 # a range? | |
| 1626 range_response = False | |
| 1627 range_header = self.headers.getheader('range') | |
| 1628 if range_header and respond_to_range: | |
| 1629 mo = re.match("bytes=(\d*)-(\d*)", range_header) | |
| 1630 if mo.group(1): | |
| 1631 first_byte = int(mo.group(1)) | |
| 1632 if mo.group(2): | |
| 1633 last_byte = int(mo.group(2)) | |
| 1634 if last_byte > size - 1: | |
| 1635 last_byte = size - 1 | |
| 1636 range_response = True | |
| 1637 if last_byte < first_byte: | |
| 1638 return False | |
| 1639 | |
| 1640 if (fail_precondition and | |
| 1641 (self.headers.getheader('If-Modified-Since') or | |
| 1642 self.headers.getheader('If-Match'))): | |
| 1643 self.send_response(412) | |
| 1644 self.end_headers() | |
| 1645 return True | |
| 1646 | |
| 1647 if range_response: | |
| 1648 self.send_response(206) | |
| 1649 self.send_header('Content-Range', | |
| 1650 'bytes %d-%d/%d' % (first_byte, last_byte, size)) | |
| 1651 else: | |
| 1652 self.send_response(200) | |
| 1653 self.send_header('Content-Type', 'application/octet-stream') | |
| 1654 self.send_header('Content-Length', last_byte - first_byte + 1) | |
| 1655 if send_verifiers: | |
| 1656 # If fail_precondition is non-zero, then the ETag for each request will be | |
| 1657 # different. | |
| 1658 etag = "%s%d" % (token, fail_precondition) | |
| 1659 self.send_header('ETag', etag) | |
| 1660 self.send_header('Last-Modified', 'Tue, 19 Feb 2013 14:32 EST') | |
| 1661 self.end_headers() | |
| 1662 | |
| 1663 if hold_for_signal: | |
| 1664 # TODO(rdsmith/phajdan.jr): http://crbug.com/169519: Without writing | |
| 1665 # a single byte, the self.server.handle_request() below hangs | |
| 1666 # without processing new incoming requests. | |
| 1667 self.wfile.write(DataForRange(first_byte, first_byte + 1)) | |
| 1668 first_byte = first_byte + 1 | |
| 1669 # handle requests until one of them clears this flag. | |
| 1670 self.server.wait_for_download = True | |
| 1671 while self.server.wait_for_download: | |
| 1672 self.server.handle_request() | |
| 1673 | |
| 1674 possible_rst = ((first_byte / rst_boundary) + 1) * rst_boundary | |
| 1675 if possible_rst >= last_byte or rst_limit == 0: | |
| 1676 # No RST has been requested in this range, so we don't need to | |
| 1677 # do anything fancy; just write the data and let the python | |
| 1678 # infrastructure close the connection. | |
| 1679 self.wfile.write(DataForRange(first_byte, last_byte + 1)) | |
| 1680 self.wfile.flush() | |
| 1681 return True | |
| 1682 | |
| 1683 # We're resetting the connection part way in; go to the RST | |
| 1684 # boundary and then send an RST. | |
| 1685 # Because socket semantics do not guarantee that all the data will be | |
| 1686 # sent when using the linger semantics to hard close a socket, | |
| 1687 # we send the data and then wait for our peer to release us | |
| 1688 # before sending the reset. | |
| 1689 data = DataForRange(first_byte, possible_rst) | |
| 1690 self.wfile.write(data) | |
| 1691 self.wfile.flush() | |
| 1692 self.server.wait_for_download = True | |
| 1693 while self.server.wait_for_download: | |
| 1694 self.server.handle_request() | |
| 1695 l_onoff = 1 # Linger is active. | |
| 1696 l_linger = 0 # Seconds to linger for. | |
| 1697 self.connection.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, | |
| 1698 struct.pack('ii', l_onoff, l_linger)) | |
| 1699 | |
| 1700 # Close all duplicates of the underlying socket to force the RST. | |
| 1701 self.wfile.close() | |
| 1702 self.rfile.close() | |
| 1703 self.connection.close() | |
| 1704 | |
| 1705 return True | |
| 1706 | |
| 1707 def DefaultResponseHandler(self): | |
| 1708 """This is the catch-all response handler for requests that aren't handled | |
| 1709 by one of the special handlers above. | |
| 1710     We specify the Content-Length because without it the HTTPS connection | |
| 1711     is not closed properly (and the browser keeps expecting data).""" | |
| 1712 | |
| 1713 contents = "Default response given for path: " + self.path | |
| 1714 self.send_response(200) | |
| 1715 self.send_header('Content-Type', 'text/html') | |
| 1716 self.send_header('Content-Length', len(contents)) | |
| 1717 self.end_headers() | |
| 1718 if (self.command != 'HEAD'): | |
| 1719 self.wfile.write(contents) | |
| 1720 return True | |
| 1721 | |
| 1722 def RedirectConnectHandler(self): | |
| 1723 """Sends a redirect to the CONNECT request for www.redirect.com. This | |
| 1724 response is not specified by the RFC, so the browser should not follow | |
| 1725 the redirect.""" | |
| 1726 | |
| 1727 if (self.path.find("www.redirect.com") < 0): | |
| 1728 return False | |
| 1729 | |
| 1730 dest = "http://www.destination.com/foo.js" | |
| 1731 | |
| 1732 self.send_response(302) # moved temporarily | |
| 1733 self.send_header('Location', dest) | |
| 1734 self.send_header('Connection', 'close') | |
| 1735 self.end_headers() | |
| 1736 return True | |
| 1737 | |
| 1738 def ServerAuthConnectHandler(self): | |
| 1739 """Sends a 401 to the CONNECT request for www.server-auth.com. This | |
| 1740 response doesn't make sense because the proxy server cannot request | |
| 1741 server authentication.""" | |
| 1742 | |
| 1743 if (self.path.find("www.server-auth.com") < 0): | |
| 1744 return False | |
| 1745 | |
| 1746 challenge = 'Basic realm="WallyWorld"' | |
| 1747 | |
| 1748 self.send_response(401) # unauthorized | |
| 1749 self.send_header('WWW-Authenticate', challenge) | |
| 1750 self.send_header('Connection', 'close') | |
| 1751 self.end_headers() | |
| 1752 return True | |
| 1753 | |
| 1754 def DefaultConnectResponseHandler(self): | |
| 1755 """This is the catch-all response handler for CONNECT requests that aren't | |
| 1756 handled by one of the special handlers above. Real Web servers respond | |
| 1757 with 400 to CONNECT requests.""" | |
| 1758 | |
| 1759 contents = "Your client has issued a malformed or illegal request." | |
| 1760 self.send_response(400) # bad request | |
| 1761 self.send_header('Content-Type', 'text/html') | |
| 1762 self.send_header('Content-Length', len(contents)) | |
| 1763 self.end_headers() | |
| 1764 self.wfile.write(contents) | |
| 1765 return True | |
| 1766 | |
| 1767 # called by the redirect handling function when there is no parameter | |
| 1768 def sendRedirectHelp(self, redirect_name): | |
| 1769 self.send_response(200) | |
| 1770 self.send_header('Content-Type', 'text/html') | |
| 1771 self.end_headers() | |
| 1772 self.wfile.write('<html><body><h1>Error: no redirect destination</h1>') | |
| 1773 self.wfile.write('Use <pre>%s?http://dest...</pre>' % redirect_name) | |
| 1774 self.wfile.write('</body></html>') | |
| 1775 | |
| 1776 # called by chunked handling function | |
| 1777 def sendChunkHelp(self, chunk): | |
| 1778 # Each chunk consists of: chunk size (hex), CRLF, chunk body, CRLF | |
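|     # For example, sendChunkHelp('hello') writes the bytes "5\r\nhello\r\n". | |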
| 1779 self.wfile.write('%X\r\n' % len(chunk)) | |
| 1780 self.wfile.write(chunk) | |
| 1781 self.wfile.write('\r\n') | |
| 1782 | |
| 1783 | |
| 1784 class OCSPHandler(testserver_base.BasePageHandler): | |
| 1785 def __init__(self, request, client_address, socket_server): | |
| 1786 handlers = [self.OCSPResponse] | |
| 1787 self.ocsp_response = socket_server.ocsp_response | |
| 1788 testserver_base.BasePageHandler.__init__(self, request, client_address, | |
| 1789 socket_server, [], handlers, [], | |
| 1790 handlers, []) | |
| 1791 | |
| 1792 def OCSPResponse(self): | |
| 1793 self.send_response(200) | |
| 1794 self.send_header('Content-Type', 'application/ocsp-response') | |
| 1795 self.send_header('Content-Length', str(len(self.ocsp_response))) | |
| 1796 self.end_headers() | |
| 1797 | |
| 1798 self.wfile.write(self.ocsp_response) | |
| 1799 | |
| 1800 | |
| 1801 class TCPEchoHandler(SocketServer.BaseRequestHandler): | |
| 1802 """The RequestHandler class for TCP echo server. | |
| 1803 | |
| 1804 It is instantiated once per connection to the server, and overrides the | |
| 1805 handle() method to implement communication to the client. | |
| 1806 """ | |
| 1807 | |
| 1808 def handle(self): | |
| 1809 """Handles the request from the client and constructs a response.""" | |
| 1810 | |
| 1811 data = self.request.recv(65536).strip() | |
| 1812 # Verify the "echo request" message received from the client. Send back | |
| 1813 # "echo response" message if "echo request" message is valid. | |
| 1814 try: | |
| 1815 return_data = echo_message.GetEchoResponseData(data) | |
| 1816 if not return_data: | |
| 1817 return | |
| 1818 except ValueError: | |
| 1819 return | |
| 1820 | |
| 1821 self.request.send(return_data) | |
| 1822 | |
| 1823 | |
| 1824 class UDPEchoHandler(SocketServer.BaseRequestHandler): | |
| 1825 """The RequestHandler class for UDP echo server. | |
| 1826 | |
| 1827 It is instantiated once per connection to the server, and overrides the | |
| 1828 handle() method to implement communication to the client. | |
| 1829 """ | |
| 1830 | |
| 1831 def handle(self): | |
| 1832 """Handles the request from the client and constructs a response.""" | |
| 1833 | |
| 1834 data = self.request[0].strip() | |
| 1835 request_socket = self.request[1] | |
| 1836 # Verify the "echo request" message received from the client. Send back | |
| 1837 # "echo response" message if "echo request" message is valid. | |
| 1838 try: | |
| 1839 return_data = echo_message.GetEchoResponseData(data) | |
| 1840 if not return_data: | |
| 1841 return | |
| 1842 except ValueError: | |
| 1843 return | |
| 1844 request_socket.sendto(return_data, self.client_address) | |
| 1845 | |
| 1846 | |
| 1847 class BasicAuthProxyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): | |
| 1848 """A request handler that behaves as a proxy server which requires | |
| 1849   basic authentication. Only CONNECT, GET and HEAD are supported for now. | |
| 1850 """ | |
| 1851 | |
| 1852 _AUTH_CREDENTIAL = 'Basic Zm9vOmJhcg==' # foo:bar | |
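|   # That is, base64.b64encode('foo:bar'); clients must send the header | |
|   # "Proxy-Authorization: Basic Zm9vOmJhcg==" to get past the 407 below. | |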
| 1853 | |
| 1854 def parse_request(self): | |
| 1855 """Overrides parse_request to check credential.""" | |
| 1856 | |
| 1857 if not BaseHTTPServer.BaseHTTPRequestHandler.parse_request(self): | |
| 1858 return False | |
| 1859 | |
| 1860 auth = self.headers.getheader('Proxy-Authorization') | |
| 1861 if auth != self._AUTH_CREDENTIAL: | |
| 1862 self.send_response(407) | |
| 1863 self.send_header('Proxy-Authenticate', 'Basic realm="MyRealm1"') | |
| 1864 self.end_headers() | |
| 1865 return False | |
| 1866 | |
| 1867 return True | |
| 1868 | |
| 1869 def _start_read_write(self, sock): | |
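|     # Relays data between the client connection (self.request) and the remote | |
|     # socket until either side stops sending or select() reports an error. | |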
| 1870 sock.setblocking(0) | |
| 1871 self.request.setblocking(0) | |
| 1872 rlist = [self.request, sock] | |
| 1873 while True: | |
| 1874 ready_sockets, _unused, errors = select.select(rlist, [], []) | |
| 1875 if errors: | |
| 1876 self.send_response(500) | |
| 1877 self.end_headers() | |
| 1878 return | |
| 1879 for s in ready_sockets: | |
| 1880 received = s.recv(1024) | |
| 1881 if len(received) == 0: | |
| 1882 return | |
| 1883 if s == self.request: | |
| 1884 other = sock | |
| 1885 else: | |
| 1886 other = self.request | |
| 1887 other.send(received) | |
| 1888 | |
| 1889 def _do_common_method(self): | |
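|     # Forwards the GET/HEAD request to the origin server named in the request | |
|     # URI, dropping Connection and Proxy-* headers before relaying. | |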
| 1890 url = urlparse.urlparse(self.path) | |
| 1891 port = url.port | |
| 1892 if not port: | |
| 1893 if url.scheme == 'http': | |
| 1894 port = 80 | |
| 1895 elif url.scheme == 'https': | |
| 1896 port = 443 | |
| 1897 if not url.hostname or not port: | |
| 1898 self.send_response(400) | |
| 1899 self.end_headers() | |
| 1900 return | |
| 1901 | |
| 1902 if len(url.path) == 0: | |
| 1903 path = '/' | |
| 1904 else: | |
| 1905 path = url.path | |
| 1906 if len(url.query) > 0: | |
| 1907 path = '%s?%s' % (url.path, url.query) | |
| 1908 | |
| 1909 sock = None | |
| 1910 try: | |
| 1911 sock = socket.create_connection((url.hostname, port)) | |
| 1912 sock.send('%s %s %s\r\n' % ( | |
| 1913 self.command, path, self.protocol_version)) | |
| 1914 for header in self.headers.headers: | |
| 1915 header = header.strip() | |
| 1916 if (header.lower().startswith('connection') or | |
| 1917 header.lower().startswith('proxy')): | |
| 1918 continue | |
| 1919 sock.send('%s\r\n' % header) | |
| 1920 sock.send('\r\n') | |
| 1921 self._start_read_write(sock) | |
| 1922 except Exception: | |
| 1923 self.send_response(500) | |
| 1924 self.end_headers() | |
| 1925 finally: | |
| 1926 if sock is not None: | |
| 1927 sock.close() | |
| 1928 | |
| 1929 def do_CONNECT(self): | |
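|     # Opens a tunnel to the host:port named in the CONNECT target, then relays | |
|     # bytes in both directions via _start_read_write(). | |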
| 1930 try: | |
| 1931 pos = self.path.rfind(':') | |
| 1932 host = self.path[:pos] | |
| 1933 port = int(self.path[pos+1:]) | |
| 1934 except Exception: | |
| 1935 self.send_response(400) | |
| 1936 self.end_headers() | |
| 1937       return | |
| 1938     sock = None | |
| 1939     try: | |
| 1940       sock = socket.create_connection((host, port)) | |
| 1941       self.send_response(200, 'Connection established') | |
| 1942       self.end_headers() | |
| 1943       self._start_read_write(sock) | |
| 1944     except Exception: | |
| 1945       self.send_response(500) | |
| 1946       self.end_headers() | |
| 1947     if sock is not None: sock.close() | |
| 1948 | |
| 1949 def do_GET(self): | |
| 1950 self._do_common_method() | |
| 1951 | |
| 1952 def do_HEAD(self): | |
| 1953 self._do_common_method() | |
| 1954 | |
| 1955 | |
| 1956 class ServerRunner(testserver_base.TestServerRunner): | |
| 1957 """TestServerRunner for the net test servers.""" | |
| 1958 | |
| 1959 def __init__(self): | |
| 1960 super(ServerRunner, self).__init__() | |
| 1961 self.__ocsp_server = None | |
| 1962 | |
| 1963 def __make_data_dir(self): | |
| 1964 if self.options.data_dir: | |
| 1965 if not os.path.isdir(self.options.data_dir): | |
| 1966 raise testserver_base.OptionError('specified data dir not found: ' + | |
| 1967 self.options.data_dir + ' exiting...') | |
| 1968 my_data_dir = self.options.data_dir | |
| 1969 else: | |
| 1970 # Create the default path to our data dir, relative to the exe dir. | |
| 1971 my_data_dir = os.path.join(BASE_DIR, "..", "..", "..", "..", | |
| 1972 "test", "data") | |
| 1973 | |
| 1974       # TODO(ibrar): Must use the Find* function defined in google\tools, | |
| 1975       # i.e. my_data_dir = FindUpward(my_data_dir, "test", "data") | |
| 1976 | |
| 1977 return my_data_dir | |
| 1978 | |
| 1979 def create_server(self, server_data): | |
| 1980 port = self.options.port | |
| 1981 host = self.options.host | |
| 1982 | |
| 1983 if self.options.server_type == SERVER_HTTP: | |
| 1984 if self.options.https: | |
| 1985 pem_cert_and_key = None | |
| 1986 ocsp_der = None | |
| 1987 if self.options.cert_and_key_file: | |
| 1988 if not os.path.isfile(self.options.cert_and_key_file): | |
| 1989 raise testserver_base.OptionError( | |
| 1990 'specified server cert file not found: ' + | |
| 1991 self.options.cert_and_key_file + ' exiting...') | |
| 1992 pem_cert_and_key = file(self.options.cert_and_key_file, 'r').read() | |
| 1993 else: | |
| 1994 # generate a new certificate and run an OCSP server for it. | |
| 1995 self.__ocsp_server = OCSPServer((host, 0), OCSPHandler) | |
| 1996 print ('OCSP server started on %s:%d...' % | |
| 1997 (host, self.__ocsp_server.server_port)) | |
| 1998 | |
| 1999 ocsp_state = None | |
| 2000 | |
| 2001 if self.options.ocsp == 'ok': | |
| 2002 ocsp_state = minica.OCSP_STATE_GOOD | |
| 2003 elif self.options.ocsp == 'revoked': | |
| 2004 ocsp_state = minica.OCSP_STATE_REVOKED | |
| 2005 elif self.options.ocsp == 'invalid': | |
| 2006 ocsp_state = minica.OCSP_STATE_INVALID | |
| 2007 elif self.options.ocsp == 'unauthorized': | |
| 2008 ocsp_state = minica.OCSP_STATE_UNAUTHORIZED | |
| 2009 elif self.options.ocsp == 'unknown': | |
| 2010 ocsp_state = minica.OCSP_STATE_UNKNOWN | |
| 2011 else: | |
| 2012 raise testserver_base.OptionError('unknown OCSP status: ' + | |
| 2013                                               self.options.ocsp) | |
| 2014 | |
| 2015 (pem_cert_and_key, ocsp_der) = minica.GenerateCertKeyAndOCSP( | |
| 2016 subject = "127.0.0.1", | |
| 2017 ocsp_url = ("http://%s:%d/ocsp" % | |
| 2018 (host, self.__ocsp_server.server_port)), | |
| 2019 ocsp_state = ocsp_state, | |
| 2020 serial = self.options.cert_serial) | |
| 2021 | |
| 2022 if self.options.ocsp_server_unavailable: | |
| 2023 # SEQUENCE containing ENUMERATED with value 3 (tryLater). | |
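|             # ('30 03' is a SEQUENCE of length 3; '0a 01 03' is ENUMERATED, | |
|             # length 1, value 3, i.e. tryLater in OCSPResponseStatus.) | |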
| 2024 self.__ocsp_server.ocsp_response = '30030a0103'.decode('hex') | |
| 2025 else: | |
| 2026 self.__ocsp_server.ocsp_response = ocsp_der | |
| 2027 | |
| 2028 for ca_cert in self.options.ssl_client_ca: | |
| 2029 if not os.path.isfile(ca_cert): | |
| 2030 raise testserver_base.OptionError( | |
| 2031 'specified trusted client CA file not found: ' + ca_cert + | |
| 2032 ' exiting...') | |
| 2033 | |
| 2034 stapled_ocsp_response = None | |
| 2035 if self.options.staple_ocsp_response: | |
| 2036 stapled_ocsp_response = ocsp_der | |
| 2037 | |
| 2038 server = HTTPSServer((host, port), TestPageHandler, pem_cert_and_key, | |
| 2039 self.options.ssl_client_auth, | |
| 2040 self.options.ssl_client_ca, | |
| 2041 self.options.ssl_client_cert_type, | |
| 2042 self.options.ssl_bulk_cipher, | |
| 2043 self.options.ssl_key_exchange, | |
| 2044 self.options.enable_npn, | |
| 2045 self.options.record_resume, | |
| 2046 self.options.tls_intolerant, | |
| 2047 self.options.tls_intolerance_type, | |
| 2048 self.options.signed_cert_timestamps_tls_ext.decode( | |
| 2049 "base64"), | |
| 2050 self.options.fallback_scsv, | |
| 2051 stapled_ocsp_response, | |
| 2052 self.options.disable_session_cache) | |
| 2053 print 'HTTPS server started on https://%s:%d...' % \ | |
| 2054 (host, server.server_port) | |
| 2055 else: | |
| 2056 server = HTTPServer((host, port), TestPageHandler) | |
| 2057 print 'HTTP server started on http://%s:%d...' % \ | |
| 2058 (host, server.server_port) | |
| 2059 | |
| 2060 server.data_dir = self.__make_data_dir() | |
| 2061 server.file_root_url = self.options.file_root_url | |
| 2062 server_data['port'] = server.server_port | |
| 2063 elif self.options.server_type == SERVER_WEBSOCKET: | |
| 2064 # Launch pywebsocket via WebSocketServer. | |
| 2065 logger = logging.getLogger() | |
| 2066 logger.addHandler(logging.StreamHandler()) | |
| 2067       # TODO(toyoshim): Remove the following os.chdir. Currently it is | |
| 2068       # needed to work correctly; it should be fixed on the pywebsocket side. | |
| 2069 os.chdir(self.__make_data_dir()) | |
| 2070 websocket_options = WebSocketOptions(host, port, '.') | |
| 2071 scheme = "ws" | |
| 2072 if self.options.cert_and_key_file: | |
| 2073 scheme = "wss" | |
| 2074 websocket_options.use_tls = True | |
| 2075 websocket_options.private_key = self.options.cert_and_key_file | |
| 2076 websocket_options.certificate = self.options.cert_and_key_file | |
| 2077 if self.options.ssl_client_auth: | |
| 2078 websocket_options.tls_client_cert_optional = False | |
| 2079 websocket_options.tls_client_auth = True | |
| 2080 if len(self.options.ssl_client_ca) != 1: | |
| 2081 raise testserver_base.OptionError( | |
| 2082 'one trusted client CA file should be specified') | |
| 2083 if not os.path.isfile(self.options.ssl_client_ca[0]): | |
| 2084 raise testserver_base.OptionError( | |
| 2085 'specified trusted client CA file not found: ' + | |
| 2086 self.options.ssl_client_ca[0] + ' exiting...') | |
| 2087 websocket_options.tls_client_ca = self.options.ssl_client_ca[0] | |
| 2088 server = WebSocketServer(websocket_options) | |
| 2089 print 'WebSocket server started on %s://%s:%d...' % \ | |
| 2090 (scheme, host, server.server_port) | |
| 2091 server_data['port'] = server.server_port | |
| 2092 websocket_options.use_basic_auth = self.options.ws_basic_auth | |
| 2093 elif self.options.server_type == SERVER_TCP_ECHO: | |
| 2094 # Used for generating the key (randomly) that encodes the "echo request" | |
| 2095 # message. | |
| 2096 random.seed() | |
| 2097 server = TCPEchoServer((host, port), TCPEchoHandler) | |
| 2098 print 'Echo TCP server started on port %d...' % server.server_port | |
| 2099 server_data['port'] = server.server_port | |
| 2100 elif self.options.server_type == SERVER_UDP_ECHO: | |
| 2101 # Used for generating the key (randomly) that encodes the "echo request" | |
| 2102 # message. | |
| 2103 random.seed() | |
| 2104 server = UDPEchoServer((host, port), UDPEchoHandler) | |
| 2105 print 'Echo UDP server started on port %d...' % server.server_port | |
| 2106 server_data['port'] = server.server_port | |
| 2107 elif self.options.server_type == SERVER_BASIC_AUTH_PROXY: | |
| 2108 server = HTTPServer((host, port), BasicAuthProxyRequestHandler) | |
| 2109 print 'BasicAuthProxy server started on port %d...' % server.server_port | |
| 2110 server_data['port'] = server.server_port | |
| 2111 elif self.options.server_type == SERVER_FTP: | |
| 2112 my_data_dir = self.__make_data_dir() | |
| 2113 | |
| 2114 # Instantiate a dummy authorizer for managing 'virtual' users | |
| 2115 authorizer = pyftpdlib.ftpserver.DummyAuthorizer() | |
| 2116 | |
| 2117 # Define a new user having full r/w permissions and a read-only | |
| 2118 # anonymous user | |
| 2119 authorizer.add_user('chrome', 'chrome', my_data_dir, perm='elradfmw') | |
| 2120 | |
| 2121 authorizer.add_anonymous(my_data_dir) | |
| 2122 | |
| 2123 # Instantiate FTP handler class | |
| 2124 ftp_handler = pyftpdlib.ftpserver.FTPHandler | |
| 2125 ftp_handler.authorizer = authorizer | |
| 2126 | |
| 2127 # Define a customized banner (string returned when client connects) | |
| 2128 ftp_handler.banner = ("pyftpdlib %s based ftpd ready." % | |
| 2129 pyftpdlib.ftpserver.__ver__) | |
| 2130 | |
| 2131 # Instantiate FTP server class and listen to address:port | |
| 2132 server = pyftpdlib.ftpserver.FTPServer((host, port), ftp_handler) | |
| 2133 server_data['port'] = server.socket.getsockname()[1] | |
| 2134 print 'FTP server started on port %d...' % server_data['port'] | |
| 2135 else: | |
| 2136       raise testserver_base.OptionError('unknown server type: ' + | |
| 2137                                         str(self.options.server_type)) | |
| 2138 | |
| 2139 return server | |
| 2140 | |
| 2141 def run_server(self): | |
| 2142 if self.__ocsp_server: | |
| 2143 self.__ocsp_server.serve_forever_on_thread() | |
| 2144 | |
| 2145 testserver_base.TestServerRunner.run_server(self) | |
| 2146 | |
| 2147 if self.__ocsp_server: | |
| 2148 self.__ocsp_server.stop_serving() | |
| 2149 | |
| 2150 def add_options(self): | |
| 2151 testserver_base.TestServerRunner.add_options(self) | |
| 2152 self.option_parser.add_option('--disable-session-cache', | |
| 2153 action='store_true', | |
| 2154 dest='disable_session_cache', | |
| 2155                                   help='tells the server to disable the ' | |
| 2156 'TLS session cache.') | |
| 2157 self.option_parser.add_option('-f', '--ftp', action='store_const', | |
| 2158 const=SERVER_FTP, default=SERVER_HTTP, | |
| 2159 dest='server_type', | |
| 2160 help='start up an FTP server.') | |
| 2161 self.option_parser.add_option('--tcp-echo', action='store_const', | |
| 2162 const=SERVER_TCP_ECHO, default=SERVER_HTTP, | |
| 2163 dest='server_type', | |
| 2164 help='start up a tcp echo server.') | |
| 2165 self.option_parser.add_option('--udp-echo', action='store_const', | |
| 2166 const=SERVER_UDP_ECHO, default=SERVER_HTTP, | |
| 2167 dest='server_type', | |
| 2168 help='start up a udp echo server.') | |
| 2169 self.option_parser.add_option('--basic-auth-proxy', action='store_const', | |
| 2170 const=SERVER_BASIC_AUTH_PROXY, | |
| 2171 default=SERVER_HTTP, dest='server_type', | |
| 2172 help='start up a proxy server which requires ' | |
| 2173 'basic authentication.') | |
| 2174 self.option_parser.add_option('--websocket', action='store_const', | |
| 2175 const=SERVER_WEBSOCKET, default=SERVER_HTTP, | |
| 2176 dest='server_type', | |
| 2177 help='start up a WebSocket server.') | |
| 2178 self.option_parser.add_option('--https', action='store_true', | |
| 2179 dest='https', help='Specify that https ' | |
| 2180 'should be used.') | |
| 2181 self.option_parser.add_option('--cert-and-key-file', | |
| 2182 dest='cert_and_key_file', help='specify the ' | |
| 2183 'path to the file containing the certificate ' | |
| 2184 'and private key for the server in PEM ' | |
| 2185 'format') | |
| 2186 self.option_parser.add_option('--ocsp', dest='ocsp', default='ok', | |
| 2187 help='The type of OCSP response generated ' | |
| 2188                                   'for the auto-generated certificate. One of ' | |
| 2189                                   '[ok,revoked,invalid,unauthorized,unknown]') | |
| 2190 self.option_parser.add_option('--cert-serial', dest='cert_serial', | |
| 2191 default=0, type=int, | |
| 2192 help='If non-zero then the generated ' | |
| 2193 'certificate will have this serial number') | |
| 2194 self.option_parser.add_option('--tls-intolerant', dest='tls_intolerant', | |
| 2195 default='0', type='int', | |
| 2196 help='If nonzero, certain TLS connections ' | |
| 2197 'will be aborted in order to test version ' | |
| 2198 'fallback. 1 means all TLS versions will be ' | |
| 2199 'aborted. 2 means TLS 1.1 or higher will be ' | |
| 2200 'aborted. 3 means TLS 1.2 or higher will be ' | |
| 2201 'aborted.') | |
| 2202 self.option_parser.add_option('--tls-intolerance-type', | |
| 2203 dest='tls_intolerance_type', | |
| 2204 default="alert", | |
| 2205 help='Controls how the server reacts to a ' | |
| 2206 'TLS version it is intolerant to. Valid ' | |
| 2207 'values are "alert", "close", and "reset".') | |
| 2208 self.option_parser.add_option('--signed-cert-timestamps-tls-ext', | |
| 2209 dest='signed_cert_timestamps_tls_ext', | |
| 2210 default='', | |
| 2211 help='Base64 encoded SCT list. If set, ' | |
| 2212 'server will respond with a ' | |
| 2213 'signed_certificate_timestamp TLS extension ' | |
| 2214 'whenever the client supports it.') | |
| 2215 self.option_parser.add_option('--fallback-scsv', dest='fallback_scsv', | |
| 2216 default=False, const=True, | |
| 2217 action='store_const', | |
| 2218 help='If given, TLS_FALLBACK_SCSV support ' | |
| 2219 'will be enabled. This causes the server to ' | |
| 2220 'reject fallback connections from compatible ' | |
| 2221 'clients (e.g. Chrome).') | |
| 2222 self.option_parser.add_option('--staple-ocsp-response', | |
| 2223 dest='staple_ocsp_response', | |
| 2224 default=False, action='store_true', | |
| 2225 help='If set, server will staple the OCSP ' | |
| 2226 'response whenever OCSP is on and the client ' | |
| 2227 'supports OCSP stapling.') | |
| 2228 self.option_parser.add_option('--https-record-resume', | |
| 2229 dest='record_resume', const=True, | |
| 2230 default=False, action='store_const', | |
| 2231 help='Record resumption cache events rather ' | |
| 2232 'than resuming as normal. Allows the use of ' | |
| 2233 'the /ssl-session-cache request') | |
| 2234 self.option_parser.add_option('--ssl-client-auth', action='store_true', | |
| 2235 help='Require SSL client auth on every ' | |
| 2236 'connection.') | |
| 2237 self.option_parser.add_option('--ssl-client-ca', action='append', | |
| 2238 default=[], help='Specify that the client ' | |
| 2239 'certificate request should include the CA ' | |
| 2240 'named in the subject of the DER-encoded ' | |
| 2241 'certificate contained in the specified ' | |
| 2242 'file. This option may appear multiple ' | |
| 2243 'times, indicating multiple CA names should ' | |
| 2244 'be sent in the request.') | |
| 2245 self.option_parser.add_option('--ssl-client-cert-type', action='append', | |
| 2246 default=[], help='Specify that the client ' | |
| 2247 'certificate request should include the ' | |
| 2248 'specified certificate_type value. This ' | |
| 2249 'option may appear multiple times, ' | |
| 2250                                   'indicating multiple values should be sent ' | |
| 2251 'in the request. Valid values are ' | |
| 2252 '"rsa_sign", "dss_sign", and "ecdsa_sign". ' | |
| 2253 'If omitted, "rsa_sign" will be used.') | |
| 2254 self.option_parser.add_option('--ssl-bulk-cipher', action='append', | |
| 2255 help='Specify the bulk encryption ' | |
| 2256 'algorithm(s) that will be accepted by the ' | |
| 2257 'SSL server. Valid values are "aes128gcm", ' | |
| 2258 '"aes256", "aes128", "3des", "rc4". If ' | |
| 2259 'omitted, all algorithms will be used. This ' | |
| 2260 'option may appear multiple times, ' | |
| 2261 'indicating multiple algorithms should be ' | |
| 2262                                   'enabled.') | |
| 2263 self.option_parser.add_option('--ssl-key-exchange', action='append', | |
| 2264                                   help='Specify the key exchange algorithm(s) ' | |
| 2265 'that will be accepted by the SSL server. ' | |
| 2266 'Valid values are "rsa", "dhe_rsa". If ' | |
| 2267 'omitted, all algorithms will be used. This ' | |
| 2268 'option may appear multiple times, ' | |
| 2269 'indicating multiple algorithms should be ' | |
| 2270                                   'enabled.') | |
| 2271 # TODO(davidben): Add ALPN support to tlslite. | |
| 2272 self.option_parser.add_option('--enable-npn', dest='enable_npn', | |
| 2273 default=False, const=True, | |
| 2274 action='store_const', | |
| 2275 help='Enable server support for the NPN ' | |
| 2276 'extension. The server will advertise ' | |
| 2277 'support for exactly one protocol, http/1.1') | |
| 2278 self.option_parser.add_option('--file-root-url', default='/files/', | |
| 2279 help='Specify a root URL for files served.') | |
| 2280 # TODO(ricea): Generalize this to support basic auth for HTTP too. | |
| 2281 self.option_parser.add_option('--ws-basic-auth', action='store_true', | |
| 2282 dest='ws_basic_auth', | |
| 2283 help='Enable basic-auth for WebSocket') | |
| 2284 self.option_parser.add_option('--ocsp-server-unavailable', | |
| 2285 dest='ocsp_server_unavailable', | |
| 2286 default=False, action='store_true', | |
| 2287 help='If set, the OCSP server will return ' | |
| 2288 'a tryLater status rather than the actual ' | |
| 2289 'OCSP response.') | |
| 2290 | |
| 2291 | |
| 2292 if __name__ == '__main__': | |
| 2293 sys.exit(ServerRunner().main()) | |