| OLD | NEW |
| (Empty) |
| 1 #!/usr/bin/env python | |
| 2 # Copyright 2015 Google Inc. All Rights Reserved. | |
| 3 # | |
| 4 # Licensed under the Apache License, Version 2.0 (the "License"); | |
| 5 # you may not use this file except in compliance with the License. | |
| 6 # You may obtain a copy of the License at | |
| 7 # | |
| 8 # http://www.apache.org/licenses/LICENSE-2.0 | |
| 9 # | |
| 10 # Unless required by applicable law or agreed to in writing, software | |
| 11 # distributed under the License is distributed on an "AS IS" BASIS, | |
| 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
| 13 # See the License for the specific language governing permissions and | |
| 14 # limitations under the License. | |
| 15 | |
| 16 | |
| 17 import httparchive | |
| 18 import httplib | |
| 19 import httpproxy | |
| 20 import threading | |
| 21 import unittest | |
| 22 import util | |
| 23 | |
| 24 | |
class MockCustomResponseHandler(object):
  """Test double for the custom-response handler used by HttpProxyServer.

  Always serves the single canned response it was constructed with.
  """

  def __init__(self, response):
    """
    Args:
      response: An instance of ArchivedHttpResponse that is returned for each
          request.
    """
    self._canned_response = response

  def handle(self, request):
    """Return the canned response regardless of the request."""
    del request  # unused
    return self._canned_response
| 37 | |
| 38 | |
class MockHttpArchiveFetch(object):
  """Callable test double standing in for the http archive fetch.

  Reports replay (non-record) mode and yields one fixed response for any
  request it is called with.
  """

  def __init__(self, response):
    """
    Args:
      response: An instance of ArchivedHttpResponse that is returned for each
          request.
    """
    self.is_record_mode = False  # always behave as replay mode
    self._canned_response = response

  def __call__(self, request):
    """Ignore the request and return the canned response."""
    del request  # unused
    return self._canned_response
| 52 | |
| 53 | |
class MockHttpArchiveHandler(httpproxy.HttpArchiveHandler):
  """Request handler that counts every request it handles.

  Installed as the server's RequestHandlerClass so tests can assert on
  HttpProxyTest.HANDLED_REQUEST_COUNT.
  """

  def handle_one_request(self):
    # Delegate to the real handler, then bump the shared test counter.
    # (Explicit parent-class call: the handler hierarchy is an old-style
    # Python 2 class, so super() is not usable here.)
    httpproxy.HttpArchiveHandler.handle_one_request(self)
    HttpProxyTest.HANDLED_REQUEST_COUNT += 1
| 58 | |
| 59 | |
class MockRules(object):
  """Test double for the rules object: every lookup yields a no-op rule."""

  def Find(self, unused_rule_type_name):  # pylint: disable=unused-argument
    """Return a rule callable that does nothing for any request/response."""
    def _noop_rule(unused_request, unused_response):
      return None
    return _noop_rule
| 63 | |
| 64 | |
class HttpProxyTest(unittest.TestCase):
  """Tests for httpproxy.HttpProxyServer: connection handling, keep-alive
  behavior, fd limits, and conditional-request (304) generation."""

  # Total number of requests handled by MockHttpArchiveHandler; reset by
  # set_up_proxy_server() before each server is created. A class-level
  # default means the attribute exists even before any setup runs.
  HANDLED_REQUEST_COUNT = 0

  def setUp(self):
    # Track server lifecycle so teardown only undoes what actually happened.
    self.has_proxy_server_bound_port = False
    self.has_proxy_server_started = False
    # Per-test knobs read by set_up_proxy_server().
    self.allow_generate_304 = False
    self.serve_response_by_http_archive = False

  def set_up_proxy_server(self, response):
    """Create a proxy server (bound to its port, but not yet serving).

    Args:
      response: An instance of ArchivedHttpResponse that is returned for each
          request. It is served by the http archive fetch when
          self.serve_response_by_http_archive is True, otherwise by the
          custom response handler.
    """
    HttpProxyTest.HANDLED_REQUEST_COUNT = 0
    self.host = 'localhost'
    self.port = 8889
    custom_handlers = MockCustomResponseHandler(
        response if not self.serve_response_by_http_archive else None)
    rules = MockRules()
    http_archive_fetch = MockHttpArchiveFetch(
        response if self.serve_response_by_http_archive else None)
    self.proxy_server = httpproxy.HttpProxyServer(
        http_archive_fetch, custom_handlers, rules,
        host=self.host, port=self.port,
        allow_generate_304=self.allow_generate_304)
    self.proxy_server.RequestHandlerClass = MockHttpArchiveHandler
    self.has_proxy_server_bound_port = True

  def tear_down_proxy_server(self):
    """Shut down and close the proxy server; safe to call more than once."""
    if self.has_proxy_server_started:
      self.proxy_server.shutdown()
      # Reset the flag so a later call (e.g. tearDown after the explicit
      # in-loop teardowns of test_generate_304) does not try to shut down
      # a server that has already stopped serving.
      self.has_proxy_server_started = False
    if self.has_proxy_server_bound_port:
      self.proxy_server.server_close()
      self.has_proxy_server_bound_port = False

  def tearDown(self):
    self.tear_down_proxy_server()

  def serve_requests_forever(self):
    """Run the proxy's serve loop; blocks until shutdown() is called."""
    self.has_proxy_server_started = True
    self.proxy_server.serve_forever(poll_interval=0.01)

  def _new_connection(self):
    """Return an httplib connection to the server made in set_up_proxy_server."""
    return httplib.HTTPConnection(self.host, self.port, timeout=10)

  def test_handle_one_request_closes_connection(self):
    # Tests that handle_one_request does not leak threads, and does not try
    # to re-handle connections that are finished.
    #
    # By default, BaseHTTPServer.py treats all HTTP 1.1 requests as
    # keep-alive. Intentionally use HTTP 1.0 to prevent this behavior.
    response = httparchive.ArchivedHttpResponse(
        version=10, status=200, reason="OK",
        headers=[], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests, each over a fresh one-shot connection.
    request_count = 10
    for _ in range(request_count):
      conn = self._new_connection()
      conn.request("GET", "/index.html")
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      conn.close()

    # Check to make sure that there is no leaked thread.
    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 2)

    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

  def test_keep_alive_header(self):
    # Tests that the keep-alive header works: connections stay open and are
    # reused, with one handler thread per live connection.
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    initial_thread_count = threading.activeCount()

    # Make a bunch of requests, holding every connection open.
    request_count = 10
    connections = []
    for _ in range(request_count):
      conn = self._new_connection()
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)

    # Repeat the same requests on the already-open connections.
    for conn in connections:
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")

    # Check that the right number of requests have been handled.
    self.assertEqual(2 * request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    # Check to make sure that exactly "request_count" new threads are active.
    self.assertEqual(
        threading.activeCount(), initial_thread_count + request_count)

    for conn in connections:
      conn.close()

    util.WaitFor(lambda: threading.activeCount() == initial_thread_count, 1)

  def test_max_fd(self):
    # Tests that opening 400 simultaneous connections does not cause
    # httpproxy to hit a process fd limit. The default limit is 256 fds.
    response = httparchive.ArchivedHttpResponse(
        version=11, status=200, reason="OK",
        headers=[("Connection", "keep-alive")], response_data=["bat1"])
    self.set_up_proxy_server(response)
    t = threading.Thread(
        target=HttpProxyTest.serve_requests_forever, args=(self,))
    t.start()

    # Make a bunch of requests, holding every connection open.
    request_count = 400
    connections = []
    for _ in range(request_count):
      conn = self._new_connection()
      conn.request("GET", "/index.html", headers={"Connection": "keep-alive"})
      res = conn.getresponse().read()
      self.assertEqual(res, "bat1")
      connections.append(conn)

    # Check that the right number of requests have been handled.
    self.assertEqual(request_count, HttpProxyTest.HANDLED_REQUEST_COUNT)

    for conn in connections:
      conn.close()

  def test_generate_304(self):
    # Tests that conditional requests return 304 exactly when 304 generation
    # is enabled, the response comes from the http archive, the method is
    # GET/HEAD, a conditional header is present, and the archived status
    # is 200; every other combination returns the archived status.
    REQUEST_HEADERS = [
        {},
        {'If-Modified-Since': 'whatever'},
        {'If-None-Match': 'whatever yet again'}]
    RESPONSE_STATUSES = [200, 204, 304, 404]
    for allow_generate_304 in [False, True]:
      self.allow_generate_304 = allow_generate_304
      for serve_response_by_http_archive in [False, True]:
        self.serve_response_by_http_archive = serve_response_by_http_archive
        for response_status in RESPONSE_STATUSES:
          # 404 is modeled by having no archived response at all.
          response = None
          if response_status != 404:
            response = httparchive.ArchivedHttpResponse(
                version=11, status=response_status, reason="OK", headers=[],
                response_data=["some content"])
          self.set_up_proxy_server(response)
          t = threading.Thread(
              target=HttpProxyTest.serve_requests_forever, args=(self,))
          t.start()
          for method in ['GET', 'HEAD', 'POST']:
            for headers in REQUEST_HEADERS:
              connection = self._new_connection()
              connection.request(method, "/index.html", headers=headers)
              response = connection.getresponse()
              connection.close()
              if (allow_generate_304 and
                  serve_response_by_http_archive and
                  method in ['GET', 'HEAD'] and
                  headers and
                  response_status == 200):
                self.assertEqual(304, response.status)
                self.assertEqual('', response.read())
              else:
                self.assertEqual(response_status, response.status)
          self.tear_down_proxy_server()
| 240 | |
| 241 | |
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
| OLD | NEW |