Merge branch 'stream'

Conflicts:
	libmproxy/protocol/http.py
This commit is contained in:
Maximilian Hils
2014-07-31 22:29:13 +02:00
16 changed files with 291 additions and 81 deletions

View File

@@ -19,6 +19,7 @@
$!nav("sticky.html", this, state)!$
$!nav("reverseproxy.html", this, state)!$
$!nav("upstreamcerts.html", this, state)!$
$!nav("responsestreaming.html", this, state)!$
<li class="nav-header">Installing Certificates</li>
$!nav("ssl.html", this, state)!$

View File

@@ -12,4 +12,5 @@ pages = [
Page("replacements.html", "Replacements"),
Page("reverseproxy.html", "Reverse proxy mode"),
Page("upstreamcerts.html", "Upstream Certs"),
Page("responsestreaming.html", "Response Streaming"),
]

View File

@@ -0,0 +1,50 @@
By using mitmproxy's streaming feature, response contents can be passed to the client incrementally before they have been fully received by the proxy.
This is especially useful for large binary files such as videos, where buffering the whole file slows down the client's browser.
By default, mitmproxy will read the entire response, perform any indicated
manipulations on it and then send the (possibly modified) response to
the client. In some cases this is undesirable and you may wish to "stream"
the response back to the client. When streaming is enabled, the response is
not buffered on the proxy but directly sent back to the client instead.
<h2>On the command-line</h2>
Streaming can be enabled on the command line for all response bodies exceeding a certain size. The SIZE argument understands
k/m/g suffixes, e.g. 3m for 3 megabytes.
<table class="table">
<tbody>
<tr>
<th width="20%">command-line</th>
<td>
--stream SIZE
</td>
</tr>
</tbody>
</table>
<h2>Caveats</h2>
When response streaming is enabled, <strong>streamed response contents will not be
recorded or preserved in any way.</strong>
When response streaming is enabled, the response body cannot be modified.
<h2>Customizing Response Streaming</h2>
You can also use an <a href="@!urlTo("scripting/inlinescripts.html")!@">inline script</a> to customize exactly
which responses are streamed.
Responses can be tagged for streaming by setting their respective .stream attribute to True:
$!example("examples/stream.py")!$
<h2>Implementation Details</h2>
When response streaming is enabled, portions of the code which would have otherwise performed changes
on the response body will see an empty response body instead (<code>libmproxy.protocol.http.CONTENT_MISSING</code>). Any modifications will be ignored.
Streamed responses are usually sent in chunks of 4096 bytes. If the response is sent with a <code>Transfer-Encoding:
chunked</code> header, the response will be streamed one chunk at a time.

View File

@@ -46,12 +46,20 @@ a connection can correspond to multiple HTTP requests.
Called when a client request has been received. The __Flow__ object is
guaranteed to have a non-None __request__ attribute.
### responseheaders(ScriptContext, Flow)
Called when the headers of a server response have been received.
This will always be called before the response hook.
The __Flow__ object is guaranteed to have non-None __request__ and
__response__ attributes. __response.content__ will not be valid,
as the response body has not been read yet.
### response(ScriptContext, Flow)
Called when a server response has been received. The __Flow__ object is
guaranteed to have non-None __request__ and __response__ attributes.
Note that if response streaming is enabled for this response,
__response.content__ will not contain the response body.
### error(ScriptContext, Flow)

5
examples/stream.py Normal file
View File

@@ -0,0 +1,5 @@
def responseheaders(ctx, flow):
    """
    Tag every response for streaming as soon as its headers arrive,
    so the body is forwarded to the client without being buffered.
    """
    flow.response.stream = True

View File

@@ -27,6 +27,15 @@ def request(ctx, flow):
"""
ctx.log("request")
def responseheaders(ctx, flow):
    """
    Fires once the server's response headers are available, before the
    body has been read. This is the place to tell mitmproxy to stream
    the response (e.g. by setting flow.response.stream).
    """
    ctx.log("responseheaders")
def response(ctx, flow):
"""
Called when a server response has been received.

View File

@@ -3,7 +3,7 @@ import re
import argparse
from argparse import ArgumentTypeError
from netlib import http
from . import proxy, filt
from . import filt, utils
from .proxy import config
APP_HOST = "mitm.it"
@@ -23,13 +23,13 @@ def _parse_hook(s):
elif len(parts) == 3:
patt, a, b = parts
else:
raise ParseException("Malformed hook specifier - too few clauses: %s"%s)
raise ParseException("Malformed hook specifier - too few clauses: %s" % s)
if not a:
raise ParseException("Empty clause: %s"%str(patt))
raise ParseException("Empty clause: %s" % str(patt))
if not filt.parse(patt):
raise ParseException("Malformed filter pattern: %s"%patt)
raise ParseException("Malformed filter pattern: %s" % patt)
return patt, a, b
@@ -64,7 +64,7 @@ def parse_replace_hook(s):
try:
re.compile(regex)
except re.error, e:
raise ParseException("Malformed replacement regex: %s"%str(e.message))
raise ParseException("Malformed replacement regex: %s" % str(e.message))
return patt, regex, replacement
@@ -98,7 +98,6 @@ def parse_setheader(s):
def parse_server_spec(url):
normalized_url = re.sub("^https?2", "", url)
p = http.parse_url(normalized_url)
@@ -125,6 +124,8 @@ def get_common_options(options):
if options.stickyauth_filt:
stickyauth = options.stickyauth_filt
stream_large_bodies = utils.parse_size(options.stream_large_bodies)
reps = []
for i in options.replace:
try:
@@ -140,10 +141,9 @@ def get_common_options(options):
try:
v = open(path, "rb").read()
except IOError, e:
raise ArgumentTypeError("Could not read replace file: %s"%path)
raise ArgumentTypeError("Could not read replace file: %s" % path)
reps.append((patt, rex, v))
setheaders = []
for i in options.setheader:
try:
@@ -153,29 +153,30 @@ def get_common_options(options):
setheaders.append(p)
return dict(
app = options.app,
app_host = options.app_host,
app_port = options.app_port,
app_external = options.app_external,
app=options.app,
app_host=options.app_host,
app_port=options.app_port,
app_external=options.app_external,
anticache = options.anticache,
anticomp = options.anticomp,
client_replay = options.client_replay,
kill = options.kill,
no_server = options.no_server,
refresh_server_playback = not options.norefresh,
rheaders = options.rheaders,
rfile = options.rfile,
replacements = reps,
setheaders = setheaders,
server_replay = options.server_replay,
scripts = options.scripts,
stickycookie = stickycookie,
stickyauth = stickyauth,
showhost = options.showhost,
wfile = options.wfile,
verbosity = options.verbose,
nopop = options.nopop,
anticache=options.anticache,
anticomp=options.anticomp,
client_replay=options.client_replay,
kill=options.kill,
no_server=options.no_server,
refresh_server_playback=not options.norefresh,
rheaders=options.rheaders,
rfile=options.rfile,
replacements=reps,
setheaders=setheaders,
server_replay=options.server_replay,
scripts=options.scripts,
stickycookie=stickycookie,
stickyauth=stickyauth,
stream_large_bodies=stream_large_bodies,
showhost=options.showhost,
wfile=options.wfile,
verbosity=options.verbose,
nopop=options.nopop,
)
@@ -187,8 +188,8 @@ def common_options(parser):
)
parser.add_argument(
"--confdir",
action="store", type = str, dest="confdir", default='~/.mitmproxy',
help = "Configuration directory. (~/.mitmproxy)"
action="store", type=str, dest="confdir", default='~/.mitmproxy',
help="Configuration directory. (~/.mitmproxy)"
)
parser.add_argument(
"--host",
@@ -240,10 +241,16 @@ def common_options(parser):
"-Z",
action="store", dest="body_size_limit", default=None,
metavar="SIZE",
help="Byte size limit of HTTP request and response bodies."\
help="Byte size limit of HTTP request and response bodies." \
" Understands k/m/g suffixes, i.e. 3m for 3 megabytes."
)
parser.add_argument(
"--stream",
action="store", dest="stream_large_bodies", default=None,
metavar="SIZE",
help="Stream data to the client if response body exceeds the given threshold. "
"If streamed, the body will not be stored in any way. Understands k/m/g suffixes, i.e. 3m for 3 megabytes."
)
group = parser.add_argument_group("Proxy Options")
# We could make a mutually exclusive group out of -R, -U, -T, but we don't do that because
@@ -251,8 +258,8 @@ def common_options(parser):
# - our own error messages are more helpful
group.add_argument(
"-b",
action="store", type = str, dest="addr", default='',
help = "Address to bind proxy to (defaults to all interfaces)"
action="store", type=str, dest="addr", default='',
help="Address to bind proxy to (defaults to all interfaces)"
)
group.add_argument(
"-U",
@@ -266,8 +273,8 @@ def common_options(parser):
)
group.add_argument(
"-p",
action="store", type = int, dest="port", default=8080,
help = "Proxy service port."
action="store", type=int, dest="port", default=8080,
help="Proxy service port."
)
group.add_argument(
"-R",
@@ -280,7 +287,6 @@ def common_options(parser):
help="Set transparent proxy mode."
)
group = parser.add_argument_group(
"Advanced Proxy Options",
"""
@@ -304,7 +310,6 @@ def common_options(parser):
help="Override the destination server all requests are sent to: http[s][2http[s]]://host[:port]"
)
group = parser.add_argument_group("Web App")
group.add_argument(
"-a",
@@ -315,7 +320,7 @@ def common_options(parser):
"--app-host",
action="store", dest="app_host", default=APP_HOST, metavar="host",
help="Domain to serve the app from. For transparent mode, use an IP when\
a DNS entry for the app domain is not present. Default: %s"%APP_HOST
a DNS entry for the app domain is not present. Default: %s" % APP_HOST
)
group.add_argument(
@@ -329,7 +334,6 @@ def common_options(parser):
help="Serve the app outside of the proxy."
)
group = parser.add_argument_group("Client Replay")
group.add_argument(
"-c",
@@ -352,22 +356,21 @@ def common_options(parser):
"--rheader",
action="append", dest="rheaders", type=str,
help="Request headers to be considered during replay. "
"Can be passed multiple times."
"Can be passed multiple times."
)
group.add_argument(
"--norefresh",
action="store_true", dest="norefresh", default=False,
help= "Disable response refresh, "
"which updates times in cookies and headers for replayed responses."
help="Disable response refresh, "
"which updates times in cookies and headers for replayed responses."
)
group.add_argument(
"--no-pop",
action="store_true", dest="nopop", default=False,
help="Disable response pop from response flow. "
"This makes it possible to replay same response multiple times."
"This makes it possible to replay same response multiple times."
)
group = parser.add_argument_group(
"Replacements",
"""
@@ -389,7 +392,6 @@ def common_options(parser):
help="Replacement pattern, where the replacement clause is a path to a file."
)
group = parser.add_argument_group(
"Set Headers",
"""
@@ -405,7 +407,6 @@ def common_options(parser):
help="Header set pattern."
)
group = parser.add_argument_group(
"Proxy Authentication",
"""
@@ -434,5 +435,4 @@ def common_options(parser):
help="Allow access to users specified in an Apache htpasswd file."
)
config.ssl_option_group(parser)

View File

@@ -165,6 +165,8 @@ class StatusBar(common.WWrap):
opts.append("no-upstream-cert")
if self.master.state.follow_focus:
opts.append("following")
if self.master.stream_large_bodies:
opts.append("stream:%s" % utils.pretty_size(self.master.stream_large_bodies.max_size))
if opts:
r.append("[%s]"%(":".join(opts)))
@@ -343,6 +345,7 @@ class Options(object):
"server_replay",
"stickycookie",
"stickyauth",
"stream_large_bodies",
"verbosity",
"wfile",
"nopop",
@@ -391,6 +394,8 @@ class ConsoleMaster(flow.FlowMaster):
print >> sys.stderr, "Sticky auth error:", r
sys.exit(1)
self.set_stream_large_bodies(options.stream_large_bodies)
self.refresh_server_playback = options.refresh_server_playback
self.anticache = options.anticache
self.anticomp = options.anticomp

View File

@@ -153,7 +153,7 @@ class FlowView(common.WWrap):
def cont_view_handle_missing(self, conn, viewmode):
if conn.content == CONTENT_MISSING:
msg, body = "", [urwid.Text([("error", "[content missing]")])], 0
msg, body = "", [urwid.Text([("error", "[content missing]")])]
else:
msg, body = self.content_view(viewmode, conn)

View File

@@ -49,7 +49,7 @@ class Channel:
try:
# The timeout is here so we can handle a should_exit event.
g = m.reply.q.get(timeout=0.5)
except Queue.Empty: # pragma: nocover
except Queue.Empty: # pragma: nocover
continue
return g

View File

@@ -2,6 +2,7 @@ from __future__ import absolute_import
import sys, os
import netlib.utils
from . import flow, filt, utils
from .protocol import http
class DumpError(Exception): pass
@@ -30,6 +31,7 @@ class Options(object):
"showhost",
"stickycookie",
"stickyauth",
"stream_large_bodies",
"verbosity",
"wfile",
]
@@ -69,6 +71,8 @@ class DumpMaster(flow.FlowMaster):
self.showhost = options.showhost
self.refresh_server_playback = options.refresh_server_playback
self.set_stream_large_bodies(options.stream_large_bodies)
if filtstr:
self.filt = filt.parse(filtstr)
else:
@@ -80,6 +84,7 @@ class DumpMaster(flow.FlowMaster):
if options.stickyauth:
self.set_stickyauth(options.stickyauth)
if options.wfile:
path = os.path.expanduser(options.wfile)
try:
@@ -157,12 +162,17 @@ class DumpMaster(flow.FlowMaster):
if f.response:
if self.o.flow_detail > 0:
sz = utils.pretty_size(len(f.response.content))
if f.response.content == http.CONTENT_MISSING:
sz = "(content missing)"
else:
sz = utils.pretty_size(len(f.response.content))
result = " << %s %s"%(str_response(f.response), sz)
if self.o.flow_detail > 1:
result = result + "\n\n" + self.indent(4, f.response.headers)
if self.o.flow_detail > 2:
if utils.isBin(f.response.content):
if f.response.content == http.CONTENT_MISSING:
cont = self.indent(4, "(content missing)")
elif utils.isBin(f.response.content):
d = netlib.utils.hexdump(f.response.content)
d = "\n".join("%s\t%s %s"%i for i in d)
cont = self.indent(4, d)

View File

@@ -145,6 +145,17 @@ class SetHeaders:
f.request.headers.add(header, value)
class StreamLargeBodies(object):
    """
    Marks message bodies for streaming when their announced size exceeds
    a configured threshold, or when the size cannot be determined up
    front (expected_http_body_size returns a negative value, e.g. for
    chunked transfer encoding).
    """

    def __init__(self, max_size):
        # Bodies larger than this many bytes are streamed, not buffered.
        self.max_size = max_size

    def run(self, flow, is_request):
        if is_request:
            message = flow.request
        else:
            message = flow.response
        code = flow.response.code if flow.response else None
        expected_size = netlib.http.expected_http_body_size(
            message.headers, is_request, flow.request.method, code
        )
        within_limit = 0 <= expected_size <= self.max_size
        if not within_limit:
            message.stream = True
class ClientPlaybackState:
def __init__(self, flows, exit):
self.flows, self.exit = flows, exit
@@ -437,6 +448,7 @@ class FlowMaster(controller.Master):
self.anticache = False
self.anticomp = False
self.stream_large_bodies = False
self.refresh_server_playback = False
self.replacehooks = ReplaceHooks()
self.setheaders = SetHeaders()
@@ -522,6 +534,12 @@ class FlowMaster(controller.Master):
self.stickycookie_state = None
self.stickycookie_txt = None
def set_stream_large_bodies(self, max_size):
    """
    Enable or disable automatic streaming of large response bodies.

    A numeric max_size installs a StreamLargeBodies checker; None
    disables the feature (stored as False, matching the attribute's
    default value).
    """
    if max_size is None:
        self.stream_large_bodies = False
    else:
        self.stream_large_bodies = StreamLargeBodies(max_size)
def set_stickyauth(self, txt):
if txt:
flt = filt.parse(txt)
@@ -701,6 +719,16 @@ class FlowMaster(controller.Master):
self.process_new_request(f)
return f
def handle_responseheaders(self, resp):
    """
    Handle the "responseheaders" event for a flow.

    Gives inline scripts a chance to enable streaming for this response,
    then applies the automatic large-body check (when configured) before
    acknowledging the message so the proxy thread can continue.
    """
    flow = resp.flow
    # Scripts may set flow.response.stream = True in this hook.
    self.run_script_hook("responseheaders", flow)
    if self.stream_large_bodies:
        # Falsy means the feature is disabled; otherwise this is a
        # StreamLargeBodies instance.
        self.stream_large_bodies.run(flow, False)
    resp.reply()
    return flow
def handle_response(self, r):
f = self.state.add_response(r)
if f:

View File

@@ -293,7 +293,8 @@ class HTTPRequest(HTTPMessage):
raise http.HttpError(400, "Invalid headers")
if include_body:
content = http.read_http_body(rfile, headers, body_size_limit, True)
content = http.read_http_body(rfile, headers, body_size_limit,
method, None, True)
timestamp_end = utils.timestamp()
return HTTPRequest(form_in, method, scheme, host, port, path, httpversion, headers,
@@ -305,7 +306,7 @@ class HTTPRequest(HTTPMessage):
if form == "relative":
path = self.path if self.method != "OPTIONS" else "*"
request_line = '%s %s HTTP/%s.%s' % \
(self.method, path, self.httpversion[0], self.httpversion[1])
(self.method, path, self.httpversion[0], self.httpversion[1])
elif form == "authority":
request_line = '%s %s:%s HTTP/%s.%s' % (self.method, self.host, self.port,
self.httpversion[0], self.httpversion[1])
@@ -591,6 +592,7 @@ class HTTPResponse(HTTPMessage):
# Is this request replayed?
self.is_replay = False
self.stream = False
_stateobject_attributes = HTTPMessage._stateobject_attributes.copy()
_stateobject_attributes.update(
@@ -632,24 +634,22 @@ class HTTPResponse(HTTPMessage):
return 'HTTP/%s.%s %s %s' % \
(self.httpversion[0], self.httpversion[1], self.code, self.msg)
def _assemble_headers(self):
def _assemble_headers(self, preserve_transfer_encoding=False):
headers = self.headers.copy()
utils.del_all(
headers,
[
'Proxy-Connection',
'Transfer-Encoding'
]
)
utils.del_all(headers, ['Proxy-Connection'])
if not preserve_transfer_encoding:
utils.del_all(headers, ['Transfer-Encoding'])
if self.content:
headers["Content-Length"] = [str(len(self.content))]
elif 'Transfer-Encoding' in self.headers: # add content-length for chuncked transfer-encoding with no content
elif not preserve_transfer_encoding and 'Transfer-Encoding' in self.headers: # add content-length for chuncked transfer-encoding with no content
headers["Content-Length"] = ["0"]
return str(headers)
def _assemble_head(self):
return '%s\r\n%s\r\n' % (self._assemble_first_line(), self._assemble_headers())
def _assemble_head(self, preserve_transfer_encoding=False):
return '%s\r\n%s\r\n' % (
self._assemble_first_line(), self._assemble_headers(preserve_transfer_encoding=preserve_transfer_encoding))
def _assemble(self):
"""
@@ -865,15 +865,16 @@ class HTTPHandler(ProtocolHandler, TemporaryServerChangeMixin):
pass
self.c.close = True
def get_response_from_server(self, request):
def get_response_from_server(self, request, include_body=True):
self.c.establish_server_connection()
request_raw = request._assemble()
for i in range(2):
try:
self.c.server_conn.send(request_raw)
return HTTPResponse.from_stream(self.c.server_conn.rfile, request.method,
body_size_limit=self.c.config.body_size_limit)
res = HTTPResponse.from_stream(self.c.server_conn.rfile, request.method,
body_size_limit=self.c.config.body_size_limit, include_body=include_body)
return res
except (tcp.NetLibDisconnect, http.HttpErrorConnClosed), v:
self.c.log("error in server communication: %s" % str(v), level="debug")
if i < 1:
@@ -915,21 +916,53 @@ class HTTPHandler(ProtocolHandler, TemporaryServerChangeMixin):
if isinstance(request_reply, HTTPResponse):
flow.response = request_reply
else:
flow.response = self.get_response_from_server(flow.request)
flow.server_conn = self.c.server_conn # no further manipulation of self.c.server_conn beyond this point
# read initially in "stream" mode, so we can get the headers separately
flow.response = self.get_response_from_server(flow.request, include_body=False)
# call the appropriate script hook - this is an opportunity for an inline script to set flow.stream = True
self.c.channel.ask("responseheaders", flow.response)
# now get the rest of the request body, if body still needs to be read but not streaming this response
if flow.response.stream:
flow.response.content = CONTENT_MISSING
else:
flow.response.content = http.read_http_body(self.c.server_conn.rfile, flow.response.headers,
self.c.config.body_size_limit,
flow.request.method, flow.response.code, False)
# no further manipulation of self.c.server_conn beyond this point
# we can safely set it as the final attribute value here.
flow.server_conn = self.c.server_conn
self.c.log("response", "debug", [flow.response._assemble_first_line()])
response_reply = self.c.channel.ask("response", flow.response)
if response_reply is None or response_reply == KILL:
return False
self.c.client_conn.send(flow.response._assemble())
if not flow.response.stream:
# no streaming:
# we already received the full response from the server and can send it to the client straight away.
self.c.client_conn.send(flow.response._assemble())
else:
# streaming:
# First send the body and then transfer the response incrementally:
h = flow.response._assemble_head(preserve_transfer_encoding=True)
self.c.client_conn.send(h)
for chunk in http.read_http_body_chunked(self.c.server_conn.rfile,
flow.response.headers,
self.c.config.body_size_limit, flow.request.method,
flow.response.code, False, 4096):
for part in chunk:
self.c.client_conn.wfile.write(part)
self.c.client_conn.wfile.flush()
flow.timestamp_end = utils.timestamp()
if (http.connection_close(flow.request.httpversion, flow.request.headers) or
http.connection_close(flow.response.httpversion, flow.response.headers)):
http.connection_close(flow.response.httpversion, flow.response.headers) or
http.expected_http_body_size(flow.response.headers, False, flow.request.method,
flow.response.code) == -1):
if flow.request.form_in == "authority" and flow.response.code == 200:
# Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
# Some proxies (e.g. Charles) send a CONNECT response with HTTP/1.0 and no Content-Length header
@@ -943,6 +976,7 @@ class HTTPHandler(ProtocolHandler, TemporaryServerChangeMixin):
# If the user has changed the target server on this connection,
# restore the original target server
self.restore_server()
return True
except (HttpAuthenticationError, http.HttpError, proxy.ProxyError, tcp.NetLibError), e:
self.handle_error(e, flow)
@@ -965,7 +999,7 @@ class HTTPHandler(ProtocolHandler, TemporaryServerChangeMixin):
if flow.request and not flow.response:
self.c.channel.ask("error", flow.error)
else:
pass # FIXME: Do we want to persist errors without flows?
pass # FIXME: Do we want to persist errors without flows?
try:
self.send_error(code, message, headers)
@@ -1068,7 +1102,7 @@ class HTTPHandler(ProtocolHandler, TemporaryServerChangeMixin):
return True
raise http.HttpError(400, "Invalid HTTP request form (expected: %s, got: %s)" %
(self.expected_form_in, request.form_in))
(self.expected_form_in, request.form_in))
def authenticate(self, request):
if self.c.config.authenticator:

View File

@@ -95,14 +95,14 @@ class ConnectionHandler:
except (ProxyError, tcp.NetLibError), e:
handle_error(self.conntype, self, e)
except Exception, e:
except Exception:
import traceback, sys
self.log(traceback.format_exc(), "error")
print >> sys.stderr, traceback.format_exc()
print >> sys.stderr, "mitmproxy has crashed!"
print >> sys.stderr, "Please lodge a bug report at: https://github.com/mitmproxy/mitmproxy"
raise e
raise
self.del_server_connection()
self.log("clientdisconnect", "info")

View File

@@ -7,14 +7,18 @@ def serverconnect(ctx, cc):
ctx.log("XSERVERCONNECT")
log.append("serverconnect")
def request(ctx, r):
def request(ctx, f):
ctx.log("XREQUEST")
log.append("request")
def response(ctx, r):
def response(ctx, f):
ctx.log("XRESPONSE")
log.append("response")
def responseheaders(ctx, f):
    # Record that the responseheaders hook fired, for assertion by the
    # surrounding test harness (log is a module-level list).
    ctx.log("XRESPONSEHEADERS")
    log.append("responseheaders")
def clientdisconnect(ctx, cc):
ctx.log("XCLIENTDISCONNECT")
log.append("clientdisconnect")

View File

@@ -383,6 +383,61 @@ class TestRedirectRequest(tservers.HTTPProxTest):
assert r3.content == r2.content == r1.content
# Make sure that we actually use the same connection in this test case
class MasterStreamRequest(tservers.TestMaster):
    """
    Enables the stream flag on the flow for all requests
    """
    def handle_responseheaders(self, r):
        # Tag every response for streaming, then acknowledge so the
        # proxy thread can proceed.
        r.stream = True
        r.reply()
class TestStreamRequest(tservers.HTTPProxTest):
    """
    End-to-end tests for response streaming: every response is tagged for
    streaming via MasterStreamRequest, and we verify that bodies still
    arrive intact at the client.
    """
    masterclass = MasterStreamRequest

    def test_stream_simple(self):
        p = self.pathoc()
        # a request with 100k of data but without content-length
        self.server.clear_log()
        r1 = p.request("get:'%s/p/200:r:b@100k:d102400'"%self.server.urlbase)
        assert r1.status_code == 200
        assert len(r1.content) > 100000
        assert self.server.last_log()

    def test_stream_multiple(self):
        p = self.pathoc()
        # simple request with streaming turned on
        self.server.clear_log()
        r1 = p.request("get:'%s/p/200'"%self.server.urlbase)
        assert r1.status_code == 200
        assert self.server.last_log()
        # now send back 100k of data, streamed but not chunked
        self.server.clear_log()
        r1 = p.request("get:'%s/p/200:b@100k'"%self.server.urlbase)
        assert r1.status_code == 200
        assert self.server.last_log()

    def test_stream_chunked(self):
        # Talk raw HTTP over a socket so we can observe the chunked
        # framing that the proxy forwards when streaming.
        connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        connection.connect(("127.0.0.1", self.proxy.port))
        fconn = connection.makefile()
        spec = '200:h"Transfer-Encoding"="chunked":r:b"4\\r\\nthis\\r\\n7\\r\\nisatest\\r\\n0\\r\\n\\r\\n"'
        connection.send("GET %s/p/%s HTTP/1.1\r\n"%(self.server.urlbase, spec))
        connection.send("\r\n")
        # Read headers only; the body is consumed chunk-by-chunk below.
        httpversion, code, msg, headers, content = http.read_response(fconn, "GET", None, include_body=False)
        assert headers["Transfer-Encoding"][0] == 'chunked'
        assert code == 200
        # The final empty string is the chunked-encoding terminator.
        chunks = list(content for _, content, _ in http.read_http_body_chunked(fconn, headers, None, "GET", 200, False))
        assert chunks == ["this", "isatest", ""]
        connection.close()
class MasterFakeResponse(tservers.TestMaster):
def handle_request(self, m):