Compare commits
319 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b8cb769693 | ||
|
|
5e096c8ec9 | ||
|
|
c298fbfadc | ||
|
|
24a8dc408c | ||
|
|
d7748cea4f | ||
|
|
8fa96d1f3f | ||
|
|
5553eb6371 | ||
|
|
4e2d19714c | ||
|
|
618a9c0e2b | ||
|
|
c1788c37a1 | ||
|
|
7d90eb65ed | ||
|
|
a4f7728fad | ||
|
|
f1dc3f2ab2 | ||
|
|
549512e93e | ||
|
|
8385e586c0 | ||
|
|
747e1f0992 | ||
|
|
5fdf710c81 | ||
|
|
4c6a445361 | ||
|
|
4e1bbc1156 | ||
|
|
786e304bb9 | ||
|
|
4da8054e21 | ||
|
|
99ac7b8401 | ||
|
|
8110a9a3eb | ||
|
|
d8cadd2ff3 | ||
|
|
79a0334a02 | ||
|
|
ab0e10e60f | ||
|
|
b9737ed89e | ||
|
|
c6896d7392 | ||
|
|
61fab03b24 | ||
|
|
f526e5fa12 | ||
|
|
4979a22d3e | ||
|
|
ab1d8fa350 | ||
|
|
bb03255da0 | ||
|
|
c02fdb2463 | ||
|
|
f13e2213ea | ||
|
|
42d06a05c0 | ||
|
|
7ed1c1c231 | ||
|
|
c6ee813479 | ||
|
|
2df2fe0e4c | ||
|
|
15cc09f1b8 | ||
|
|
7fef0ecdf5 | ||
|
|
35f4a1c424 | ||
|
|
585bf9423f | ||
|
|
6dc945571d | ||
|
|
a6df72cfc8 | ||
|
|
e9ac4bef20 | ||
|
|
690f797da2 | ||
|
|
a2fddb4404 | ||
|
|
d187965233 | ||
|
|
a050eeef05 | ||
|
|
b6725ee802 | ||
|
|
a979e1ad50 | ||
|
|
2240d2a6a5 | ||
|
|
74c51df580 | ||
|
|
62e51018d0 | ||
|
|
0d05068f91 | ||
|
|
ed74ed24a0 | ||
|
|
45ab22f0d9 | ||
|
|
1441fade90 | ||
|
|
2153835545 | ||
|
|
2739cb4861 | ||
|
|
bc3ba4c993 | ||
|
|
50630d62fd | ||
|
|
0de97ad9e0 | ||
|
|
65e88f49d4 | ||
|
|
5690e7c399 | ||
|
|
18d0e840b5 | ||
|
|
552146d015 | ||
|
|
ac6987c54a | ||
|
|
76175672ad | ||
|
|
c8ae1e85b3 | ||
|
|
08f410cacc | ||
|
|
d138af7217 | ||
|
|
d51b8cab0c | ||
|
|
8d662e6636 | ||
|
|
fa6305ee98 | ||
|
|
fdffb23989 | ||
|
|
22d4559a7a | ||
|
|
4e13ab1d05 | ||
|
|
d57a1d6035 | ||
|
|
3f2d1381d0 | ||
|
|
d3aad7a185 | ||
|
|
23f7214fc3 | ||
|
|
e67dbf6123 | ||
|
|
041eafba73 | ||
|
|
5b5b79f5c4 | ||
|
|
93565392cd | ||
|
|
ed56d67cea | ||
|
|
e1356dd2b6 | ||
|
|
1790246fed | ||
|
|
15ad7704d2 | ||
|
|
533f61f67a | ||
|
|
8b841bc9e3 | ||
|
|
0bed5fae27 | ||
|
|
a03e1af7e7 | ||
|
|
883424030f | ||
|
|
688faa9baa | ||
|
|
764724748b | ||
|
|
2c73e8f816 | ||
|
|
2ba8296843 | ||
|
|
00942c1431 | ||
|
|
4a2964985c | ||
|
|
bd1d699040 | ||
|
|
4ef8260e9a | ||
|
|
6a5ddbd3d4 | ||
|
|
760d303dfa | ||
|
|
3afa2c38fb | ||
|
|
7789b602c8 | ||
|
|
bbfdc7b7de | ||
|
|
986a41d180 | ||
|
|
de08810a47 | ||
|
|
bcda65e453 | ||
|
|
5810e7c0df | ||
|
|
25fa596cd6 | ||
|
|
ddc9155c24 | ||
|
|
2df9c52c09 | ||
|
|
ee8058a2d9 | ||
|
|
554047da85 | ||
|
|
62ca9b71ff | ||
|
|
bc3bf969ba | ||
|
|
3f6619ff59 | ||
|
|
4f38b3a9c0 | ||
|
|
a4270efaf2 | ||
|
|
d2f5db1f37 | ||
|
|
1af26bb915 | ||
|
|
70dff87240 | ||
|
|
dbd75e02f7 | ||
|
|
18029df99c | ||
|
|
b0f77dfefd | ||
|
|
fa11b7c9be | ||
|
|
2616f490fe | ||
|
|
25a06c3ec1 | ||
|
|
0c3035a2b5 | ||
|
|
86a19faf68 | ||
|
|
9113277cd3 | ||
|
|
77a33c441b | ||
|
|
a3030f3ea3 | ||
|
|
0434988ade | ||
|
|
d32d6bc5e3 | ||
|
|
8ddc3b4ef2 | ||
|
|
b74ba817ea | ||
|
|
5f1d7a0746 | ||
|
|
71ad7140be | ||
|
|
7aa79b89e8 | ||
|
|
6ad8b1a15d | ||
|
|
a7df6e1503 | ||
|
|
acdc2d00b4 | ||
|
|
14def89f50 | ||
|
|
4ed8031172 | ||
|
|
08fdd23e23 | ||
|
|
fcc874fa18 | ||
|
|
a3509b7f22 | ||
|
|
a82ac9eaf0 | ||
|
|
f25156a637 | ||
|
|
3e70fa8d58 | ||
|
|
586472e364 | ||
|
|
da1ccfddeb | ||
|
|
1ad7e91527 | ||
|
|
5f785e26b9 | ||
|
|
b14c29b25c | ||
|
|
5326b7610a | ||
|
|
9c985f2d20 | ||
|
|
d9fda2b207 | ||
|
|
00d3395359 | ||
|
|
2709441d5b | ||
|
|
46bd780862 | ||
|
|
d3dce8f943 | ||
|
|
a1ecd25e8b | ||
|
|
d564086377 | ||
|
|
4914dbc971 | ||
|
|
e484e667a6 | ||
|
|
46c5982d3d | ||
|
|
205d2ad577 | ||
|
|
6874295c45 | ||
|
|
aea96132ec | ||
|
|
9f85f0b846 | ||
|
|
b1b94b49e4 | ||
|
|
5df0b9e961 | ||
|
|
866a93a8bc | ||
|
|
e3f28e1c06 | ||
|
|
76f2595df7 | ||
|
|
4026aa2e5f | ||
|
|
d41095c35e | ||
|
|
2b6bedac0e | ||
|
|
8b5e081233 | ||
|
|
64360f5996 | ||
|
|
7e6196511f | ||
|
|
fa72b2cd10 | ||
|
|
65b587cdbb | ||
|
|
cdd5a53767 | ||
|
|
56d2f9fbdb | ||
|
|
f7b3a6d571 | ||
|
|
a98d287e26 | ||
|
|
71642eac65 | ||
|
|
4b9ee4c31e | ||
|
|
5075ede6a9 | ||
|
|
35a914a549 | ||
|
|
c6150cc198 | ||
|
|
d5e3722c97 | ||
|
|
2a09cad420 | ||
|
|
05111f093d | ||
|
|
965d318164 | ||
|
|
28fd3bd461 | ||
|
|
3b246f7e27 | ||
|
|
17facd8b72 | ||
|
|
ae79fe1660 | ||
|
|
ee71bcfbe8 | ||
|
|
d9db1cf5b3 | ||
|
|
67f2610032 | ||
|
|
28daa93268 | ||
|
|
362fdf9bae | ||
|
|
e5bded7dee | ||
|
|
4cb0e5bfb4 | ||
|
|
d1ff527550 | ||
|
|
7629a43d82 | ||
|
|
b635112d36 | ||
|
|
4ac59a7859 | ||
|
|
8fbba59e8d | ||
|
|
45f4768a5c | ||
|
|
a566684e32 | ||
|
|
34adc83c71 | ||
|
|
6f00987850 | ||
|
|
9abff4f0ac | ||
|
|
e9006ae199 | ||
|
|
82245298f4 | ||
|
|
b1dc418a53 | ||
|
|
25f12b0e5d | ||
|
|
4d02ae0582 | ||
|
|
b9f8645258 | ||
|
|
2346a6d553 | ||
|
|
f8719f13df | ||
|
|
8309ab0ec8 | ||
|
|
2e72b310d9 | ||
|
|
f0122f1403 | ||
|
|
f23818ceea | ||
|
|
cd0e2f18e6 | ||
|
|
89a58d7e30 | ||
|
|
98a7aaca18 | ||
|
|
ce48cb4deb | ||
|
|
be133e7a0b | ||
|
|
ffd7043ee7 | ||
|
|
87623a8d75 | ||
|
|
b51aac8a86 | ||
|
|
730c78ac53 | ||
|
|
1662b8505b | ||
|
|
8ef208a9e2 | ||
|
|
7a3b871b33 | ||
|
|
0760607a7d | ||
|
|
9042d3f3b9 | ||
|
|
57c653be5f | ||
|
|
cbd8d09849 | ||
|
|
9d0e3c8d61 | ||
|
|
028d5bacc5 | ||
|
|
e337682d8e | ||
|
|
cfc6e8777e | ||
|
|
e3196dac4d | ||
|
|
179cf75862 | ||
|
|
f7e4e89b12 | ||
|
|
12d2b1f926 | ||
|
|
62088a6661 | ||
|
|
a817db5bd6 | ||
|
|
8cc0469ee7 | ||
|
|
bb6ec29b18 | ||
|
|
1ff6a767d0 | ||
|
|
357502fe03 | ||
|
|
17835b9b78 | ||
|
|
a1456742a8 | ||
|
|
f3742f29da | ||
|
|
f3f8462ddc | ||
|
|
73a7d893e3 | ||
|
|
759f5d71a6 | ||
|
|
af92153974 | ||
|
|
9b398c03ab | ||
|
|
675b3133b4 | ||
|
|
43f1c72511 | ||
|
|
ddb5748a76 | ||
|
|
c89c4361c3 | ||
|
|
78049abac1 | ||
|
|
acd511f676 | ||
|
|
c1eaa9f74c | ||
|
|
e6288e2d07 | ||
|
|
0f4ae61e7d | ||
|
|
6cd32bf96f | ||
|
|
3648c7953a | ||
|
|
4043829cf2 | ||
|
|
689f5f0d1f | ||
|
|
47e1695512 | ||
|
|
6ce8b49e05 | ||
|
|
1b961fc4ad | ||
|
|
9c24401b18 | ||
|
|
74d8b18408 | ||
|
|
5936a48e59 | ||
|
|
aa7f8ac90b | ||
|
|
ebfa9b2a5d | ||
|
|
5d6f855387 | ||
|
|
25b0631190 | ||
|
|
1c5434d72c | ||
|
|
ecd4645988 | ||
|
|
b0849387b7 | ||
|
|
669ce8ee7c | ||
|
|
6df4be93e3 | ||
|
|
f756d3bec1 | ||
|
|
1559ded009 | ||
|
|
ce41046786 | ||
|
|
7ec03e45a5 | ||
|
|
6dc0f105cc | ||
|
|
94ae720a22 | ||
|
|
76b4c6ba82 | ||
|
|
1a963b91bb | ||
|
|
7e21ac0eb8 | ||
|
|
1c9e7b982a | ||
|
|
b6e1bf63c3 | ||
|
|
76f83d7763 | ||
|
|
1a5b157c8f | ||
|
|
65fbb7bd0d | ||
|
|
8e176c2086 | ||
|
|
2a90ea69fd | ||
|
|
37c8d3425d | ||
|
|
18d4c3a9e9 |
2
.gitignore
vendored
@@ -3,7 +3,7 @@ MANIFEST
|
||||
/dist
|
||||
/tmp
|
||||
/doc
|
||||
*.py[cd]
|
||||
*.py[cdo]
|
||||
*.swp
|
||||
*.swo
|
||||
mitmproxyc
|
||||
|
||||
93
CHANGELOG
@@ -1,3 +1,93 @@
|
||||
|
||||
5 April 2012: mitmproxy 0.8:
|
||||
|
||||
* Detailed tutorial for Android interception. Some features that land in
|
||||
this release have finally made reliable Android interception possible.
|
||||
|
||||
* Upstream-cert mode, which uses information from the upstream server to
|
||||
generate interception certificates.
|
||||
|
||||
* Replacement patterns that let you easily do global replacements in flows
|
||||
matching filter patterns. Can be specified on the command-line, or edited
|
||||
interactively.
|
||||
|
||||
* Much more sophisticated and usable pretty printing of request bodies.
|
||||
Support for auto-indentation of Javascript, inspection of image EXIF
|
||||
data, and more.
|
||||
|
||||
* Details view for flows, showing connection and SSL cert information (X
|
||||
keyboard shortcut).
|
||||
|
||||
* Server certificates are now stored and serialized in saved traffic for
|
||||
later analysis. This means that the 0.8 serialization format is NOT
|
||||
compatible with 0.7.
|
||||
|
||||
* Many other improvements, including bugfixes, and expanded scripting API,
|
||||
and more sophisticated certificate handling.
|
||||
|
||||
|
||||
20 February 2012: mitmproxy 0.7:
|
||||
|
||||
* New built-in key/value editor. This lets you interactively edit URL query
|
||||
strings, headers and URL-encoded form data.
|
||||
|
||||
* Extend script API to allow duplication and replay of flows.
|
||||
|
||||
* API for easy manipulation of URL-encoded forms and query strings.
|
||||
|
||||
* Add "D" shortcut in mitmproxy to duplicate a flow.
|
||||
|
||||
* Reverse proxy mode. In this mode mitmproxy acts as an HTTP server,
|
||||
forwarding all traffic to a specified upstream server.
|
||||
|
||||
* UI improvements - use unicode characters to make GUI more compact,
|
||||
improve spacing and layout throughout.
|
||||
|
||||
* Add support for filtering by HTTP method.
|
||||
|
||||
* Add the ability to specify an HTTP body size limit.
|
||||
|
||||
* Move to typed netstrings for serialization format - this makes 0.7
|
||||
backwards-incompatible with serialized data from 0.6!
|
||||
|
||||
* Significant improvements in speed and responsiveness of UI.
|
||||
|
||||
* Many minor bugfixes and improvements.
|
||||
|
||||
|
||||
7 August 2011: mitmproxy 0.6:
|
||||
|
||||
* New scripting API that allows much more flexible and fine-grained
|
||||
rewriting of traffic. See the docs for more info.
|
||||
|
||||
* Support for gzip and deflate content encodings. A new "z"
|
||||
keybinding in mitmproxy to let us quickly encode and decode content, plus
|
||||
automatic decoding for the "pretty" view mode.
|
||||
|
||||
* An event log, viewable with the "v" shortcut in mitmproxy, and the
|
||||
"-e" command-line flag in mitmdump.
|
||||
|
||||
* Huge performance improvements: mitmproxy interface, loading
|
||||
large numbers of flows from file.
|
||||
|
||||
* A new "replace" convenience method for all flow objects, that does a
|
||||
universal regex-based string replacement.
|
||||
|
||||
* Header management has been rewritten to maintain both case and order.
|
||||
|
||||
* Improved stability for SSL interception.
|
||||
|
||||
* Default expiry time on generated SSL certs has been dropped to avoid an
|
||||
OpenSSL overflow bug that caused certificates to expire in the distant
|
||||
past on some systems.
|
||||
|
||||
* A "pretty" view mode for JSON and form submission data.
|
||||
|
||||
* Expanded documentation and examples.
|
||||
|
||||
* Countless other small improvements and bugfixes.
|
||||
|
||||
|
||||
27 June 2011: mitmproxy 0.5:
|
||||
|
||||
* An -n option to start the tools without binding to a proxy port.
|
||||
@@ -59,6 +149,3 @@
|
||||
* "A" will now accept all intercepted connections
|
||||
|
||||
* Lots of bugfixes
|
||||
|
||||
|
||||
|
||||
|
||||
23
CONTRIBUTORS
@@ -1,5 +1,18 @@
|
||||
203 Aldo Cortesi
|
||||
18 Henrik Nordstrom
|
||||
13 Thomas Roth
|
||||
1 Yuangxuan Wang
|
||||
1 Henrik Nordström
|
||||
485 Aldo Cortesi
|
||||
18 Henrik Nordstrom
|
||||
13 Thomas Roth
|
||||
11 Stephen Altamirano
|
||||
6 András Veres-Szentkirályi
|
||||
4 Valtteri Virtanen
|
||||
2 alts
|
||||
2 Michael Frister
|
||||
2 Mark E. Haase
|
||||
2 Heikki Hannikainen
|
||||
1 meeee
|
||||
1 capt8bit
|
||||
1 Yuangxuan Wang
|
||||
1 Ulrich Petri
|
||||
1 Rune Halvorsen
|
||||
1 Rory McCann
|
||||
1 Henrik Nordström
|
||||
1 Felix Wolfsteller
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
include LICENSE
|
||||
include CHANGELOG
|
||||
include CONTRIBUTORS
|
||||
include README.mkd
|
||||
include README.txt
|
||||
exclude README.mkd
|
||||
recursive-include examples *
|
||||
recursive-include doc *
|
||||
recursive-include test *
|
||||
|
||||
42
README.mkd
@@ -1,22 +1,23 @@
|
||||
|
||||
__mitmproxy__ is an SSL-capable, intercepting HTTP proxy. It provides a console
|
||||
interface that allows traffic flows to be inspected and edited on the fly.
|
||||
__mitmproxy__ is an SSL-capable man-in-the-middle proxy for HTTP. It provides a
|
||||
console interface that allows traffic flows to be inspected and edited on the
|
||||
fly.
|
||||
|
||||
__mitmdump__ is the command-line version of mitmproxy, with the same
|
||||
functionality but without the frills. Think tcpdump for HTTP.
|
||||
functionality but without the user interface. Think tcpdump for HTTP.
|
||||
|
||||
Both tools are fully documentented in the commandline __--help__ flag, and, in
|
||||
the case of __mitmproxy__, a built-in help page accessible through the __?__
|
||||
keyboard shortcut.
|
||||
Complete documentation and a set of practical tutorials is included in the
|
||||
distribution package, and is also available at
|
||||
[mitmproxy.org](http://mitmproxy.org).
|
||||
|
||||
|
||||
Capabilities
|
||||
------------
|
||||
Features
|
||||
--------
|
||||
|
||||
- Intercept HTTP requests and responses and modify them on the fly.
|
||||
- Save complete HTTP conversations for later replay and analysis.
|
||||
- Replay the client-side of an HTTP conversations.
|
||||
- Replay HTTP responses of a previously recorded server.
|
||||
- Reverse proxy mode to forward traffic to a specified server.
|
||||
- Make scripted changes to HTTP traffic using Python.
|
||||
- SSL certificates for interception are generated on the fly.
|
||||
|
||||
@@ -33,17 +34,36 @@ Source is hosted on github:
|
||||
[github.com/cortesi/mitmproxy](http://github.com/cortesi/mitmproxy)
|
||||
|
||||
|
||||
Community
|
||||
---------
|
||||
|
||||
Come join us in the #mitmproxy channel on the OFTC IRC network
|
||||
(irc://irc.oftc.net:6667).
|
||||
|
||||
We also have a mailing list, hosted here:
|
||||
|
||||
http://groups.google.com/group/mitmproxy
|
||||
|
||||
|
||||
Requirements
|
||||
------------
|
||||
|
||||
* [Python](http://www.python.org) 2.6.x or 2.7.x.
|
||||
* [openssl](http://www.openssl.org/). Installed by default on most systems.
|
||||
* [PyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) 0.12 or newer.
|
||||
* [pyasn1](http://pypi.python.org/pypi/pyasn1) 0.1.2 or newer.
|
||||
* [urwid](http://excess.org/urwid/) version 0.9.8 or newer.
|
||||
* [PIL](http://www.pythonware.com/products/pil/) version 1.1 or newer.
|
||||
* [lxml](http://lxml.de/) version 2.3 or newer.
|
||||
|
||||
The following auxiliary components may be needed if you plan to hack on
|
||||
mitmproxy:
|
||||
|
||||
* The test suite uses the [pry](http://github.com/cortesi/pry) unit testing
|
||||
library.
|
||||
* Rendering the documentation requires [countershape](http://github.com/cortesi/countershape).
|
||||
|
||||
__mitmproxy__ is tested and developed on OSX, Linux and OpenBSD.
|
||||
__mitmproxy__ is tested and developed on OSX, Linux and OpenBSD. Windows is not
|
||||
supported at the moment.
|
||||
|
||||
You should also make sure that your console environment is set up with the
|
||||
following:
|
||||
|
||||
84
README.txt
Normal file
@@ -0,0 +1,84 @@
|
||||
**mitmproxy** is an SSL-capable man-in-the-middle proxy for HTTP. It provides a
|
||||
console interface that allows traffic flows to be inspected and edited on the
|
||||
fly.
|
||||
|
||||
**mitmdump** is the command-line version of mitmproxy, with the same
|
||||
functionality but without the user interface. Think tcpdump for HTTP.
|
||||
|
||||
Complete documentation and a set of practical tutorials is included in the
|
||||
distribution package, and is also available at mitmproxy.org_.
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Intercept HTTP requests and responses and modify them on the fly.
|
||||
- Save complete HTTP conversations for later replay and analysis.
|
||||
- Replay the client-side of an HTTP conversations.
|
||||
- Replay HTTP responses of a previously recorded server.
|
||||
- Reverse proxy mode to forward traffic to a specified server.
|
||||
- Make scripted changes to HTTP traffic using Python.
|
||||
- SSL certificates for interception are generated on the fly.
|
||||
|
||||
|
||||
Download
|
||||
--------
|
||||
|
||||
Releases and rendered documentation can be found on the mitmproxy website:
|
||||
|
||||
mitmproxy.org_
|
||||
|
||||
Source is hosted on github:
|
||||
|
||||
`github.com/cortesi/mitmproxy`_
|
||||
|
||||
|
||||
Community
|
||||
---------
|
||||
|
||||
Come join us in the #mitmproxy channel on the OFTC IRC network
|
||||
(irc.oftc.net, port 6667).
|
||||
|
||||
We also have a mailing list, hosted here:
|
||||
|
||||
http://groups.google.com/group/mitmproxy
|
||||
|
||||
|
||||
Requirements
|
||||
------------
|
||||
|
||||
* Python_ 2.6.x or 2.7.x.
|
||||
* PyOpenSSL_ 0.12 or newer.
|
||||
* pyasn1_ 0.1.2 or newer.
|
||||
* urwid_ version 0.9.8 or newer.
|
||||
* PIL_ version 1.1 or newer.
|
||||
* lxml_ version 2.3 or newer.
|
||||
|
||||
The following auxiliary components may be needed if you plan to hack on
|
||||
mitmproxy:
|
||||
|
||||
* The test suite uses the pry_ unit testing
|
||||
library.
|
||||
* Rendering the documentation requires countershape_.
|
||||
|
||||
**mitmproxy** is tested and developed on OSX, Linux and OpenBSD. Windows is not
|
||||
supported at the moment.
|
||||
|
||||
You should also make sure that your console environment is set up with the
|
||||
following:
|
||||
|
||||
* EDITOR environment variable to determine the external editor.
|
||||
* PAGER environment variable to determine the external pager.
|
||||
* Appropriate entries in your mailcap files to determine external
|
||||
viewers for request and response contents.
|
||||
|
||||
.. _mitmproxy.org: http://mitmproxy.org
|
||||
.. _github.com/cortesi/mitmproxy: http://github.com/cortesi/mitmproxy
|
||||
.. _python: http://www.python.org
|
||||
.. _PyOpenSSL: http://pypi.python.org/pypi/pyOpenSSL
|
||||
.. _pyasn1: http://pypi.python.org/pypi/pyasn1
|
||||
.. _PIL: http://www.pythonware.com/products/pil/
|
||||
.. _lxml: http://lxml.de/
|
||||
.. _urwid: http://excess.org/urwid/
|
||||
.. _pry: http://github.com/cortesi/pry
|
||||
.. _countershape: http://github.com/cortesi/countershape
|
||||
@@ -8,14 +8,14 @@ a {
|
||||
#hd.doc {
|
||||
-x-system-font:none;
|
||||
font-family: Helvetica,Arial,Tahoma,Verdana,Sans-Serif;
|
||||
color: #555555;
|
||||
color: #444444;
|
||||
margin: 0;
|
||||
margin-bottom: 1em;
|
||||
}
|
||||
|
||||
#hd.doc h1 {
|
||||
letter-spacing: 3px;
|
||||
font-size: 2.5em;
|
||||
line-height: 100%;
|
||||
margin: 0.3em 0;
|
||||
font-weight: normal;
|
||||
}
|
||||
@@ -26,26 +26,46 @@ a {
|
||||
|
||||
|
||||
#bd {
|
||||
-x-system-font:none;
|
||||
font-family: Helvetica,Arial,Tahoma,Verdana,Sans-Serif;
|
||||
font-size: 1.6em;
|
||||
color: #555555;
|
||||
font: 16px/21px "HelveticaNeue","Helvetica Neue",Helvetica,Arial,sans-serif;
|
||||
line-height: 1.5;
|
||||
color: #444444;
|
||||
}
|
||||
|
||||
#bd h1, #bd h2, #bd h3 {
|
||||
font-family: "Georgia","Times New Roman",Helvetica,Arial,sans-serif;
|
||||
font-weight: normal;
|
||||
color: #181818;
|
||||
}
|
||||
|
||||
#bd h1 {
|
||||
font-size: 1.4em;
|
||||
border-bottom: 5px solid #ff7033;
|
||||
font-size: 1.9em;
|
||||
border-bottom: 2px solid #ff7033;
|
||||
margin-top: 5px;
|
||||
margin-bottom: 5px;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
#bd h2 {
|
||||
font-size: 1.1em;
|
||||
font-size: 1.4em;
|
||||
border-bottom: 1px solid #cccccc;
|
||||
margin-top: 5px;
|
||||
margin-bottom: 5px;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
|
||||
#bd h3 {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
#bd p {
|
||||
margin: 1em 0;
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
|
||||
/* Keyboard shortcuts */
|
||||
#bd em {
|
||||
font-weight: bold;
|
||||
color: #00A700;
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
#ft.doc {
|
||||
@@ -63,18 +83,22 @@ a {
|
||||
}
|
||||
|
||||
pre {
|
||||
font-size: 0.9em;
|
||||
line-height: 1.4;
|
||||
padding: 10px;
|
||||
background-color: #dddddd;
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
.terminal {
|
||||
color: #ffffff;
|
||||
color: #c0c0c0;
|
||||
font-size: 1em;
|
||||
background: #000000;
|
||||
|
||||
|
||||
}
|
||||
|
||||
.docindex, .docindex ul {
|
||||
.docindex {
|
||||
font-size: 1.3em;
|
||||
line-height: 1.3;
|
||||
margin-top: 0.1em;
|
||||
margin-bottom: 0;
|
||||
margin-left: 0px;
|
||||
@@ -87,6 +111,8 @@ pre {
|
||||
|
||||
.docindex ul {
|
||||
margin-left: 2em;
|
||||
margin-top: 0.1em;
|
||||
margin-bottom: 0.1em;
|
||||
}
|
||||
|
||||
li a {
|
||||
@@ -96,3 +122,16 @@ li a {
|
||||
.highlight {
|
||||
font-size: 14px;
|
||||
}
|
||||
.example_legend{
|
||||
line-height: 1;
|
||||
font-size: 12px;
|
||||
}
|
||||
.example pre {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.kvtable th {
|
||||
text-align: left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
</div>
|
||||
<!--(end)-->
|
||||
$!nav if this.title!="docs" else ""!$
|
||||
<h1><a href="@!urlTo("/index.html")!@">mitmproxy docs</a></h1>
|
||||
<h1><a href="@!urlTo("/index.html")!@">mitmproxy 0.8 docs</a></h1>
|
||||
</div>
|
||||
<div id="bd">
|
||||
<div id="yui-main">
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
<a href="http://github.com/cortesi/mitmproxy"><img style="position: absolute; top: 0; right: 0; border: 0;" src="https://d3nwyuy0nl342s.cloudfront.net/img/e6bef7a091f5f3138b8cd40bc3e114258dd68ddf/687474703a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f7265645f6161303030302e706e67" alt="Fork me on GitHub"></a>
|
||||
<div class="yui-t7" id="doc">
|
||||
<div style="" id="hd">
|
||||
<h1><a href="@!urlTo("/index.html")!@">mitmproxy</a> </h1>
|
||||
<div class="HorizontalNavBar">
|
||||
<ul>
|
||||
<li class="inactive"><a href="@!urlTo("/index.html")!@">home</a></li>
|
||||
@@ -9,8 +8,9 @@
|
||||
<li class="inactive"><a href="@!urlTo("/about.html")!@">about</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<h1><a href="@!urlTo("/index.html")!@">mitmproxy</a> </h1>
|
||||
<br>
|
||||
<p>an SSL-capable intercepting proxy</p>
|
||||
<p>an SSL-capable man-in-the-middle proxy</p>
|
||||
</div>
|
||||
<div id="bd">
|
||||
<div id="yui-main">
|
||||
@@ -29,7 +29,7 @@
|
||||
</div>
|
||||
<!--(end)-->
|
||||
$!nav if this.title!="docs" else ""!$
|
||||
$!title if this.title!="docs" else "<h1>mitmproxy docs</h1>"!$
|
||||
$!title if this.title!="docs" else "<h1>mitmproxy 0.8 docs</h1>"!$
|
||||
$!body!$
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
|
||||
- command-line: _--anticache_
|
||||
- mitmproxy shortcut: _o_, then _a_
|
||||
|
||||
When the __anticache__ option is passed to mitmproxy, it removes headers
|
||||
(__if-none-match__ and __if-modified-since__) that might elicit a
|
||||
304-not-modified response from the server. This is useful when you want to make
|
||||
|
||||
BIN
doc-src/certinstall/android-proxydroidinstall.png
Normal file
|
After Width: | Height: | Size: 122 KiB |
BIN
doc-src/certinstall/android-proxydroidsettings.png
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
doc-src/certinstall/android-settingssecurityinstallca.png
Normal file
|
After Width: | Height: | Size: 56 KiB |
BIN
doc-src/certinstall/android-settingssecuritymenu.png
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
doc-src/certinstall/android-settingssecurityuserinstalledca.png
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
doc-src/certinstall/android-shellwgetmitmproxyca.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
103
doc-src/certinstall/android.html
Normal file
@@ -0,0 +1,103 @@
|
||||
|
||||
The proxy situation on Android is [an
|
||||
embarrasment](http://code.google.com/p/android/issues/detail?id=1273). It's
|
||||
scarcely credible, but Android didn't have a global proxy setting at all until
|
||||
quite recently, and it's still not supported on many common Android versions.
|
||||
In the meantime the app ecosystem has grown used to life without this basic
|
||||
necessity, and many apps merrily ignore it even if it's there. The upshot is
|
||||
that in many cases the only way to make interception work on Android is to do
|
||||
it without relying on the proxy settings.
|
||||
|
||||
We used an Asus Transformer Prime TF201 with Android 4.0.3 in the examples
|
||||
below - your device may differ, but the broad process should be similar.
|
||||
|
||||
|
||||
Installing the mitmproxy certificate
|
||||
====================================
|
||||
|
||||
The first step is to install mitmproxy's interception certificate on the
|
||||
Android device. In your ~/.mitmproxy directory, there should be a file called
|
||||
__mitmproxy-ca-cert.cer__ - we need to transfer this file to
|
||||
__/sdcard/Downloads__ on the Android device. If this file doesn't exist for
|
||||
you, your certs were generated with an older version of mitmproxy - just copy
|
||||
the __mitmproxy-ca-cert.pem__ file to __mitmproxy-ca-cert.cer__ and proceed
|
||||
from there.
|
||||
|
||||
In this case, we're using wget from Better Terminal Emulator Pro to transfer
|
||||
the certificate from a local HTTP server. Other terminal applications may work,
|
||||
and you might also transfer the file via external media like an SDcard:
|
||||
|
||||
<img src="android-shellwgetmitmproxyca.png"/>
|
||||
|
||||
Once we have the certificate on the local disk, we need to import it into the
|
||||
list of trusted CAs. Go to Settings -> Security -> Credential Storage,
|
||||
and select "Install from storage":
|
||||
|
||||
<img src="android-settingssecuritymenu.png"/>
|
||||
|
||||
The certificate in /sdcard/Downloads is automatically located and offered for
|
||||
installation. Installing the cert will delete the download file from the local
|
||||
disk:
|
||||
|
||||
<img src="android-settingssecurityinstallca.png"/>
|
||||
|
||||
Afterwards, you should see the certificate listed in the Trusted Credentials
|
||||
store:
|
||||
|
||||
<img src="android-settingssecurityuserinstalledca.png"/>
|
||||
|
||||
If you're lucky enough to be working with an app that obeys the wireless proxy
|
||||
settings, you're just about done - simply configure the settings to point at
|
||||
mitmproxy. If not, proceed to the next step...
|
||||
|
||||
|
||||
Working around Android's proxy shortcomings
|
||||
===========================================
|
||||
|
||||
In response to Android's proxy situation, a number of apps have been created to
|
||||
duct-tape proxy support onto the OS. These tools work by running a rudimentary
|
||||
local proxy on the device, and forwarding all traffic destined for HTTP/S ports
|
||||
to it using iptables. Since the proxy is running locally, it can detect what
|
||||
the final IP address of the redirected traffic would have been. The local proxy
|
||||
then connects to a user-configured upstream proxy, and initiates a proxy
|
||||
CONNECT request to the destination IP.
|
||||
|
||||
Now, if the configured upstream proxy is mitmproxy, we have a slight problem.
|
||||
Proxy requests from the Android device in this scheme will specify only the
|
||||
destination IP address, __not__ the destination domain. Mitmproxy needs the
|
||||
target domain to generate a valid interception certificate. The solution is
|
||||
mitmproxy's [upstream certificate](@!urlTo("upstreamcerts.html")!@) option.
|
||||
When this is active, mitmproxy makes a connection to the upstream server to
|
||||
obtain the certificate Common Name and Subject Alternative Names.
|
||||
|
||||
Adding all this together, we can achieve reliable Android interception with
|
||||
only a few more minutes of setup. The instructions below show how to set up an
|
||||
Android device with
|
||||
[ProxyDroid](https://play.google.com/store/apps/details?id=org.proxydroid) (the
|
||||
local "duct-tape" proxy implementation) to achieve interception.
|
||||
|
||||
Install ProxyDroid
|
||||
------------------
|
||||
|
||||
First, root your device - this is required to install ProxyDroid. Then install
|
||||
ProxyDroid from the Google Play store:
|
||||
|
||||
<img src="android-proxydroidinstall.png"/>
|
||||
|
||||
You will be prompted for super-user access, which you must allow. Next, enter
|
||||
the ProxyDroid settings, and change the proxy settings to point to your
|
||||
mitmproxy instance. When you're done, it should look something like this:
|
||||
|
||||
<img src="android-proxydroidsettings.png"/>
|
||||
|
||||
In this case, our mitmproxy instance is at the host __maru.otago.ac.nz__,
|
||||
running on port __8080__.
|
||||
|
||||
When you start mitmproxy, make sure that the upstream certificate option is set
|
||||
(use the _--upstream-cert_ command-line option, or enable it interactively
|
||||
using the _o_ shortcut):
|
||||
|
||||
<pre class="terminal">
|
||||
mitmproxy --upstream-cert
|
||||
</pre>
|
||||
|
||||
@@ -5,4 +5,5 @@ pages = [
|
||||
Page("osx.html", "OSX"),
|
||||
Page("windows7.html", "Windows 7"),
|
||||
Page("ios.html", "IOS"),
|
||||
Page("android.html", "Android"),
|
||||
]
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
|
||||
- command-line: _-c path_
|
||||
- mitmproxy shortcut: _c_
|
||||
|
||||
Client-side replay does what it says on the tin: you provide a previously saved
|
||||
HTTP conversation, and mitmproxy replays the client requests one by one. Note
|
||||
that mitmproxy serializes the requests, waiting for a response from the server
|
||||
before starting the next request. This might differ from the recorded
|
||||
conversation, where requests may have been made concurrently.
|
||||
|
||||
You may want to use client-side replay in conjunction with the __anticache__
|
||||
option. This will modify requests to remove headers (e.g. if-modified-since)
|
||||
that might cause a server to reply with a 304-not-modified.
|
||||
You may want to use client-side replay in conjunction with the
|
||||
[anticache](@!urlTo("anticache.html")!@) option.
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
### Any tips for running mitmproxy on OSX?
|
||||
## Any tips for running mitmproxy on OSX?
|
||||
|
||||
You can use the OSX <b>open</b> program to create a simple and effective
|
||||
<b>~/.mailcap</b> file to view HTTP bodies:
|
||||
@@ -12,7 +12,7 @@ video/*; /usr/bin/open -Wn %s
|
||||
</pre>
|
||||
|
||||
|
||||
### I'd like to hack on mitmproxy. What should I work on?
|
||||
## I'd like to hack on mitmproxy. What should I work on?
|
||||
|
||||
There's a __todo__ file at the top of the source tree that outlines a variety
|
||||
of tasks, from simple to complex. If you don't have your own itch, feel free to
|
||||
|
||||
@@ -26,9 +26,9 @@ URL containing "google.com":
|
||||
|
||||
Requests whose body contains the string "test":
|
||||
|
||||
~r ~b test
|
||||
~q ~b test
|
||||
|
||||
Anything but requests with a text/html content type:
|
||||
|
||||
!(~r & ~t \"text/html\")
|
||||
!(~q & ~t \"text/html\")
|
||||
|
||||
|
||||
@@ -1,19 +1,28 @@
|
||||
|
||||
<ul class="docindex">
|
||||
<li><a href="@!urlTo("intro.html")!@">Introduction</a></li>
|
||||
<li><a href="@!urlTo("interception.html")!@">Interception</a></li>
|
||||
<li><a href="@!urlTo("clientreplay.html")!@">Client-side replay</a></li>
|
||||
<li><a href="@!urlTo("serverreplay.html")!@">Server-side replay</a></li>
|
||||
<li><a href="@!urlTo("sticky.html")!@">Sticky cookies and auth</a></li>
|
||||
<li><a href="@!urlTo("anticache.html")!@">Anticache</a></li>
|
||||
<li><a href="@!urlTo("filters.html")!@">Filter expressions</a></li>
|
||||
<li><a href="@!urlTo("scripts.html")!@">Scripting API</a></li>
|
||||
<li><a href="@!urlTo("ssl.html")!@">SSL</a></li>
|
||||
<li><a href="@!urlTo("install.html")!@">Installation</a></li>
|
||||
<li><a href="@!urlTo("mitmproxy.html")!@">mitmproxy</a></li>
|
||||
<li><a href="@!urlTo("mitmdump.html")!@">mitmdump</a></li>
|
||||
<li>Features</li>
|
||||
<ul>
|
||||
<li><a href="@!urlTo("clientreplay.html")!@">Client-side replay</a></li>
|
||||
<li><a href="@!urlTo("serverreplay.html")!@">Server-side replay</a></li>
|
||||
<li><a href="@!urlTo("sticky.html")!@">Sticky cookies and auth</a></li>
|
||||
<li><a href="@!urlTo("reverseproxy.html")!@">Reverse proxy mode</a></li>
|
||||
<li><a href="@!urlTo("upstreamcerts.html")!@">Upstream Certs</a></li>
|
||||
<li><a href="@!urlTo("replacements.html")!@">Replacements</a></li>
|
||||
<li><a href="@!urlTo("anticache.html")!@">Anticache</a></li>
|
||||
<li><a href="@!urlTo("filters.html")!@">Filter expressions</a></li>
|
||||
</ul>
|
||||
<li><a href="@!urlTo("scripts.html")!@">Scripts</a></li>
|
||||
<li><a href="@!urlTo("ssl.html")!@">Setting up SSL interception</a></li>
|
||||
<ul>
|
||||
<li><a href="@!urlTo("certinstall/firefox.html")!@">Firefox</a></li>
|
||||
<li><a href="@!urlTo("certinstall/osx.html")!@">OSX</a></li>
|
||||
<li><a href="@!urlTo("certinstall/windows7.html")!@">Windows 7</a></li>
|
||||
<li><a href="@!urlTo("certinstall/ios.html")!@">iPhone/iPad</a></li>
|
||||
<li><a href="@!urlTo("certinstall/android.html")!@">Android</a></li>
|
||||
</ul>
|
||||
<li><a href="@!urlTo("library.html")!@">libmproxy</a></li>
|
||||
<li>Tutorials</li>
|
||||
|
||||
@@ -13,11 +13,11 @@ if ns.options.website:
|
||||
else:
|
||||
ns.title = countershape.template.Template(None, "<h1>@!this.title!@</h1>")
|
||||
this.layout = countershape.Layout("_layout.html")
|
||||
ns.docTitle = "mitmproxy"
|
||||
|
||||
this.markup = markup.Markdown()
|
||||
ns.docMaintainer = "Aldo Cortesi"
|
||||
ns.docMaintainerEmail = "aldo@corte.si"
|
||||
ns.copyright = u"\u00a9 mitmproxy project, 2011"
|
||||
ns.copyright = u"\u00a9 mitmproxy project, 2012"
|
||||
|
||||
ns.index = countershape.widgets.SiblingPageIndex('/index.html', divclass="pageindex")
|
||||
|
||||
@@ -33,7 +33,8 @@ ns.index_contents = file(mpath("README.mkd")).read()
|
||||
top = os.path.abspath(os.getcwd())
|
||||
def example(s):
|
||||
d = file(mpath(s)).read()
|
||||
return countershape.template.Syntax("py")(d)
|
||||
extemp = """<div class="example">%s<div class="example_legend">(%s)</div></div>"""
|
||||
return extemp%(countershape.template.Syntax("py")(d), s)
|
||||
|
||||
|
||||
ns.example = example
|
||||
@@ -61,20 +62,25 @@ filt_help.extend(
|
||||
]
|
||||
)
|
||||
ns.filt_help = filt_help
|
||||
|
||||
|
||||
|
||||
|
||||
pages = [
|
||||
Page("index.html", "docs"),
|
||||
Page("intro.html", "Introduction"),
|
||||
Page("interception.html", "Interception"),
|
||||
Page("install.html", "Installation"),
|
||||
Page("mitmproxy.html", "mitmproxy"),
|
||||
Page("mitmdump.html", "mitmdump"),
|
||||
Page("clientreplay.html", "Client-side replay"),
|
||||
Page("serverreplay.html", "Server-side replay"),
|
||||
Page("sticky.html", "Sticky cookies and auth"),
|
||||
Page("upstreamcerts.html", "Upstream Certs"),
|
||||
Page("replacements.html", "Replacements"),
|
||||
Page("reverseproxy.html", "Reverse proxy mode"),
|
||||
Page("anticache.html", "Anticache"),
|
||||
Page("filters.html", "Filter expressions"),
|
||||
Page("scripts.html", "External scripts"),
|
||||
Page("ssl.html", "SSL"),
|
||||
Page("scripts.html", "Scripts"),
|
||||
Page("ssl.html", "Setting up SSL interception"),
|
||||
Directory("certinstall"),
|
||||
Page("library.html", "libmproxy: mitmproxy as a library"),
|
||||
Directory("tutorials"),
|
||||
|
||||
57
doc-src/install.html
Normal file
@@ -0,0 +1,57 @@
|
||||
|
||||
## Using Pip
|
||||
|
||||
The preferred way to install mitmproxy is to use
|
||||
[pip](http://pypi.python.org/pypi/pip). A single command will install the
|
||||
latest release of mitmproxy, along with all its dependencies:
|
||||
|
||||
<pre>
|
||||
sudo pip install mitmproxy
|
||||
</pre>
|
||||
|
||||
|
||||
## From Source
|
||||
|
||||
- When installing from source, you will need to install the
|
||||
[dependencies](@!urlTo("intro.html")!@) by hand.
|
||||
- Then run the following command from the base of the source distribution:
|
||||
|
||||
<pre>
|
||||
sudo python setup.py install
|
||||
</pre>
|
||||
|
||||
|
||||
## OSX
|
||||
|
||||
- Make sure that XCode is installed from the App Store, and that the
|
||||
command-line tools have been downloaded (XCode/Preferences/Downloads).
|
||||
- Install __pip__ using the following command:
|
||||
|
||||
<pre>
|
||||
sudo easy_install pip
|
||||
</pre>
|
||||
|
||||
- Now use __pip__ to set up the dependencies and do the install:
|
||||
|
||||
<pre>
|
||||
sudo pip install mitmproxy
|
||||
</pre>
|
||||
|
||||
This procedure may vary if, for instance, you've installed Python from an
|
||||
external source like [homebrew](http://mxcl.github.com/homebrew/). In that
|
||||
case, the easiest way to proceed is to first install __easy_install__, then
|
||||
continue as above.
|
||||
|
||||
There are a few bits of customization you might want to do to make mitmproxy
|
||||
comfortable to use. At the moment, mitmproxy's color scheme is optimized for a
|
||||
dark background terminal, so you probably want to change the default. You can
|
||||
use the OSX <b>open</b> program to create a simple and effective
|
||||
<b>~/.mailcap</b> file to view HTTP bodies:
|
||||
|
||||
<pre>
|
||||
application/*; /usr/bin/open -Wn %s
|
||||
audio/*; /usr/bin/open -Wn %s
|
||||
image/*; /usr/bin/open -Wn %s
|
||||
video/*; /usr/bin/open -Wn %s
|
||||
</pre>
|
||||
|
||||
@@ -1,38 +0,0 @@
|
||||
|
||||
__mitmproxy__'s interception functionality lets you pause an HTTP request or
|
||||
response, inspect and modify it, and then accept it to send it on to the server
|
||||
or client. Because this is an interactive function, it's only present in
|
||||
__mitmproxy__, not in __mitmdump__.
|
||||
|
||||
|
||||
### 1: Set an interception pattern
|
||||
|
||||
<img src="@!urlTo('intercept-filt.png')!@"/>
|
||||
|
||||
We press __i__ to set an interception pattern. In this case, the __~q__ filter
|
||||
pattern tells __mitmproxy__ to intercept all requests. For complete filter
|
||||
syntax, see the [Filter expressions](@!urlTo("filters.html")!@) section of this
|
||||
document, or the built-in help function in __mitmproxy__.
|
||||
|
||||
### 2: Intercepted connections are indicated with a red exclamation mark:
|
||||
|
||||
<img src="@!urlTo('intercept-mid.png')!@"/>
|
||||
|
||||
### 3: You can now view and modify the request:
|
||||
|
||||
<img src="@!urlTo('intercept-options.png')!@"/>
|
||||
|
||||
In this case, we viewed the request by selecting it, pressed __e__ for "edit"
|
||||
and __m__ for "method" to change the HTTP request method.
|
||||
|
||||
### 4: Accept the intercept to continue
|
||||
|
||||
<img src="@!urlTo('intercept-result.png')!@"/>
|
||||
|
||||
Finally, we press __a__ to accept the modified request, which is then sent on
|
||||
to the server. In this case, we changed the request from an HTTP GET to to
|
||||
OPTIONS, and Google's server has responded with a 405 "Method not allowed".
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@ this lets you log in to a site using your browser, and then make subsequent
|
||||
requests using a tool like __curl__, which will then seem to be part of the
|
||||
authenticated session.
|
||||
|
||||
$!example("examples/stickycookies.py")!$
|
||||
$!example("examples/stickycookies")!$
|
||||
|
||||
|
||||
54
doc-src/mitmdump.html
Normal file
@@ -0,0 +1,54 @@
|
||||
|
||||
__mitmdump__ is the command-line companion to mitmproxy. It provides
|
||||
tcpdump-like functionality to let you view, record, and programmatically
|
||||
transform HTTP traffic. See the _--help_ flag output for complete
|
||||
documentation.
|
||||
|
||||
|
||||
|
||||
## Example: saving traffic
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmdump -w outfile
|
||||
</pre>
|
||||
|
||||
Start up mitmdump in proxy mode, and write all traffic to __outfile__.
|
||||
|
||||
|
||||
## Example: client replay
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmdump -nc outfile
|
||||
</pre>
|
||||
|
||||
Start mitmdump without binding to the proxy port (_-n_), then replay all
|
||||
requests from outfile (_-c filename_). Flags combine in the obvious way, so
|
||||
you can replay requests from one file, and write the resulting flows to
|
||||
another:
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmdump -nc srcfile -w dstfile
|
||||
</pre>
|
||||
|
||||
See the [Client-side Replay](@!urlTo("clientreplay.html")!@) section for more information.
|
||||
|
||||
|
||||
## Example: running a script
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmdump -s examples/add_header.py
|
||||
</pre>
|
||||
|
||||
This runs the __add_header.py__ example script, which simply adds a new header
|
||||
to all responses.
|
||||
|
||||
|
||||
## Example: scripted data transformation
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmdump -ns examples/add_header.py -r srcfile -w dstfile
|
||||
</pre>
|
||||
|
||||
This command loads flows from __srcfile__, transforms it according to the
|
||||
specified script, then writes it back to __dstfile__.
|
||||
|
||||
112
doc-src/mitmproxy.html
Normal file
@@ -0,0 +1,112 @@
|
||||
|
||||
__mitmproxy__ is a console tool that allows interactive examination and
|
||||
modification of HTTP traffic. Use the _?_ shortcut key to view,
|
||||
context-sensitive documentation from any __mitmproxy__ screen.
|
||||
|
||||
## Flow list
|
||||
|
||||
The flow list shows an index of captured flows in chronological order.
|
||||
|
||||
<img src="@!urlTo("screenshots/mitmproxy.png")!@"/>
|
||||
|
||||
- __1__: A GET request, returning a 302 Redirect response.
|
||||
- __2__: A GET request, returning 16.75kb of text/html data.
|
||||
- __3__: A replayed request.
|
||||
- __4__: Intercepted flows are indicated with orange text. The user may edit
|
||||
these flows, and then accept them (using the _a_ key) to continue. In this
|
||||
case, the request has been intercepted on the way to the server.
|
||||
- __5__: A response intercepted from the server on the way to the client.
|
||||
- __6__: The event log can be toggled on and off using the _e_ shortcut key. This
|
||||
pane shows events and errors that may not result in a flow that shows up in the
|
||||
flow pane.
|
||||
- __7__: Flow count.
|
||||
- __8__: Various information on mitmproxy's state. In this case, we have an
|
||||
interception pattern set to ".*".
|
||||
- __9__: Bind address indicator - mitmproxy is listening on port 8080 of all
|
||||
interfaces.
|
||||
|
||||
|
||||
## Flow view
|
||||
|
||||
The __Flow View__ lets you inspect and manipulate a single flow:
|
||||
|
||||
<img src="@!urlTo("screenshots/mitmproxy-flowview.png")!@"/>
|
||||
|
||||
- __1__: Flow summary.
|
||||
- __2__: The Request/Response tabs, showing you which part of the flow you are
|
||||
currently viewing. In the example above, we're viewing the Response. Hit _tab_
|
||||
to switch between the Response and the Request.
|
||||
- __3__: Headers.
|
||||
- __4__: Body.
|
||||
- __5__: View Mode indicator. In this case, we're viewing the body in __hex__
|
||||
mode. The other available modes are __pretty__, which uses a number of
|
||||
heuristics to show you a friendly view of various content types, and __raw__,
|
||||
which shows you exactly what's there without any changes. You can change modes
|
||||
using the _m_ key.
|
||||
|
||||
|
||||
## Grid Editor
|
||||
|
||||
Much of the data that we'd like to interact with in mitmproxy is structured.
|
||||
For instance, headers, queries and form data can all be thought of as a list of
|
||||
key/value pairs. Mitmproxy has a built-in editor that lays this type of data
|
||||
out in a grid for easy manipulation.
|
||||
|
||||
At the moment, the Grid Editor is used in four parts of mitmproxy:
|
||||
|
||||
- Editing request or response headers (_e_ for edit, then _h_ for headers in flow view)
|
||||
- Editing a query string (_e_ for edit, then _q_ for query in flow view)
|
||||
- Editing a URL-encoded form (_e_ for edit, then _f_ for form in flow view)
|
||||
- Editing replacement patterns (_R_ globally)
|
||||
|
||||
If there is is no data, an empty editor will be started to let you add some.
|
||||
Here is the editor showing the headers from a request:
|
||||
|
||||
<img src="@!urlTo("screenshots/mitmproxy-kveditor.png")!@"/>
|
||||
|
||||
To edit, navigate to the key or value you want to modify using the arrow or vi
|
||||
navigation keys, and press enter. The background color will change to show that
|
||||
you are in edit mode for the specified field:
|
||||
|
||||
<img src="@!urlTo("screenshots/mitmproxy-kveditor-editmode.png")!@"/>
|
||||
|
||||
Modify the field as desired, then press escape to exit edit mode when you're
|
||||
done. You can also add a row (_a_ key), delete a row (_d_ key), spawn an
|
||||
external editor on a field (_e_ key). Be sure to consult the context-sensitive
|
||||
help (_?_ key) for more.
|
||||
|
||||
|
||||
# Example: Interception
|
||||
|
||||
__mitmproxy__'s interception functionality lets you pause an HTTP request or
|
||||
response, inspect and modify it, and then accept it to send it on to the server
|
||||
or client.
|
||||
|
||||
|
||||
### 1: Set an interception pattern
|
||||
|
||||
<img src="@!urlTo('mitmproxy-intercept-filt.png')!@"/>
|
||||
|
||||
We press _i_ to set an interception pattern. In this case, the __~q__ filter
|
||||
pattern tells __mitmproxy__ to intercept all requests. For complete filter
|
||||
syntax, see the [Filter expressions](@!urlTo("filters.html")!@) section of this
|
||||
document, or the built-in help function in __mitmproxy__.
|
||||
|
||||
### 2: Intercepted connections are indicated with orange text:
|
||||
|
||||
<img src="@!urlTo('mitmproxy-intercept-mid.png')!@"/>
|
||||
|
||||
### 3: You can now view and modify the request:
|
||||
|
||||
<img src="@!urlTo('mitmproxy-intercept-options.png')!@"/>
|
||||
|
||||
In this case, we viewed the request by selecting it, pressed _e_ for "edit"
|
||||
and _m_ for "method" to change the HTTP request method.
|
||||
|
||||
### 4: Accept the intercept to continue:
|
||||
|
||||
<img src="@!urlTo('mitmproxy-intercept-result.png')!@"/>
|
||||
|
||||
Finally, we press _a_ to accept the modified request, which is then sent on to
|
||||
the server. In this case, we changed the request from an HTTP GET to
|
||||
OPTIONS, and Google's server has responded with a 405 "Method not allowed".
|
||||
61
doc-src/replacements.html
Normal file
@@ -0,0 +1,61 @@
|
||||
- command-line: _--replace_, _--replace-from-file_
|
||||
- mitmproxy shortcut: _R_
|
||||
|
||||
Mitmproxy lets you specify an arbitrary number of patterns that define text
|
||||
replacements within flows. Each pattern has 3 components: a filter that defines
|
||||
which flows a replacement applies to, a regular expression that defines what
|
||||
gets replaced, and a target value that defines what is substituted in.
|
||||
|
||||
Replace hooks fire when either a client request or a server response is
|
||||
received. Only the matching flow component is affected: so, for example, if a
|
||||
replace hook is triggered on server response, the replacement is only run on
|
||||
the Response object leaving the Request intact. You control whether the hook
|
||||
triggers on the request, response or both using the filter pattern. If you need
|
||||
finer-grained control than this, it's simple to create a script using the
|
||||
replacement API on Flow components.
|
||||
|
||||
Replacement hooks are extremely handy in interactive testing of applications.
|
||||
For instance you can use a replace hook to replace the text "XSS" with a
|
||||
complicated XSS exploit, and then "inject" the exploit simply by interacting
|
||||
with the application through the browser. When used with tools like Firebug and
|
||||
mitmproxy's own interception abilities, replacement hooks can be an amazingly
|
||||
flexible and powerful feature.
|
||||
|
||||
|
||||
## On the command-line
|
||||
|
||||
The replacement hook command-line options use a compact syntax to make it easy
|
||||
to specify all three components at once. The general form is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
Here, __patt__ is a mitmproxy filter expression, __regex__ is a valid Python
|
||||
regular expression, and __replacement__ is a string literal. The first
|
||||
character in the expression (__/__ in this case) defines what the separation
|
||||
character is. Here's an example of a valid expression that replaces "foo" with
|
||||
"bar" in all requests:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
In practice, it's pretty common for the replacement literal to be long and
|
||||
complex. For instance, it might be an XSS exploit that weighs in at hundreds or
|
||||
thousands of characters. To cope with this, there's a variation of the
|
||||
replacement hook specifier that lets you load the replacement text from a file.
|
||||
So, you might start __mitmdump__ as follows:
|
||||
|
||||
<pre class="terminal">
|
||||
mitmdump --replace-from-file :~q:foo:~/xss-exploit
|
||||
</pre>
|
||||
|
||||
This will load the replacement text from the file __~/xss-exploit__.
|
||||
|
||||
Both the _--replace_ and _--replace-from-file_ flags can be passed multiple
|
||||
times.
|
||||
|
||||
|
||||
## Interactively
|
||||
|
||||
The _R_ shortcut key in mitmproxy lets you add and edit replacement hooks using
|
||||
a built-in editor. The context-sensitive help (_h_) has complete usage
|
||||
information.
|
||||
|
||||
8
doc-src/reverseproxy.html
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
- command-line: _-P_ http[s]://hostname[:port]
|
||||
- mitmproxy shortcut: _P_
|
||||
|
||||
In reverse proxy mode, mitmproxy acts as a standard HTTP server and forwards
|
||||
all requests to the specified upstream server. Note that the displayed URL for
|
||||
flows in this mode will use the value of the __Host__ header field from the
|
||||
request, not the reverse proxy server.
|
||||
|
Before Width: | Height: | Size: 30 KiB |
|
Before Width: | Height: | Size: 34 KiB |
|
Before Width: | Height: | Size: 72 KiB |
|
Before Width: | Height: | Size: 40 KiB |
BIN
doc-src/screenshots/mitmproxy-flowview.png
Normal file
|
After Width: | Height: | Size: 308 KiB |
BIN
doc-src/screenshots/mitmproxy-intercept-filt.png
Normal file
|
After Width: | Height: | Size: 18 KiB |
BIN
doc-src/screenshots/mitmproxy-intercept-mid.png
Normal file
|
After Width: | Height: | Size: 19 KiB |
BIN
doc-src/screenshots/mitmproxy-intercept-options.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
doc-src/screenshots/mitmproxy-intercept-result.png
Normal file
|
After Width: | Height: | Size: 22 KiB |
BIN
doc-src/screenshots/mitmproxy-kveditor-editmode.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
doc-src/screenshots/mitmproxy-kveditor.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
doc-src/screenshots/mitmproxy.png
Normal file
|
After Width: | Height: | Size: 149 KiB |
@@ -1,15 +1,129 @@
|
||||
|
||||
Both __mitmproxy__ and __mitmdump__ allow you to modify requests and responses
|
||||
with external scripts. This is often done through the __--reqscript__ and
|
||||
__--respscript__ options
|
||||
__mitmproxy__ has a powerful scripting API that allows you to modify flows
|
||||
on-the-fly or rewrite previously saved flows locally.
|
||||
|
||||
The mitmproxy scripting API is event driven - a script is simply a Python
|
||||
module that exposes a set of event methods. Here's a complete mitmproxy script
|
||||
that adds a new header to every HTTP response before it is returned to the
|
||||
client:
|
||||
|
||||
$!example("examples/add_header.py")!$
|
||||
|
||||
The first argument to each event method is an instance of ScriptContext that
|
||||
lets the script interact with the global mitmproxy state. The __response__
|
||||
event also gets an instance of Flow, which we can use to manipulate the
|
||||
response itself.
|
||||
|
||||
|
||||
The script interface is simple - scripts simply read,
|
||||
modify and return a single __libmproxy.flow.Flow__ object, using the methods
|
||||
defined in the __libmproxy.script__ module. Scripts must be executable.
|
||||
## Events
|
||||
|
||||
$!example("examples/simple_script")!$
|
||||
### start(ScriptContext)
|
||||
|
||||
Called once on startup, before any other events.
|
||||
|
||||
|
||||
###clientconnect(ScriptContext, ClientConnect)
|
||||
|
||||
Called when a client initiates a connection to the proxy. Note that
|
||||
a connection can correspond to multiple HTTP requests.
|
||||
|
||||
|
||||
###request(ScriptContext, Flow)
|
||||
|
||||
Called when a client request has been received. The __Flow__ object is
|
||||
guaranteed to have a non-None __request__ attribute.
|
||||
|
||||
|
||||
### response(ScriptContext, Flow)
|
||||
|
||||
Called when a server response has been received. The __Flow__ object is
|
||||
guaranteed to have non-None __request__ and __response__ attributes.
|
||||
|
||||
|
||||
### error(ScriptContext, Flow)
|
||||
|
||||
Called when a flow error has occurred, e.g. invalid server responses, or
|
||||
interrupted connections. This is distinct from a valid server HTTP error
|
||||
response, which is simply a response with an HTTP error code. The __Flow__
|
||||
object is guaranteed to have non-None __request__ and __error__ attributes.
|
||||
|
||||
|
||||
### clientdisconnect(ScriptContext, ClientDisconnect)
|
||||
|
||||
Called when a client disconnects from the proxy.
|
||||
|
||||
### done(ScriptContext)
|
||||
|
||||
Called once on script shutdown, after any other events.
|
||||
|
||||
|
||||
## API
|
||||
|
||||
The main classes you will deal with in writing mitmproxy scripts are:
|
||||
|
||||
<table class="kvtable">
|
||||
<tr>
|
||||
<th>libmproxy.flow.ClientConnection</th>
|
||||
<td>Describes a client connection.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.ClientDisconnection</th>
|
||||
<td>Describes a client disconnection.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.Error</th>
|
||||
<td>A communications error.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.Flow</th>
|
||||
<td>A collection of objects representing a single HTTP transaction.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.Headers</th>
|
||||
<td>HTTP headers for a request or response.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.ODict</th>
|
||||
|
||||
<td>A dictionary-like object for managing sets of key/value data. There
|
||||
is also a variant called CaselessODict that ignores key case for some
|
||||
calls (used mainly for headers).</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.Response</th>
|
||||
<td>An HTTP response.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.Request</th>
|
||||
<td>An HTTP request.</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.flow.ScriptContext</th>
|
||||
<td> A handle for interacting with mitmproxy's from within scripts. </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th>libmproxy.certutils.SSLCert</th>
|
||||
<td>Exposes information SSL certificates.</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
The canonical API documentation is the code. You can view the API documentation
|
||||
using pydoc (which is installed with Python by default), like this:
|
||||
|
||||
<pre class="terminal">
|
||||
> pydoc libmproxy.flow.Request
|
||||
</pre>
|
||||
|
||||
|
||||
## Running scripts on saved flows
|
||||
|
||||
Sometimes, we want to run a script on __Flow__ objects that are already
|
||||
complete. This happens when you start a script, and then load a saved set of
|
||||
flows from a file (see the "scripted data transformation" example on the
|
||||
[mitmdump](@!urlTo("mitmdump.html")!@) page). It also happens when you run a
|
||||
one-shot script on a single flow through the _|_ (pipe) shortcut in mitmproxy.
|
||||
|
||||
In this case, there are no client connections, and the events are run in the
|
||||
following order: __start__, __request__, __response__, __error__, __done__. If
|
||||
the flow doesn't have a __response__ or __error__ associated with it, the
|
||||
matching event will be skipped.
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
|
||||
- command-line: _-S path_
|
||||
- mitmproxy shortcut: _S_
|
||||
|
||||
Server-side replay lets us replay server responses from a saved HTTP
|
||||
conversation.
|
||||
|
||||
@@ -8,10 +11,9 @@ Matching requests with responses
|
||||
By default, __mitmproxy__ excludes request headers when matching incoming
|
||||
requests with responses from the replay file. This works in most circumstances,
|
||||
and makes it possible to replay server responses in situations where request
|
||||
headers would naturally vary, e.g. using a different user agent. The
|
||||
__--rheader__ option to both __mitmproxy__ and __mitmdump__ allows you to
|
||||
override this behaviour by specifying individual headers that should be
|
||||
included in matching.
|
||||
headers would naturally vary, e.g. using a different user agent. The _--rheader
|
||||
headername_ command-line option allows you to override this behaviour by
|
||||
specifying individual headers that should be included in matching.
|
||||
|
||||
|
||||
Response refreshing
|
||||
@@ -27,8 +29,7 @@ recording. So, if they were in the past at the time of recording, they will be
|
||||
in the past at the time of replay, and vice versa. Cookie expiry times are
|
||||
updated in a similar way.
|
||||
|
||||
You can turn off response refreshing using the __norefresh__ option, available
|
||||
both on the command-line and using the "options" keyboard shortcut within
|
||||
__mitmproxy__.
|
||||
You can turn off response refreshing using the _--norefresh_ argument, or using
|
||||
the _o_ options shortcut within __mitmproxy__.
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
|
||||
When the __stickycookie__ option is set, __mitmproxy__ will add the cookie most
|
||||
## Sticky cookies
|
||||
|
||||
- command-line: _-t_ (sticky cookies on all requests)
|
||||
- command-line: _-T filt_ (sticky cookies on requests matching filt)
|
||||
- mitmproxy shortcut: _t_
|
||||
|
||||
When the sticky cookie option is set, __mitmproxy__ will add the cookie most
|
||||
recently set by the server to any cookie-less request. Consider a service that
|
||||
sets a cookie to track the session after authentication. Using sticky cookies,
|
||||
you can fire up mitmproxy, and authenticate to a service as you usually would
|
||||
@@ -17,8 +23,12 @@ with the secured resources.
|
||||
|
||||
## Sticky auth
|
||||
|
||||
The __stickyauth__ option is analogous to the __stickycookie__ option, in that
|
||||
HTTP __Authorization__ headers are simply replayed to the server once they have
|
||||
been seen. This is enough to allow you to access a server resource using HTTP
|
||||
Basic authentication through the proxy. Note that __mitmproxy__ doesn't (yet)
|
||||
support replay of HTTP Digest authentication.
|
||||
- command-line: _-u_ (sticky auth on all requests)
|
||||
- command-line: _-U filt_ (sticky auth on requests matching filt)
|
||||
- mitmproxy shortcut: _u_
|
||||
|
||||
The sticky auth option is analogous to the sticky cookie option, in that HTTP
|
||||
__Authorization__ headers are simply replayed to the server once they have been
|
||||
seen. This is enough to allow you to access a server resource using HTTP Basic
|
||||
authentication through the proxy. Note that __mitmproxy__ doesn't (yet) support
|
||||
replay of HTTP Digest authentication.
|
||||
|
||||
@@ -49,13 +49,13 @@ voila! - totally hands-free wireless network startup.
|
||||
|
||||
We might also want to prune requests that download CSS, JS, images and so
|
||||
forth. These add only a few moments to the time it takes to replay, but they're
|
||||
not really needed and I somehow feel compelled trim them anyway. So, we fire up
|
||||
not really needed and I somehow feel compelled to trim them anyway. So, we fire up
|
||||
the mitmproxy console tool on our serialized conversation, like so:
|
||||
|
||||
<pre class="terminal">
|
||||
> mitmproxy wireless-login
|
||||
> mitmproxy -r wireless-login
|
||||
</pre>
|
||||
|
||||
We can now go through and manually delete (using the __d__ keyboard shortcut)
|
||||
everything we want to trim. When we're done, we use __S__ to save the
|
||||
everything we want to trim. When we're done, we use __w__ to save the
|
||||
conversation back to the file.
|
||||
|
||||
15
doc-src/upstreamcerts.html
Normal file
@@ -0,0 +1,15 @@
|
||||
- command-line: _--upstream-cert_
|
||||
- mitmproxy shortcut: _o_, then _u_
|
||||
|
||||
Normally, mitmproxy uses the target domain specified in a client's proxy
|
||||
request to generate an interception certificate. When __upstream-cert__ mode is
|
||||
activated a different procedure is followed: a connection is made to the
|
||||
specified remote server to retrieve its __Common Name__ and __Subject
|
||||
Alternative Names__. This feature is especially useful when the client
|
||||
specifies an IP address rather than a host name in the proxy request. If this
|
||||
is the case, we can only generate a certificate if we can establish the __CN__
|
||||
and __SANs__ from the upstream server.
|
||||
|
||||
Note that __upstream-cert__ mode does not work when the remote server relies on
|
||||
[Server Name Indication](http://en.wikipedia.org/wiki/Server_Name_Indication).
|
||||
Luckily, SNI is still not very widely used.
|
||||
8
examples/README
Normal file
@@ -0,0 +1,8 @@
|
||||
add_header.py Simple script that just adds a header to every request.
|
||||
dup_and_replay.py Duplicates each request, changes it, and then replays the modified request.
|
||||
flowbasic Basic use of mitmproxy as a library.
|
||||
modify_form.py Modify all form submissions to add a parameter.
|
||||
modify_querystring.py Modify all query strings to add a parameters.
|
||||
stub.py Script stub with a method definition for every event.
|
||||
stickycookies An example of writing a custom proxy with libmproxy.
|
||||
upsidedownternet.py Rewrites traffic to turn PNGs upside down.
|
||||
2
examples/add_header.py
Normal file
@@ -0,0 +1,2 @@
|
||||
def response(context, flow):
|
||||
flow.response.headers["newheader"] = ["foo"]
|
||||
4
examples/dup_and_replay.py
Normal file
@@ -0,0 +1,4 @@
|
||||
def request(ctx, flow):
|
||||
f = ctx.duplicate_flow(flow)
|
||||
f.request.path = "/changed"
|
||||
ctx.replay_request(f)
|
||||
39
examples/flowbasic
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
This example shows how to build a proxy based on mitmproxy's Flow
|
||||
primitives.
|
||||
|
||||
Note that request and response messages are not automatically acked, so we
|
||||
need to implement handlers to do this.
|
||||
"""
|
||||
import os
|
||||
from libmproxy import proxy, flow
|
||||
|
||||
class MyMaster(flow.FlowMaster):
|
||||
def run(self):
|
||||
try:
|
||||
flow.FlowMaster.run(self)
|
||||
except KeyboardInterrupt:
|
||||
self.shutdown()
|
||||
|
||||
def handle_request(self, r):
|
||||
f = flow.FlowMaster.handle_request(self, r)
|
||||
if f:
|
||||
r._ack()
|
||||
return f
|
||||
|
||||
def handle_response(self, r):
|
||||
f = flow.FlowMaster.handle_response(self, r)
|
||||
if f:
|
||||
r._ack()
|
||||
print f
|
||||
return f
|
||||
|
||||
|
||||
config = proxy.ProxyConfig(
|
||||
cacert = os.path.expanduser("~/.mitmproxy/mitmproxy-ca.pem")
|
||||
)
|
||||
state = flow.State()
|
||||
server = proxy.ProxyServer(config, 8080)
|
||||
m = MyMaster(server, state)
|
||||
m.run()
|
||||
8
examples/modify_form.py
Normal file
@@ -0,0 +1,8 @@
|
||||
|
||||
def request(context, flow):
|
||||
if "application/x-www-form-urlencoded" in flow.request.headers["content-type"]:
|
||||
frm = flow.request.get_form_urlencoded()
|
||||
frm["mitmproxy"] = ["rocks"]
|
||||
flow.request.set_form_urlencoded(frm)
|
||||
|
||||
|
||||
7
examples/modify_querystring.py
Normal file
@@ -0,0 +1,7 @@
|
||||
|
||||
def request(context, flow):
|
||||
q = flow.request.get_query()
|
||||
if q:
|
||||
q["mitmproxy"] = ["rocks"]
|
||||
flow.request.set_query(q)
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
from libmproxy import script
|
||||
|
||||
f = script.load_flow()
|
||||
f.request.headers["newheader"] = ["foo"]
|
||||
script.return_flow(f)
|
||||
27
examples/stickycookies.py → examples/stickycookies
Normal file → Executable file
@@ -1,8 +1,12 @@
|
||||
#!/usr/bin/env python
|
||||
"""
|
||||
This example builds on mitmproxy's base proxying infrastructure to
|
||||
implement functionality similar to the "sticky cookies" option. This is at
|
||||
a lower level than the Flow mechanism, so we're dealing directly with
|
||||
request and response objects.
|
||||
"""
|
||||
from libmproxy import controller, proxy
|
||||
|
||||
proxy.config = proxy.Config(
|
||||
"~/.mitmproxy/cert.pem"
|
||||
)
|
||||
import os
|
||||
|
||||
class StickyMaster(controller.Master):
|
||||
def __init__(self, server):
|
||||
@@ -17,19 +21,22 @@ class StickyMaster(controller.Master):
|
||||
|
||||
def handle_request(self, msg):
|
||||
hid = (msg.host, msg.port)
|
||||
if msg.headers.has_key("cookie"):
|
||||
if msg.headers["cookie"]:
|
||||
self.stickyhosts[hid] = msg.headers["cookie"]
|
||||
elif hid in self.stickyhosts:
|
||||
msg.headers["cookie"] = self.stickyhosts[hid]
|
||||
msg.ack()
|
||||
msg._ack()
|
||||
|
||||
def handle_response(self, msg):
|
||||
hid = (msg.request.host, msg.request.port)
|
||||
if msg.headers.has_key("set-cookie"):
|
||||
self.stickyhosts[hid] = f.response.headers["set-cookie"]
|
||||
msg.ack()
|
||||
if msg.headers["set-cookie"]:
|
||||
self.stickyhosts[hid] = msg.headers["set-cookie"]
|
||||
msg._ack()
|
||||
|
||||
|
||||
server = proxy.ProxyServer(8080)
|
||||
config = proxy.ProxyConfig(
|
||||
cacert = os.path.expanduser("~/.mitmproxy/mitmproxy-ca.pem")
|
||||
)
|
||||
server = proxy.ProxyServer(config, 8080)
|
||||
m = StickyMaster(server)
|
||||
m.run()
|
||||
48
examples/stub.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""
|
||||
This is a script stub, with definitions for all events.
|
||||
"""
|
||||
|
||||
def start(ctx):
|
||||
"""
|
||||
Called once on script startup, before any other events.
|
||||
"""
|
||||
ctx.log("start")
|
||||
|
||||
def clientconnect(ctx, client_connect):
|
||||
"""
|
||||
Called when a client initiates a connection to the proxy. Note that a
|
||||
connection can correspond to multiple HTTP requests
|
||||
"""
|
||||
ctx.log("clientconnect")
|
||||
|
||||
def request(ctx, flow):
|
||||
"""
|
||||
Called when a client request has been received.
|
||||
"""
|
||||
ctx.log("request")
|
||||
|
||||
def response(ctx, flow):
|
||||
"""
|
||||
Called when a server response has been received.
|
||||
"""
|
||||
ctx.log("response")
|
||||
|
||||
def error(ctx, flow):
|
||||
"""
|
||||
Called when a flow error has occured, e.g. invalid server responses, or
|
||||
interrupted connections. This is distinct from a valid server HTTP error
|
||||
response, which is simply a response with an HTTP error code.
|
||||
"""
|
||||
ctx.log("error")
|
||||
|
||||
def clientdisconnect(ctx, client_disconnect):
|
||||
"""
|
||||
Called when a client disconnects from the proxy.
|
||||
"""
|
||||
ctx.log("clientdisconnect")
|
||||
|
||||
def done(ctx):
|
||||
"""
|
||||
Called once on script shutdown, after any other events.
|
||||
"""
|
||||
ctx.log("done")
|
||||
8
examples/upsidedownternet.py
Normal file
@@ -0,0 +1,8 @@
|
||||
import Image, cStringIO
|
||||
def response(context, flow):
|
||||
if flow.response.headers["content-type"] == ["image/png"]:
|
||||
s = cStringIO.StringIO(flow.response.content)
|
||||
img = Image.open(s).rotate(180)
|
||||
s2 = cStringIO.StringIO()
|
||||
img.save(s2, "png")
|
||||
flow.response.content = s2.getvalue()
|
||||
221
libmproxy/certutils.py
Normal file
@@ -0,0 +1,221 @@
|
||||
import os, ssl, hashlib, socket, time, datetime
|
||||
from pyasn1.type import univ, constraint, char, namedtype, tag
|
||||
from pyasn1.codec.der.decoder import decode
|
||||
import OpenSSL
|
||||
|
||||
CERT_SLEEP_TIME = 1
|
||||
CERT_EXPIRY = str(365 * 3)
|
||||
|
||||
|
||||
def create_ca():
|
||||
key = OpenSSL.crypto.PKey()
|
||||
key.generate_key(OpenSSL.crypto.TYPE_RSA, 1024)
|
||||
ca = OpenSSL.crypto.X509()
|
||||
ca.set_serial_number(int(time.time()*10000))
|
||||
ca.set_version(2)
|
||||
ca.get_subject().CN = "mitmproxy"
|
||||
ca.get_subject().O = "mitmproxy"
|
||||
ca.gmtime_adj_notBefore(0)
|
||||
ca.gmtime_adj_notAfter(24 * 60 * 60 * 720)
|
||||
ca.set_issuer(ca.get_subject())
|
||||
ca.set_pubkey(key)
|
||||
ca.add_extensions([
|
||||
OpenSSL.crypto.X509Extension("basicConstraints", True,
|
||||
"CA:TRUE"),
|
||||
OpenSSL.crypto.X509Extension("nsCertType", True,
|
||||
"sslCA"),
|
||||
OpenSSL.crypto.X509Extension("extendedKeyUsage", True,
|
||||
"serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC"
|
||||
),
|
||||
OpenSSL.crypto.X509Extension("keyUsage", False,
|
||||
"keyCertSign, cRLSign"),
|
||||
OpenSSL.crypto.X509Extension("subjectKeyIdentifier", False, "hash",
|
||||
subject=ca),
|
||||
])
|
||||
ca.sign(key, "sha1")
|
||||
return key, ca
|
||||
|
||||
|
||||
def dummy_ca(path):
|
||||
dirname = os.path.dirname(path)
|
||||
if not os.path.exists(dirname):
|
||||
os.makedirs(dirname)
|
||||
if path.endswith(".pem"):
|
||||
basename, _ = os.path.splitext(path)
|
||||
else:
|
||||
basename = path
|
||||
|
||||
key, ca = create_ca()
|
||||
|
||||
# Dump the CA plus private key
|
||||
f = open(path, "w")
|
||||
f.write(OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key))
|
||||
f.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
|
||||
f.close()
|
||||
|
||||
# Dump the certificate in PEM format
|
||||
f = open(os.path.join(dirname, basename + "-cert.pem"), "w")
|
||||
f.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
|
||||
f.close()
|
||||
|
||||
# Create a .cer file with the same contents for Android
|
||||
f = open(os.path.join(dirname, basename + "-cert.cer"), "w")
|
||||
f.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
|
||||
f.close()
|
||||
|
||||
# Dump the certificate in PKCS12 format for Windows devices
|
||||
f = open(os.path.join(dirname, basename + "-cert.p12"), "w")
|
||||
p12 = OpenSSL.crypto.PKCS12()
|
||||
p12.set_certificate(ca)
|
||||
f.write(p12.export())
|
||||
f.close()
|
||||
return True
|
||||
|
||||
|
||||
def dummy_cert(certdir, ca, commonname, sans):
|
||||
"""
|
||||
certdir: Certificate directory.
|
||||
ca: Path to the certificate authority file, or None.
|
||||
commonname: Common name for the generated certificate.
|
||||
|
||||
Returns cert path if operation succeeded, None if not.
|
||||
"""
|
||||
namehash = hashlib.sha256(commonname).hexdigest()
|
||||
certpath = os.path.join(certdir, namehash + ".pem")
|
||||
if os.path.exists(certpath):
|
||||
return certpath
|
||||
|
||||
ss = []
|
||||
for i in sans:
|
||||
ss.append("DNS: %s"%i)
|
||||
ss = ", ".join(ss)
|
||||
|
||||
if ca:
|
||||
raw = file(ca, "r").read()
|
||||
ca = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, raw)
|
||||
key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, raw)
|
||||
else:
|
||||
key, ca = create_ca()
|
||||
|
||||
req = OpenSSL.crypto.X509Req()
|
||||
subj = req.get_subject()
|
||||
subj.CN = commonname
|
||||
req.set_pubkey(ca.get_pubkey())
|
||||
req.sign(key, "sha1")
|
||||
if ss:
|
||||
req.add_extensions([OpenSSL.crypto.X509Extension("subjectAltName", True, ss)])
|
||||
|
||||
cert = OpenSSL.crypto.X509()
|
||||
cert.gmtime_adj_notBefore(0)
|
||||
cert.gmtime_adj_notAfter(60 * 60 * 24 * 30)
|
||||
cert.set_issuer(ca.get_subject())
|
||||
cert.set_subject(req.get_subject())
|
||||
cert.set_serial_number(int(time.time()*10000))
|
||||
if ss:
|
||||
cert.add_extensions([OpenSSL.crypto.X509Extension("subjectAltName", True, ss)])
|
||||
cert.set_pubkey(req.get_pubkey())
|
||||
cert.sign(key, "sha1")
|
||||
|
||||
f = open(certpath, "w")
|
||||
f.write(OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert))
|
||||
f.close()
|
||||
|
||||
return certpath
|
||||
|
||||
|
||||
class _GeneralName(univ.Choice):
|
||||
# We are only interested in dNSNames. We use a default handler to ignore
|
||||
# other types.
|
||||
componentType = namedtype.NamedTypes(
|
||||
namedtype.NamedType('dNSName', char.IA5String().subtype(
|
||||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class _GeneralNames(univ.SequenceOf):
|
||||
componentType = _GeneralName()
|
||||
sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, 1024)
|
||||
|
||||
|
||||
class SSLCert:
|
||||
def __init__(self, pemtxt):
|
||||
"""
|
||||
Returns a (common name, [subject alternative names]) tuple.
|
||||
"""
|
||||
self.cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pemtxt)
|
||||
|
||||
@classmethod
|
||||
def from_der(klass, der):
|
||||
pem = ssl.DER_cert_to_PEM_cert(der)
|
||||
return klass(pem)
|
||||
|
||||
def digest(self, name):
|
||||
return self.cert.digest(name)
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
return self.cert.get_issuer().get_components()
|
||||
|
||||
@property
|
||||
def notbefore(self):
|
||||
t = self.cert.get_notBefore()
|
||||
return datetime.datetime.strptime(t, "%Y%m%d%H%M%SZ")
|
||||
|
||||
@property
|
||||
def notafter(self):
|
||||
t = self.cert.get_notAfter()
|
||||
return datetime.datetime.strptime(t, "%Y%m%d%H%M%SZ")
|
||||
|
||||
@property
|
||||
def has_expired(self):
|
||||
return self.cert.has_expired()
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
return self.cert.get_subject().get_components()
|
||||
|
||||
@property
|
||||
def serial(self):
|
||||
return self.cert.get_serial_number()
|
||||
|
||||
@property
|
||||
def keyinfo(self):
|
||||
pk = self.cert.get_pubkey()
|
||||
types = {
|
||||
OpenSSL.crypto.TYPE_RSA: "RSA",
|
||||
OpenSSL.crypto.TYPE_DSA: "DSA",
|
||||
}
|
||||
return (
|
||||
types.get(pk.type(), "UNKNOWN"),
|
||||
pk.bits()
|
||||
)
|
||||
|
||||
@property
|
||||
def cn(self):
|
||||
cn = None
|
||||
for i in self.subject:
|
||||
if i[0] == "CN":
|
||||
cn = i[1]
|
||||
return cn
|
||||
|
||||
@property
|
||||
def altnames(self):
|
||||
altnames = []
|
||||
for i in range(self.cert.get_extension_count()):
|
||||
ext = self.cert.get_extension(i)
|
||||
if ext.get_short_name() == "subjectAltName":
|
||||
dec = decode(ext.get_data(), asn1Spec=_GeneralNames())
|
||||
for i in dec[0]:
|
||||
altnames.append(i[0].asOctets())
|
||||
return altnames
|
||||
|
||||
|
||||
# begin nocover
|
||||
def get_remote_cert(host, port):
|
||||
addr = socket.gethostbyname(host)
|
||||
s = ssl.get_server_certificate((addr, port))
|
||||
return SSLCert(s)
|
||||
# end nocover
|
||||
|
||||
@@ -1,5 +1,74 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import proxy
|
||||
import optparse
|
||||
import optparse, re, filt
|
||||
|
||||
|
||||
class ParseReplaceException(Exception): pass
|
||||
class OptionException(Exception): pass
|
||||
|
||||
|
||||
def parse_replace_hook(s):
|
||||
"""
|
||||
Returns a (pattern, regex, replacement) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the replacement clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
|
||||
Checks that pattern and regex are both well-formed. Raises
|
||||
ParseReplaceException on error.
|
||||
"""
|
||||
sep, rem = s[0], s[1:]
|
||||
parts = rem.split(sep, 2)
|
||||
if len(parts) == 2:
|
||||
patt = ".*"
|
||||
regex, replacement = parts
|
||||
elif len(parts) == 3:
|
||||
patt, regex, replacement = parts
|
||||
else:
|
||||
raise ParseReplaceException("Malformed replacement specifier - too few clauses: %s"%s)
|
||||
|
||||
if not regex:
|
||||
raise ParseReplaceException("Empty replacement regex: %s"%str(patt))
|
||||
|
||||
try:
|
||||
re.compile(regex)
|
||||
except re.error, e:
|
||||
raise ParseReplaceException("Malformed replacement regex: %s"%str(e.message))
|
||||
|
||||
if not filt.parse(patt):
|
||||
raise ParseReplaceException("Malformed replacement filter pattern: %s"%patt)
|
||||
|
||||
return patt, regex, replacement
|
||||
|
||||
|
||||
def get_common_options(options):
|
||||
@@ -14,21 +83,42 @@ def get_common_options(options):
|
||||
elif options.stickyauth_filt:
|
||||
stickyauth = options.stickyauth_filt
|
||||
|
||||
reps = []
|
||||
for i in options.replace:
|
||||
try:
|
||||
p = parse_replace_hook(i)
|
||||
except ParseReplaceException, e:
|
||||
raise OptionException(e.message)
|
||||
reps.append(p)
|
||||
for i in options.replace_file:
|
||||
try:
|
||||
patt, rex, path = parse_replace_hook(i)
|
||||
except ParseReplaceException, e:
|
||||
raise OptionException(e.message)
|
||||
try:
|
||||
v = open(path, "r").read()
|
||||
except IOError, e:
|
||||
raise OptionException("Could not read replace file: %s"%path)
|
||||
reps.append((patt, rex, v))
|
||||
|
||||
return dict(
|
||||
anticache = options.anticache,
|
||||
anticomp = options.anticomp,
|
||||
client_replay = options.client_replay,
|
||||
eventlog = options.eventlog,
|
||||
kill = options.kill,
|
||||
no_server = options.no_server,
|
||||
refresh_server_playback = not options.norefresh,
|
||||
rheaders = options.rheaders,
|
||||
rfile = options.rfile,
|
||||
request_script = options.request_script,
|
||||
response_script = options.response_script,
|
||||
replacements = reps,
|
||||
server_replay = options.server_replay,
|
||||
script = options.script,
|
||||
stickycookie = stickycookie,
|
||||
stickyauth = stickyauth,
|
||||
wfile = options.wfile,
|
||||
verbosity = options.verbose,
|
||||
nopop = options.nopop,
|
||||
)
|
||||
|
||||
|
||||
@@ -38,11 +128,21 @@ def common_options(parser):
|
||||
action="store", type = "str", dest="addr", default='',
|
||||
help = "Address to bind proxy to (defaults to all interfaces)"
|
||||
)
|
||||
parser.add_option(
|
||||
"--anticache",
|
||||
action="store_true", dest="anticache", default=False,
|
||||
help="Strip out request headers that might cause the server to return 304-not-modified."
|
||||
)
|
||||
parser.add_option(
|
||||
"--confdir",
|
||||
action="store", type = "str", dest="confdir", default='~/.mitmproxy',
|
||||
help = "Configuration directory. (~/.mitmproxy)"
|
||||
)
|
||||
parser.add_option(
|
||||
"-e",
|
||||
action="store_true", dest="eventlog",
|
||||
help="Show event log."
|
||||
)
|
||||
parser.add_option(
|
||||
"-n",
|
||||
action="store_true", dest="no_server",
|
||||
@@ -53,6 +153,11 @@ def common_options(parser):
|
||||
action="store", type = "int", dest="port", default=8080,
|
||||
help = "Proxy service port."
|
||||
)
|
||||
parser.add_option(
|
||||
"-P",
|
||||
action="store", dest="reverse_proxy", default=None,
|
||||
help="Reverse proxy to upstream server: http[s]://host[:port]"
|
||||
)
|
||||
parser.add_option(
|
||||
"-q",
|
||||
action="store_true", dest="quiet",
|
||||
@@ -64,19 +169,9 @@ def common_options(parser):
|
||||
help="Read flows from file."
|
||||
)
|
||||
parser.add_option(
|
||||
"--anticache",
|
||||
action="store_true", dest="anticache", default=False,
|
||||
help="Strip out request headers that might cause the server to return 304-not-modified."
|
||||
)
|
||||
parser.add_option(
|
||||
"--reqscript",
|
||||
action="store", dest="request_script", default=None,
|
||||
help="Script to run when a request is recieved."
|
||||
)
|
||||
parser.add_option(
|
||||
"--respscript",
|
||||
action="store", dest="response_script", default=None,
|
||||
help="Script to run when a response is recieved."
|
||||
"-s",
|
||||
action="store", dest="script", default=None,
|
||||
help="Run a script."
|
||||
)
|
||||
parser.add_option(
|
||||
"-t",
|
||||
@@ -108,6 +203,29 @@ def common_options(parser):
|
||||
action="store", dest="wfile", default=None,
|
||||
help="Write flows to file."
|
||||
)
|
||||
parser.add_option(
|
||||
"-z",
|
||||
action="store_true", dest="anticomp", default=False,
|
||||
help="Try to convince servers to send us un-compressed data."
|
||||
)
|
||||
parser.add_option(
|
||||
"-Z",
|
||||
action="store", dest="body_size_limit", default=None,
|
||||
metavar="SIZE",
|
||||
help="Byte size limit of HTTP request and response bodies."\
|
||||
" Understands k/m/g suffixes, i.e. 3m for 3 megabytes."
|
||||
)
|
||||
parser.add_option(
|
||||
"--cert-wait-time", type="float",
|
||||
action="store", dest="cert_wait_time", default=0,
|
||||
help="Wait for specified number of seconds after a new cert is generated. This can smooth over small discrepancies between the client and server times."
|
||||
)
|
||||
parser.add_option(
|
||||
"--upstream-cert", default=False,
|
||||
action="store_true", dest="upstream_cert",
|
||||
help="Connect to upstream server to look up certificate details."
|
||||
)
|
||||
|
||||
group = optparse.OptionGroup(parser, "Client Replay")
|
||||
group.add_option(
|
||||
"-c",
|
||||
@@ -116,15 +234,9 @@ def common_options(parser):
|
||||
)
|
||||
parser.add_option_group(group)
|
||||
|
||||
parser.add_option(
|
||||
"--cert-wait-time",
|
||||
action="store", dest="cert_wait_time", default=0,
|
||||
help="Wait for specified number of seconds after a new cert is generated. This can smooth over small discrepancies between the client and server times."
|
||||
)
|
||||
|
||||
group = optparse.OptionGroup(parser, "Server Replay")
|
||||
group.add_option(
|
||||
"-s",
|
||||
"-S",
|
||||
action="store", dest="server_replay", default=None, metavar="PATH",
|
||||
help="Replay server responses from a saved file."
|
||||
)
|
||||
@@ -145,6 +257,34 @@ def common_options(parser):
|
||||
help= "Disable response refresh, "
|
||||
"which updates times in cookies and headers for replayed responses."
|
||||
)
|
||||
group.add_option(
|
||||
"--no-pop",
|
||||
action="store_true", dest="nopop", default=False,
|
||||
help="Disable response pop from response flow. "
|
||||
"This makes it possible to replay same response multiple times."
|
||||
)
|
||||
|
||||
group = optparse.OptionGroup(
|
||||
parser,
|
||||
"Replacements",
|
||||
"""
|
||||
Replacements are of the form "/pattern/regex/replacement", where
|
||||
the separator can be any character. Please see the documentation
|
||||
for more information.
|
||||
""".strip()
|
||||
)
|
||||
group.add_option(
|
||||
"--replace",
|
||||
action="append", type="str", dest="replace", default=[],
|
||||
metavar="PATTERN",
|
||||
help="Replacement pattern."
|
||||
)
|
||||
group.add_option(
|
||||
"--replace-from-file",
|
||||
action="append", type="str", dest="replace_file", default=[],
|
||||
metavar="PATTERN",
|
||||
help="Replacement pattern, where the replacement clause is a path to a file."
|
||||
)
|
||||
parser.add_option_group(group)
|
||||
|
||||
proxy.certificate_option_group(parser)
|
||||
|
||||
1553
libmproxy/console.py
967
libmproxy/console/__init__.py
Normal file
@@ -0,0 +1,967 @@
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import mailcap, mimetypes, tempfile, os, subprocess, glob, time, shlex
|
||||
import os.path, sys, weakref
|
||||
import urwid
|
||||
from .. import controller, utils, flow
|
||||
import flowlist, flowview, help, common, grideditor, palettes, contentview, flowdetailview
|
||||
|
||||
EVENTLOG_SIZE = 500
|
||||
|
||||
|
||||
class Stop(Exception): pass
|
||||
|
||||
|
||||
|
||||
|
||||
class _PathCompleter:
|
||||
def __init__(self, _testing=False):
|
||||
"""
|
||||
_testing: disables reloading of the lookup table to make testing possible.
|
||||
"""
|
||||
self.lookup, self.offset = None, None
|
||||
self.final = None
|
||||
self._testing = _testing
|
||||
|
||||
def reset(self):
|
||||
self.lookup = None
|
||||
self.offset = -1
|
||||
|
||||
def complete(self, txt):
|
||||
"""
|
||||
Returns the next completion for txt, or None if there is no completion.
|
||||
"""
|
||||
path = os.path.expanduser(txt)
|
||||
if not self.lookup:
|
||||
if not self._testing:
|
||||
# Lookup is a set of (display value, actual value) tuples.
|
||||
self.lookup = []
|
||||
if os.path.isdir(path):
|
||||
files = glob.glob(os.path.join(path, "*"))
|
||||
prefix = txt
|
||||
else:
|
||||
files = glob.glob(path+"*")
|
||||
prefix = os.path.dirname(txt)
|
||||
prefix = prefix or "./"
|
||||
for f in files:
|
||||
display = os.path.join(prefix, os.path.basename(f))
|
||||
if os.path.isdir(f):
|
||||
display += "/"
|
||||
self.lookup.append((display, f))
|
||||
if not self.lookup:
|
||||
self.final = path
|
||||
return path
|
||||
self.lookup.sort()
|
||||
self.offset = -1
|
||||
self.lookup.append((txt, txt))
|
||||
self.offset += 1
|
||||
if self.offset >= len(self.lookup):
|
||||
self.offset = 0
|
||||
ret = self.lookup[self.offset]
|
||||
self.final = ret[1]
|
||||
return ret[0]
|
||||
|
||||
#begin nocover
|
||||
|
||||
class PathEdit(urwid.Edit, _PathCompleter):
|
||||
def __init__(self, *args, **kwargs):
|
||||
urwid.Edit.__init__(self, *args, **kwargs)
|
||||
_PathCompleter.__init__(self)
|
||||
|
||||
def keypress(self, size, key):
|
||||
if key == "tab":
|
||||
comp = self.complete(self.get_edit_text())
|
||||
self.set_edit_text(comp)
|
||||
self.set_edit_pos(len(comp))
|
||||
else:
|
||||
self.reset()
|
||||
return urwid.Edit.keypress(self, size, key)
|
||||
|
||||
|
||||
class ActionBar(common.WWrap):
|
||||
def __init__(self):
|
||||
self.message("")
|
||||
|
||||
def selectable(self):
|
||||
return True
|
||||
|
||||
def path_prompt(self, prompt, text):
|
||||
self.w = PathEdit(prompt, text)
|
||||
|
||||
def prompt(self, prompt, text = ""):
|
||||
# A (partial) workaround for this Urwid issue:
|
||||
# https://github.com/Nic0/tyrs/issues/115
|
||||
# We can remove it once veryone is beyond 1.0.1
|
||||
if isinstance(prompt, basestring):
|
||||
prompt = unicode(prompt)
|
||||
self.w = urwid.Edit(prompt, text or "")
|
||||
|
||||
def message(self, message):
|
||||
self.w = urwid.Text(message)
|
||||
|
||||
|
||||
class StatusBar(common.WWrap):
|
||||
def __init__(self, master, helptext):
|
||||
self.master, self.helptext = master, helptext
|
||||
self.expire = None
|
||||
self.ab = ActionBar()
|
||||
self.ib = common.WWrap(urwid.Text(""))
|
||||
self.w = urwid.Pile([self.ib, self.ab])
|
||||
|
||||
def get_status(self):
|
||||
r = []
|
||||
|
||||
if self.master.replacehooks.count():
|
||||
r.append("[")
|
||||
r.append(("heading_key", "R"))
|
||||
r.append("eplacing]")
|
||||
if self.master.client_playback:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "cplayback"))
|
||||
r.append(":%s to go]"%self.master.client_playback.count())
|
||||
if self.master.server_playback:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "splayback"))
|
||||
if self.master.nopop:
|
||||
r.append(":%s in file]"%self.master.server_playback.count())
|
||||
else:
|
||||
r.append(":%s to go]"%self.master.server_playback.count())
|
||||
if self.master.state.intercept_txt:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "i"))
|
||||
r.append(":%s]"%self.master.state.intercept_txt)
|
||||
if self.master.state.limit_txt:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "l"))
|
||||
r.append(":%s]"%self.master.state.limit_txt)
|
||||
if self.master.stickycookie_txt:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "t"))
|
||||
r.append(":%s]"%self.master.stickycookie_txt)
|
||||
if self.master.stickyauth_txt:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "u"))
|
||||
r.append(":%s]"%self.master.stickyauth_txt)
|
||||
if self.master.server.config.reverse_proxy:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "P"))
|
||||
r.append(":%s]"%utils.unparse_url(*self.master.server.config.reverse_proxy))
|
||||
if self.master.state.default_body_view != contentview.VIEW_AUTO:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "M"))
|
||||
r.append(":%s]"%contentview.VIEW_NAMES[self.master.state.default_body_view])
|
||||
|
||||
opts = []
|
||||
if self.master.anticache:
|
||||
opts.append("anticache")
|
||||
if self.master.anticomp:
|
||||
opts.append("anticomp")
|
||||
if not self.master.refresh_server_playback:
|
||||
opts.append("norefresh")
|
||||
if self.master.killextra:
|
||||
opts.append("killextra")
|
||||
if self.master.server.config.upstream_cert:
|
||||
opts.append("upstream-cert")
|
||||
|
||||
if opts:
|
||||
r.append("[%s]"%(":".join(opts)))
|
||||
|
||||
if self.master.script:
|
||||
r.append("[script:%s]"%self.master.script.path)
|
||||
|
||||
if self.master.debug:
|
||||
r.append("[lt:%0.3f]"%self.master.looptime)
|
||||
|
||||
return r
|
||||
|
||||
def redraw(self):
|
||||
if self.expire and time.time() > self.expire:
|
||||
self.message("")
|
||||
|
||||
fc = self.master.state.flow_count()
|
||||
if self.master.currentflow:
|
||||
idx = self.master.state.view.index(self.master.currentflow) + 1
|
||||
t = [
|
||||
('heading', ("[%s/%s]"%(idx, fc)).ljust(9))
|
||||
]
|
||||
else:
|
||||
t = [
|
||||
('heading', ("[%s]"%fc).ljust(9))
|
||||
]
|
||||
|
||||
if self.master.server.bound:
|
||||
boundaddr = "[%s:%s]"%(self.master.server.address or "*", self.master.server.port)
|
||||
else:
|
||||
boundaddr = ""
|
||||
t.extend(self.get_status())
|
||||
status = urwid.AttrWrap(urwid.Columns([
|
||||
urwid.Text(t),
|
||||
urwid.Text(
|
||||
[
|
||||
self.helptext,
|
||||
boundaddr
|
||||
],
|
||||
align="right"
|
||||
),
|
||||
]), "heading")
|
||||
self.ib.set_w(status)
|
||||
|
||||
def update(self, text):
|
||||
self.helptext = text
|
||||
self.redraw()
|
||||
self.master.drawscreen()
|
||||
|
||||
def selectable(self):
|
||||
return True
|
||||
|
||||
def get_edit_text(self):
|
||||
return self.ab.w.get_edit_text()
|
||||
|
||||
def path_prompt(self, prompt, text):
|
||||
return self.ab.path_prompt(prompt, text)
|
||||
|
||||
def prompt(self, prompt, text = ""):
|
||||
self.ab.prompt(prompt, text)
|
||||
|
||||
def message(self, msg, expire=None):
|
||||
if expire:
|
||||
self.expire = time.time() + float(expire)/1000
|
||||
else:
|
||||
self.expire = None
|
||||
self.ab.message(msg)
|
||||
self.master.drawscreen()
|
||||
|
||||
|
||||
#end nocover
|
||||
|
||||
class ConsoleState(flow.State):
|
||||
def __init__(self):
|
||||
flow.State.__init__(self)
|
||||
self.focus = None
|
||||
self.default_body_view = contentview.VIEW_AUTO
|
||||
self.view_flow_mode = common.VIEW_FLOW_REQUEST
|
||||
self.last_script = ""
|
||||
self.last_saveload = ""
|
||||
self.flowsettings = weakref.WeakKeyDictionary()
|
||||
|
||||
def add_flow_setting(self, flow, key, value):
|
||||
d = self.flowsettings.setdefault(flow, {})
|
||||
d[key] = value
|
||||
|
||||
def get_flow_setting(self, flow, key, default=None):
|
||||
d = self.flowsettings.get(flow, {})
|
||||
return d.get(key, default)
|
||||
|
||||
def add_request(self, req):
|
||||
f = flow.State.add_request(self, req)
|
||||
if self.focus is None:
|
||||
self.set_focus(0)
|
||||
return f
|
||||
|
||||
def add_response(self, resp):
|
||||
f = flow.State.add_response(self, resp)
|
||||
if self.focus is None:
|
||||
self.set_focus(0)
|
||||
return f
|
||||
|
||||
def set_limit(self, limit):
|
||||
ret = flow.State.set_limit(self, limit)
|
||||
self.set_focus(self.focus)
|
||||
return ret
|
||||
|
||||
def get_focus(self):
|
||||
if not self.view or self.focus is None:
|
||||
return None, None
|
||||
return self.view[self.focus], self.focus
|
||||
|
||||
def set_focus(self, idx):
|
||||
if self.view:
|
||||
if idx >= len(self.view):
|
||||
idx = len(self.view) - 1
|
||||
elif idx < 0:
|
||||
idx = 0
|
||||
self.focus = idx
|
||||
|
||||
def get_from_pos(self, pos):
|
||||
if len(self.view) <= pos or pos < 0:
|
||||
return None, None
|
||||
return self.view[pos], pos
|
||||
|
||||
def get_next(self, pos):
|
||||
return self.get_from_pos(pos+1)
|
||||
|
||||
def get_prev(self, pos):
|
||||
return self.get_from_pos(pos-1)
|
||||
|
||||
def delete_flow(self, f):
|
||||
ret = flow.State.delete_flow(self, f)
|
||||
self.set_focus(self.focus)
|
||||
return ret
|
||||
|
||||
|
||||
|
||||
class Options(object):
|
||||
__slots__ = [
|
||||
"anticache",
|
||||
"anticomp",
|
||||
"client_replay",
|
||||
"debug",
|
||||
"eventlog",
|
||||
"keepserving",
|
||||
"kill",
|
||||
"intercept",
|
||||
"no_server",
|
||||
"refresh_server_playback",
|
||||
"rfile",
|
||||
"script",
|
||||
"replacements",
|
||||
"rheaders",
|
||||
"server_replay",
|
||||
"stickycookie",
|
||||
"stickyauth",
|
||||
"verbosity",
|
||||
"wfile",
|
||||
"nopop",
|
||||
]
|
||||
def __init__(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
setattr(self, k, v)
|
||||
for i in self.__slots__:
|
||||
if not hasattr(self, i):
|
||||
setattr(self, i, None)
|
||||
|
||||
|
||||
#begin nocover
|
||||
|
||||
|
||||
class ConsoleMaster(flow.FlowMaster):
|
||||
palette = []
|
||||
def __init__(self, server, options):
|
||||
flow.FlowMaster.__init__(self, server, ConsoleState())
|
||||
self.looptime = 0
|
||||
self.options = options
|
||||
|
||||
for i in options.replacements:
|
||||
self.replacehooks.add(*i)
|
||||
|
||||
self.flow_list_walker = None
|
||||
self.set_palette()
|
||||
|
||||
r = self.set_intercept(options.intercept)
|
||||
if r:
|
||||
print >> sys.stderr, "Intercept error:", r
|
||||
sys.exit(1)
|
||||
|
||||
r = self.set_stickycookie(options.stickycookie)
|
||||
if r:
|
||||
print >> sys.stderr, "Sticky cookies error:", r
|
||||
sys.exit(1)
|
||||
|
||||
r = self.set_stickyauth(options.stickyauth)
|
||||
if r:
|
||||
print >> sys.stderr, "Sticky auth error:", r
|
||||
sys.exit(1)
|
||||
|
||||
self.refresh_server_playback = options.refresh_server_playback
|
||||
self.anticache = options.anticache
|
||||
self.anticomp = options.anticomp
|
||||
self.killextra = options.kill
|
||||
self.rheaders = options.rheaders
|
||||
self.nopop = options.nopop
|
||||
|
||||
self.eventlog = options.eventlog
|
||||
self.eventlist = urwid.SimpleListWalker([])
|
||||
|
||||
if options.client_replay:
|
||||
self.client_playback_path(options.client_replay)
|
||||
|
||||
if options.server_replay:
|
||||
self.server_playback_path(options.server_replay)
|
||||
|
||||
self.debug = options.debug
|
||||
|
||||
if options.script:
|
||||
err = self.load_script(options.script)
|
||||
if err:
|
||||
print >> sys.stderr, "Script load error:", err
|
||||
sys.exit(1)
|
||||
|
||||
def run_script_once(self, path, f):
|
||||
if not path:
|
||||
return
|
||||
ret = self.get_script(path)
|
||||
if ret[0]:
|
||||
self.statusbar.message(ret[0])
|
||||
return
|
||||
s = ret[1]
|
||||
if f.request:
|
||||
s.run("request", f)
|
||||
if f.response:
|
||||
s.run("response", f)
|
||||
if f.error:
|
||||
s.run("error", f)
|
||||
s.run("done")
|
||||
self.refresh_flow(f)
|
||||
self.state.last_script = path
|
||||
|
||||
def set_script(self, path):
|
||||
if not path:
|
||||
return
|
||||
ret = self.load_script(path)
|
||||
if ret:
|
||||
self.statusbar.message(ret)
|
||||
self.state.last_script = path
|
||||
|
||||
def toggle_eventlog(self):
|
||||
self.eventlog = not self.eventlog
|
||||
self.view_flowlist()
|
||||
|
||||
def _readflow(self, path):
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
f = file(path, "r")
|
||||
flows = list(flow.FlowReader(f).stream())
|
||||
except (IOError, flow.FlowReadError), v:
|
||||
return True, v.strerror
|
||||
return False, flows
|
||||
|
||||
def client_playback_path(self, path):
|
||||
err, ret = self._readflow(path)
|
||||
if err:
|
||||
self.statusbar.message(ret)
|
||||
else:
|
||||
self.start_client_playback(ret, False)
|
||||
|
||||
def server_playback_path(self, path):
    """Read a flow dump from *path* and start server playback."""
    err, ret = self._readflow(path)
    if err:
        self.statusbar.message(ret)
        return
    self.start_server_playback(
        ret, self.killextra, self.rheaders, False, self.nopop
    )
|
||||
|
||||
def spawn_editor(self, data):
    """
    Hand *data* to an external editor ($EDITOR, defaulting to vi) and
    return the edited bytes.  The UI is suspended while the editor runs;
    on failure to launch, the original data is returned unchanged.
    """
    fd, name = tempfile.mkstemp('', "mproxy")
    os.write(fd, data)
    os.close(fd)
    c = os.environ.get("EDITOR")
    # If no EDITOR is set, assume 'vi'
    if not c:
        c = "vi"
    cmd = shlex.split(c)
    cmd.append(name)
    self.ui.stop()
    try:
        subprocess.call(cmd)
    except OSError:
        # Narrowed from a bare "except:": launch failure raises OSError;
        # the bare form also swallowed KeyboardInterrupt.
        self.statusbar.message("Can't start editor: %s" % c)
        self.ui.start()
        os.unlink(name)
        return data
    self.ui.start()
    # Read the result back and close the handle promptly (previously the
    # file object from open(name).read() was never closed explicitly).
    f = open(name)
    try:
        data = f.read()
    finally:
        f.close()
    os.unlink(name)
    return data
|
||||
|
||||
def spawn_external_viewer(self, data, contenttype):
    """
    Display *data* in an external program: a mailcap match for
    *contenttype* if one exists, else $PAGER/$EDITOR, else "less".
    The UI is suspended while the viewer runs.
    """
    if contenttype:
        ext = mimetypes.guess_extension(contenttype) or ""
    else:
        ext = ""
    fd, name = tempfile.mkstemp(ext, "mproxy")
    os.write(fd, data)
    os.close(fd)

    cmd = None
    shell = False

    if contenttype:
        c = mailcap.getcaps()
        cmd, _ = mailcap.findmatch(c, contenttype, filename=name)
        if cmd:
            shell = True
    if not cmd:
        # Previously a missing PAGER *and* EDITOR left cmd as [None, name]
        # and crashed subprocess; default to "less" instead.
        c = os.environ.get("PAGER") or os.environ.get("EDITOR") or "less"
        cmd = [c, name]
    self.ui.stop()
    try:
        subprocess.call(cmd, shell=shell)
    except OSError:
        # Report rather than crash the console, mirroring spawn_editor.
        self.statusbar.message("Can't start external viewer: %s" % (cmd,))
    self.ui.start()
    os.unlink(name)
|
||||
|
||||
def set_palette(self):
    """Select the colour palette used by the console UI."""
    self.palette = palettes.dark
|
||||
|
||||
def run(self):
    """
    Set up the urwid screen and view state, start the proxy slave,
    optionally pre-load a flow dump, then run the event loop until
    shutdown.
    """
    self.currentflow = None

    self.ui = urwid.raw_display.Screen()
    self.ui.set_terminal_properties(256)
    self.ui.register_palette(self.palette)
    self.flow_list_walker = flowlist.FlowListWalker(self, self.state)

    # View components; populated by make_view()/view_*() below.
    self.view = None
    self.statusbar = None
    self.header = None
    self.body = None
    self.help_context = None

    # Prompt state: prompting is (callback, args) while a prompt is
    # active; onekey is a string of accepted single keys, or False.
    self.prompting = False
    self.onekey = False

    self.view_flowlist()

    self.server.start_slave(controller.Slave, self.masterq)

    if self.options.rfile:
        ret = self.load_flows(self.options.rfile)
        if ret:
            self.shutdown()
            print >> sys.stderr, "Could not load file:", ret
            sys.exit(1)

    # run_wrapper restores the terminal even if loop() raises.
    self.ui.run_wrapper(self.loop)
    print >> sys.stderr, "Shutting down..."
    sys.stderr.flush()
    self.shutdown()
|
||||
|
||||
def focus_current(self):
    """Move the flow-list focus to the currently selected flow, if any."""
    if not self.currentflow:
        return
    try:
        self.flow_list_walker.set_focus(self.state.index(self.currentflow))
    except (IndexError, ValueError):
        # The flow may no longer be present in the (filtered) view.
        pass
|
||||
|
||||
def make_view(self):
    """Assemble body/header/statusbar into the top-level urwid frame."""
    self.view = urwid.Frame(
        self.body, header=self.header, footer=self.statusbar
    )
    self.view.set_focus("body")
|
||||
|
||||
def view_help(self):
    """Switch to the help screen, remembering the view to restore."""
    saved = (self.statusbar, self.body, self.header)
    self.body = help.HelpView(self, self.help_context, saved)
    self.statusbar = StatusBar(self, help.footer)
    self.header = None
    self.make_view()
|
||||
|
||||
def view_flowdetails(self, flow):
    """Switch to the connection-details screen for *flow*."""
    saved = (self.statusbar, self.body, self.header)
    self.body = flowdetailview.FlowDetailsView(self, flow, saved)
    self.statusbar = StatusBar(self, flowdetailview.footer)
    self.header = None
    self.make_view()
|
||||
|
||||
def view_grideditor(self, ge):
|
||||
self.body = ge
|
||||
self.header = None
|
||||
self.help_context = grideditor.help_context
|
||||
self.statusbar = StatusBar(self, grideditor.footer)
|
||||
self.make_view()
|
||||
|
||||
def view_flowlist(self):
    """Switch to the main flow-list view (with event log if enabled)."""
    if self.ui.started:
        self.ui.clear()
    # Restore list focus before the body widgets are rebuilt below.
    self.focus_current()
    if self.eventlog:
        self.body = flowlist.BodyPile(self)
    else:
        self.body = flowlist.FlowListBox(self)
    self.statusbar = StatusBar(self, flowlist.footer)
    self.header = None
    self.currentflow = None

    self.make_view()
    self.help_context = flowlist.help_context
|
||||
|
||||
def view_flow(self, flow):
    """Open the detail view for a single *flow*."""
    self.body = flowview.FlowView(self, self.state, flow)
    self.header = flowview.FlowViewHeader(self, flow)
    self.statusbar = StatusBar(self, flowview.footer)
    self.currentflow = flow
    self.make_view()
    self.help_context = flowview.help_context
|
||||
|
||||
def _write_flows(self, path, flows):
|
||||
self.state.last_saveload = path
|
||||
if not path:
|
||||
return
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
f = file(path, "wb")
|
||||
fw = flow.FlowWriter(f)
|
||||
for i in flows:
|
||||
fw.add(i)
|
||||
f.close()
|
||||
except IOError, v:
|
||||
self.statusbar.message(v.strerror)
|
||||
|
||||
def save_one_flow(self, path, flow):
    """Save a single flow to *path*."""
    return self._write_flows(path, [flow])
|
||||
|
||||
def save_flows(self, path):
    """Save every flow in the current view to *path*."""
    return self._write_flows(path, self.state.view)
|
||||
|
||||
def load_flows_callback(self, path):
    """Prompt callback: load flows from *path* and report the outcome."""
    if not path:
        return
    return self.load_flows(path) or "Flows loaded from %s"%path
|
||||
|
||||
def load_flows(self, path):
|
||||
self.state.last_saveload = path
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
f = file(path, "r")
|
||||
fr = flow.FlowReader(f)
|
||||
except IOError, v:
|
||||
return v.strerror
|
||||
try:
|
||||
flow.FlowMaster.load_flows(self, fr)
|
||||
except flow.FlowReadError, v:
|
||||
return v.strerror
|
||||
f.close()
|
||||
if self.flow_list_walker:
|
||||
self.sync_list_view()
|
||||
self.focus_current()
|
||||
|
||||
def path_prompt(self, prompt, text, callback, *args):
    """Open a path-entry prompt; *callback* receives the entered path."""
    self.statusbar.path_prompt(prompt, text)
    self.view.set_focus("footer")
    self.prompting = (callback, args)
|
||||
|
||||
def prompt(self, prompt, text, callback, *args):
    """Open a free-text prompt; *callback* receives the entered text."""
    self.statusbar.prompt(prompt, text)
    self.view.set_focus("footer")
    self.prompting = (callback, args)
|
||||
|
||||
def prompt_edit(self, prompt, text, callback):
    """Prompt with a standard ": " suffix and no extra callback args."""
    self.statusbar.prompt(prompt + ": ", text)
    self.view.set_focus("footer")
    self.prompting = (callback, [])
|
||||
|
||||
def prompt_onekey(self, prompt, keys, callback, *args):
    """
    Prompt for a single keypress.  *keys* is a sequence of (word, key)
    tuples; the key character within each word is highlighted.
    """
    markup = [prompt, " ("]
    last = len(keys) - 1
    for i, (word, key) in enumerate(keys):
        markup.extend(common.highlight_key(word, key))
        if i < last:
            markup.append(",")
    markup.append(")? ")
    self.onekey = "".join(k for _, k in keys)
    self.prompt(markup, "", callback, *args)
|
||||
|
||||
def prompt_done(self):
    """Tear down any active prompt and return focus to the body."""
    self.prompting = False
    self.onekey = False
    self.view.set_focus("body")
    self.statusbar.message("")
|
||||
|
||||
def prompt_execute(self, txt=None):
    """Invoke the pending prompt callback with *txt* (or the edit text)."""
    if not txt:
        txt = self.statusbar.get_edit_text()
    callback, args = self.prompting
    self.prompt_done()
    msg = callback(txt, *args)
    if msg:
        self.statusbar.message(msg, 1000)
|
||||
|
||||
def prompt_cancel(self):
    """Abandon the current prompt without invoking its callback."""
    self.prompt_done()
|
||||
|
||||
def accept_all(self):
    """Release every intercepted flow."""
    self.state.accept_all()
|
||||
|
||||
def set_limit(self, txt):
    """Apply a display-limit filter; returns an error string if invalid."""
    result = self.state.set_limit(txt)
    self.sync_list_view()
    return result
|
||||
|
||||
def set_intercept(self, txt):
    """Set the interception filter expression."""
    return self.state.set_intercept(txt)
|
||||
|
||||
def change_default_display_mode(self, t):
    """Set the global body display mode from its one-letter shortcut."""
    self.state.default_body_view = contentview.VIEW_SHORTCUTS.get(t)
    if self.currentflow:
        self.refresh_flow(self.currentflow)
|
||||
|
||||
def set_reverse_proxy(self, txt):
    """Set (or clear, for empty input) the reverse-proxy target."""
    if not txt:
        self.server.config.reverse_proxy = None
        return
    spec = utils.parse_proxy_spec(txt)
    if not spec:
        return "Invalid reverse proxy specification"
    self.server.config.reverse_proxy = spec
|
||||
|
||||
def drawscreen(self):
    """Render the current view and return the screen size used."""
    size = self.ui.get_cols_rows()
    self.ui.draw_screen(size, self.view.render(size, focus=1))
    return size
|
||||
|
||||
def pop_view(self):
    """Return from a sub-view to the flow view or the flow list."""
    if self.currentflow:
        self.view_flow(self.currentflow)
    else:
        self.view_flowlist()
|
||||
|
||||
def set_replace(self, r):
    """Replace all replacement hooks with the specs in *r*."""
    self.replacehooks.clear()
    for spec in r:
        self.replacehooks.add(*spec)
|
||||
|
||||
def loop(self):
    """
    Main event loop: redraw when state changes, pump the master event
    queue, and dispatch keystrokes either to an active prompt or to the
    global key bindings below.
    """
    changed = True
    try:
        while not controller.should_exit:
            startloop = time.time()
            if changed:
                self.statusbar.redraw()
                size = self.drawscreen()
            # tick() drains queued proxy events and reports whether
            # anything changed (and hence whether we must redraw).
            changed = self.tick(self.masterq)
            self.ui.set_input_timeouts(max_wait=0.1)
            keys = self.ui.get_input()
            if keys:
                changed = True
            for k in keys:
                if self.prompting:
                    # A prompt is active: keys feed the prompt, not the
                    # global bindings.
                    if k == "esc":
                        self.prompt_cancel()
                    elif self.onekey:
                        if k == "enter":
                            self.prompt_cancel()
                        elif k in self.onekey:
                            self.prompt_execute(k)
                    elif k == "enter":
                        self.prompt_execute()
                    else:
                        self.view.keypress(size, k)
                else:
                    # Give the focused widget first refusal; unhandled
                    # keys fall through to the global bindings.
                    k = self.view.keypress(size, k)
                    if k:
                        self.statusbar.message("")
                        if k == "?":
                            self.view_help()
                        elif k == "c":
                            if not self.client_playback:
                                self.path_prompt(
                                    "Client replay: ",
                                    self.state.last_saveload,
                                    self.client_playback_path
                                )
                            else:
                                self.prompt_onekey(
                                    "Stop current client replay?",
                                    (
                                        ("yes", "y"),
                                        ("no", "n"),
                                    ),
                                    self.stop_client_playback_prompt,
                                )
                        elif k == "i":
                            self.prompt(
                                "Intercept filter: ",
                                self.state.intercept_txt,
                                self.set_intercept
                            )
                        elif k == "Q":
                            # Hard quit, no confirmation.
                            raise Stop
                        elif k == "q":
                            self.prompt_onekey(
                                "Quit",
                                (
                                    ("yes", "y"),
                                    ("no", "n"),
                                ),
                                self.quit,
                            )
                        elif k == "M":
                            self.prompt_onekey(
                                "Global default display mode",
                                contentview.VIEW_PROMPT,
                                self.change_default_display_mode
                            )
                        elif k == "P":
                            if self.server.config.reverse_proxy:
                                p = utils.unparse_url(*self.server.config.reverse_proxy)
                            else:
                                p = ""
                            self.prompt(
                                "Reverse proxy: ",
                                p,
                                self.set_reverse_proxy
                            )
                        elif k == "R":
                            self.view_grideditor(
                                grideditor.ReplaceEditor(
                                    self,
                                    self.replacehooks.get_specs(),
                                    self.set_replace
                                )
                            )
                        elif k == "s":
                            # "s" toggles: unload the current script if
                            # one is set, otherwise prompt for one.
                            if self.script:
                                self.load_script(None)
                            else:
                                self.path_prompt(
                                    "Set script: ",
                                    self.state.last_script,
                                    self.set_script
                                )
                        elif k == "S":
                            if not self.server_playback:
                                self.path_prompt(
                                    "Server replay: ",
                                    self.state.last_saveload,
                                    self.server_playback_path
                                )
                            else:
                                self.prompt_onekey(
                                    "Stop current server replay?",
                                    (
                                        ("yes", "y"),
                                        ("no", "n"),
                                    ),
                                    self.stop_server_playback_prompt,
                                )
                        elif k == "o":
                            self.prompt_onekey(
                                "Options",
                                (
                                    ("anticache", "a"),
                                    ("anticomp", "c"),
                                    ("killextra", "k"),
                                    ("norefresh", "n"),
                                    ("upstream-certs", "u"),
                                ),
                                self._change_options
                            )
                        elif k == "t":
                            self.prompt(
                                "Sticky cookie filter: ",
                                self.stickycookie_txt,
                                self.set_stickycookie
                            )
                        elif k == "u":
                            self.prompt(
                                "Sticky auth filter: ",
                                self.stickyauth_txt,
                                self.set_stickyauth
                            )
            self.looptime = time.time() - startloop
    except (Stop, KeyboardInterrupt):
        pass
|
||||
|
||||
def stop_client_playback_prompt(self, a):
    """One-key prompt callback: anything but "n" stops client replay."""
    if a != "n":
        self.stop_client_playback()
|
||||
|
||||
def stop_server_playback_prompt(self, a):
    """One-key prompt callback: anything but "n" stops server replay."""
    if a != "n":
        self.stop_server_playback()
|
||||
|
||||
def quit(self, a):
    """One-key prompt callback: anything but "n" exits the main loop."""
    if a != "n":
        raise Stop
|
||||
|
||||
def _change_options(self, a):
    """Toggle the runtime option selected by one-key prompt answer *a*."""
    if a == "a":
        self.anticache = not self.anticache
    elif a == "c":
        # Changed from a second "if" to "elif" for consistency with the
        # rest of the chain; behaviour is unchanged since "a" never
        # matches any later branch.
        self.anticomp = not self.anticomp
    elif a == "k":
        self.killextra = not self.killextra
    elif a == "n":
        self.refresh_server_playback = not self.refresh_server_playback
    elif a == "u":
        self.server.config.upstream_cert = not self.server.config.upstream_cert
|
||||
|
||||
def shutdown(self):
    """Kill all pending flows, then shut down the controller."""
    self.state.killall(self)
    controller.Master.shutdown(self)
|
||||
|
||||
def sync_list_view(self):
    """Tell the flow-list walker its contents changed, forcing redraw."""
    self.flow_list_walker._modified()
|
||||
|
||||
def clear_flows(self):
    """Drop all flows from state and refresh the list view."""
    self.state.clear()
    self.sync_list_view()
|
||||
|
||||
def delete_flow(self, f):
    """Remove flow *f* from state and refresh the list view."""
    self.state.delete_flow(f)
    self.sync_list_view()
|
||||
|
||||
def refresh_flow(self, c):
    """Ask each visible component that can display flow *c* to redraw it."""
    for widget in (self.header, self.body, self.statusbar):
        if hasattr(widget, "refresh_flow"):
            widget.refresh_flow(c)
|
||||
|
||||
def process_flow(self, f, r):
    """
    Either intercept flow *f* (if it matches the intercept pattern) or
    ack message *r* so the proxy can proceed, then refresh the UI.
    """
    # Replayed requests are never intercepted.
    if self.state.intercept and f.match(self.state.intercept) and not f.request.is_replay():
        f.intercept()
    else:
        r._ack()
    self.sync_list_view()
    self.refresh_flow(f)
|
||||
|
||||
def clear_events(self):
    """Empty the event log in place (the walker keeps its identity)."""
    self.eventlist[:] = []
|
||||
|
||||
def add_event(self, e, level="info"):
    """
    Append a log line to the event list, trimming it to EVENTLOG_SIZE
    and keeping the newest entry focused.
    """
    if level == "info":
        e = urwid.Text(e)
    elif level == "error":
        e = urwid.Text(("error", e))

    self.eventlist.append(e)
    if len(self.eventlist) > EVENTLOG_SIZE:
        self.eventlist.pop(0)
    # Focus the last element.  len(...) itself is one past the end and
    # is rejected by urwid's ListWalker.set_focus.
    self.eventlist.set_focus(len(self.eventlist) - 1)
|
||||
|
||||
# Handlers
|
||||
def handle_error(self, r):
    """Error hook: route the errored flow through process_flow."""
    f = flow.FlowMaster.handle_error(self, r)
    if f:
        self.process_flow(f, r)
    return f
|
||||
|
||||
def handle_request(self, r):
    """Request hook: route the new flow through process_flow."""
    f = flow.FlowMaster.handle_request(self, r)
    if f:
        self.process_flow(f, r)
    return f
|
||||
|
||||
def handle_response(self, r):
    """Response hook: route the completed flow through process_flow."""
    f = flow.FlowMaster.handle_response(self, r)
    if f:
        self.process_flow(f, r)
    return f
|
||||
228
libmproxy/console/common.py
Normal file
@@ -0,0 +1,228 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urwid
|
||||
import urwid.util
|
||||
from .. import utils
|
||||
|
||||
|
||||
|
||||
VIEW_FLOW_REQUEST = 0
|
||||
VIEW_FLOW_RESPONSE = 1
|
||||
|
||||
|
||||
def highlight_key(s, k):
    """
    Return urwid text markup for *s* with the first occurrence of the
    key character *k* rendered in the "key" attribute.

    If *k* does not occur in *s* (which previously raised IndexError),
    the whole string is returned as plain text.
    """
    parts = s.split(k, 1)
    if len(parts) == 1:
        return [("text", s)]
    l = []
    if parts[0]:
        l.append(("text", parts[0]))
    l.append(("key", k))
    if parts[1]:
        l.append(("text", parts[1]))
    return l
|
||||
|
||||
|
||||
KEY_MAX = 30

def format_keyvals(lst, key="key", val="text", indent=0):
    """
    Format a list of (key, value) tuples as aligned urwid rows.

    A None entry in *lst* renders as a blank line.  *key* and *val* are
    the display attributes for the two columns; *indent* prepends a
    fixed-width spacer column.  Values may be pre-built urwid widgets.
    """
    ret = []
    if lst:
        # Key-column width: longest non-empty key, capped at KEY_MAX.
        # The previous bare max(...) raised ValueError when every key
        # was empty/None.
        widths = [len(i[0]) for i in lst if i and i[0]]
        maxk = min(max(widths), KEY_MAX) if widths else 1
        for i, kv in enumerate(lst):
            if kv is None:
                ret.append(urwid.Text(""))
            else:
                cols = []
                # This cumbersome construction process is here for a reason:
                # Urwid < 1.0 barfs if given a fixed size column of size zero.
                if indent:
                    cols.append(("fixed", indent, urwid.Text("")))
                cols.extend([
                    (
                        "fixed",
                        maxk,
                        urwid.Text([(key, kv[0] or "")])
                    ),
                    kv[1] if isinstance(kv[1], urwid.Widget) else urwid.Text([(val, kv[1])])
                ])
                ret.append(urwid.Columns(cols, dividechars = 2))
    return ret
|
||||
|
||||
|
||||
def shortcuts(k):
    """Map vi-style movement keys onto urwid's native cursor keys."""
    return {" ": "page down", "j": "down", "k": "up"}.get(k, k)
|
||||
|
||||
|
||||
def fcol(s, attr):
    """Wrap *s* as a fixed-width urwid text column with display *attr*."""
    text = unicode(s)
    return ("fixed", len(text), urwid.Text([(attr, text)]))
|
||||
|
||||
# Use unicode glyphs when the detected terminal encoding supports them,
# with plain-ASCII fallbacks otherwise.
if urwid.util.detected_encoding:
    SYMBOL_REPLAY = u"\u21ba"
    SYMBOL_RETURN = u"\u2190"
else:
    SYMBOL_REPLAY = u"[r]"
    SYMBOL_RETURN = u"<-"
|
||||
|
||||
|
||||
|
||||
def raw_format_flow(f, focus, extended, padding):
    """
    Build the one-or-two-row urwid widget for a flow summary line.

    *f* is a plain dict of flow attributes (see format_flow), which is
    what makes this function cacheable.  NOTE(review): *padding* is not
    used in the body -- it only participates in the cache key; confirm
    whether it was meant to affect layout.
    """
    f = dict(f)

    pile = []
    req = []
    if extended:
        req.append(
            fcol(
                utils.format_timestamp(f["req_timestamp"]),
                "highlight"
            )
        )
    else:
        req.append(fcol(">>" if focus else " ", "focus"))
    if f["req_is_replay"]:
        req.append(fcol(SYMBOL_REPLAY, "replay"))
    req.append(fcol(f["req_method"], "method"))

    # Width consumed by the fixed columns plus dividers; used to indent
    # the response row so it lines up under the URL.
    preamble = sum(i[1] for i in req) + len(req) - 1

    if f["intercepting"] and not f["req_acked"]:
        uc = "intercept"
    elif f["resp_code"] or f["err_msg"]:
        uc = "text"
    else:
        uc = "title"

    req.append(
        urwid.Text([(uc, f["req_url"])])
    )

    pile.append(urwid.Columns(req, dividechars=1))

    resp = []
    resp.append(
        ("fixed", preamble, urwid.Text(""))
    )

    if f["resp_code"]:
        # Colour the status by its class (2xx/3xx/4xx/5xx).
        codes = {
            2: "code_200",
            3: "code_300",
            4: "code_400",
            5: "code_500",
        }
        ccol = codes.get(f["resp_code"]/100, "code_other")
        resp.append(fcol(SYMBOL_RETURN, ccol))
        if f["resp_is_replay"]:
            resp.append(fcol(SYMBOL_REPLAY, "replay"))
        resp.append(fcol(f["resp_code"], ccol))
        if f["intercepting"] and f["resp_code"] and not f["resp_acked"]:
            rc = "intercept"
        else:
            rc = "text"

        if f["resp_ctype"]:
            resp.append(fcol(f["resp_ctype"], rc))
        resp.append(fcol(f["resp_clen"], rc))
    elif f["err_msg"]:
        resp.append(fcol(SYMBOL_RETURN, "error"))
        resp.append(
            urwid.Text([
                (
                    "error",
                    f["err_msg"]
                )
            ])
        )
    pile.append(urwid.Columns(resp, dividechars=1))
    return urwid.Pile(pile)
|
||||
|
||||
|
||||
class FlowCache:
    # Memoises formatted flow widgets so redraws don't rebuild them.
    # NOTE(review): utils.LRUCache appears to be a caching decorator
    # keyed on the call arguments -- confirm against libmproxy.utils.
    @utils.LRUCache(200)
    def format_flow(self, *args):
        return raw_format_flow(*args)
# Module-level singleton shared by all callers of format_flow().
flowcache = FlowCache()
|
||||
|
||||
|
||||
def format_flow(f, focus, extended=False, padding=2):
    """
    Flatten flow *f* into a plain dict and render it via the LRU-cached
    formatter; the dict (as a sorted tuple of items) is the cache key.
    """
    d = dict(
        intercepting = f.intercepting,

        req_timestamp = f.request.timestamp,
        req_is_replay = f.request.is_replay(),
        req_method = f.request.method,
        req_acked = f.request.acked,
        req_url = f.request.get_url(),

        err_msg = f.error.msg if f.error else None,
        # resp_code doubles as the "has a response" flag downstream;
        # it is overwritten (with the same value) in the update below.
        resp_code = f.response.code if f.response else None,
    )
    if f.response:
        d.update(dict(
            resp_code = f.response.code,
            resp_is_replay = f.response.is_replay(),
            resp_acked = f.response.acked,
            resp_clen = utils.pretty_size(len(f.response.content)) if f.response.content else "[empty content]"
        ))
        # First content-type header, without parameters.
        t = f.response.headers["content-type"]
        if t:
            d["resp_ctype"] = t[0].split(";")[0]
        else:
            d["resp_ctype"] = ""
    return flowcache.format_flow(tuple(sorted(d.items())), focus, extended, padding)
|
||||
|
||||
|
||||
|
||||
def int_version(v):
    """
    Convert a dotted version string (e.g. "0.9.9" or "0.9.8-1") to a
    single comparable integer: "0.9.9" -> 990.

    Only the first SIG components are significant; any "-suffix" is
    ignored.  Previously the *v* argument was ignored and
    urwid.__version__ was used instead; since the only visible caller
    passes urwid.__version__, honouring the parameter is behaviour-
    compatible and makes the function reusable.
    """
    SIG = 3
    parts = v.split("-")[0].split(".")
    x = 0
    for i in range(min(SIG, len(parts))):
        x += int(parts[i]) * 10**(SIG-i)
    return x
|
||||
|
||||
|
||||
# We have to do this to be portable over 0.9.8 and 0.9.9 If compatibility
|
||||
# becomes a pain to maintain, we'll just mandate 0.9.9 or newer.
|
||||
class WWrap(urwid.WidgetWrap):
    # urwid >= 0.9.9 stores the wrapped widget on self._w; re-expose it
    # as a writable .w property so code written for 0.9.8 (which used
    # .w directly) works on both versions.
    if int_version(urwid.__version__) >= 990:
        def set_w(self, x):
            self._w = x
        def get_w(self):
            return self._w
        w = property(get_w, set_w)
|
||||
|
||||
|
||||
331
libmproxy/console/contentview.py
Normal file
@@ -0,0 +1,331 @@
|
||||
import re, cStringIO
|
||||
import urwid
|
||||
from PIL import Image
|
||||
from PIL.ExifTags import TAGS
|
||||
import lxml.html, lxml.etree
|
||||
import common
|
||||
from .. import utils, encoding, flow
|
||||
from ..contrib import jsbeautifier
|
||||
|
||||
VIEW_CUTOFF = 1024*50
|
||||
|
||||
VIEW_AUTO = 0
|
||||
VIEW_JSON = 1
|
||||
VIEW_XML = 2
|
||||
VIEW_URLENCODED = 3
|
||||
VIEW_MULTIPART = 4
|
||||
VIEW_JAVASCRIPT = 5
|
||||
VIEW_IMAGE = 6
|
||||
VIEW_RAW = 7
|
||||
VIEW_HEX = 8
|
||||
VIEW_HTML = 9
|
||||
|
||||
VIEW_NAMES = {
|
||||
VIEW_AUTO: "Auto",
|
||||
VIEW_JSON: "JSON",
|
||||
VIEW_XML: "XML",
|
||||
VIEW_URLENCODED: "URL-encoded",
|
||||
VIEW_MULTIPART: "Multipart Form",
|
||||
VIEW_JAVASCRIPT: "JavaScript",
|
||||
VIEW_IMAGE: "Image",
|
||||
VIEW_RAW: "Raw",
|
||||
VIEW_HEX: "Hex",
|
||||
VIEW_HTML: "HTML",
|
||||
}
|
||||
|
||||
|
||||
VIEW_PROMPT = (
|
||||
("auto detect", "a"),
|
||||
("hex", "e"),
|
||||
("html", "h"),
|
||||
("image", "i"),
|
||||
("javascript", "j"),
|
||||
("json", "s"),
|
||||
("raw", "r"),
|
||||
("multipart", "m"),
|
||||
("urlencoded", "u"),
|
||||
("xml", "x"),
|
||||
)
|
||||
|
||||
VIEW_SHORTCUTS = {
|
||||
"a": VIEW_AUTO,
|
||||
"x": VIEW_XML,
|
||||
"h": VIEW_HTML,
|
||||
"i": VIEW_IMAGE,
|
||||
"j": VIEW_JAVASCRIPT,
|
||||
"s": VIEW_JSON,
|
||||
"u": VIEW_URLENCODED,
|
||||
"m": VIEW_MULTIPART,
|
||||
"r": VIEW_RAW,
|
||||
"e": VIEW_HEX,
|
||||
}
|
||||
|
||||
CONTENT_TYPES_MAP = {
|
||||
"text/html": VIEW_HTML,
|
||||
"application/json": VIEW_JSON,
|
||||
"text/xml": VIEW_XML,
|
||||
"multipart/form-data": VIEW_MULTIPART,
|
||||
"application/x-www-form-urlencoded": VIEW_URLENCODED,
|
||||
"application/x-javascript": VIEW_JAVASCRIPT,
|
||||
"application/javascript": VIEW_JAVASCRIPT,
|
||||
"text/javascript": VIEW_JAVASCRIPT,
|
||||
"image/png": VIEW_IMAGE,
|
||||
"image/jpeg": VIEW_IMAGE,
|
||||
"image/gif": VIEW_IMAGE,
|
||||
"image/vnd.microsoft.icon": VIEW_IMAGE,
|
||||
"image/x-icon": VIEW_IMAGE,
|
||||
}
|
||||
|
||||
def trailer(clen, txt, limit):
    """Append a "... N of data not shown" footer when content was cut."""
    remaining = clen - limit
    if remaining <= 0:
        return
    txt.append(urwid.Text(""))
    txt.append(
        urwid.Text(
            [
                ("highlight", "... %s of data not shown. Press "%utils.pretty_size(remaining)),
                ("key", "f"),
                ("highlight", " to load all data.")
            ]
        )
    )
|
||||
|
||||
|
||||
def _view_text(content, total, limit):
    """Generate a body for a chunk of text, one wrapped row per line."""
    txt = [
        urwid.Text(("text", line), wrap="any")
        for line in utils.cleanBin(content).splitlines()
    ]
    trailer(total, txt, limit)
    return txt
|
||||
|
||||
|
||||
def view_raw(hdrs, content, limit):
    """Render content verbatim (cleaned), truncated to *limit*."""
    return "Raw", _view_text(content[:limit], len(content), limit)
|
||||
|
||||
|
||||
def view_hex(hdrs, content, limit):
    """Render content as a classic offset / hex / ASCII dump."""
    txt = [
        urwid.Text([
            ("offset", offset),
            " ",
            ("text", hexa),
            " ",
            ("text", plain),
        ])
        for offset, hexa, plain in utils.hexdump(content[:limit])
    ]
    trailer(len(content), txt, limit)
    return "Hex", txt
|
||||
|
||||
|
||||
def view_xml(hdrs, content, limit):
    """
    Pretty-print XML.  Returns None (so the caller falls back to the
    raw view) when the content isn't well-formed.
    """
    parser = lxml.etree.XMLParser(remove_blank_text=True, resolve_entities=False, strip_cdata=False, recover=False)
    try:
        document = lxml.etree.fromstring(content, parser)
    except lxml.etree.XMLSyntaxError:
        return None
    docinfo = document.getroottree().docinfo

    # Nodes that precede the root element (comments, processing
    # instructions) are not reproduced by tostring(), so collect them
    # and splice them in after the doctype.
    prev = []
    p = document.getroottree().getroot().getprevious()
    while p is not None:
        prev.insert(
            0,
            lxml.etree.tostring(p)
        )
        p = p.getprevious()
    doctype=docinfo.doctype
    if prev:
        doctype += "\n".join(prev).strip()
    doctype = doctype.strip()

    s = lxml.etree.tostring(
        document,
        pretty_print=True,
        xml_declaration=True,
        doctype=doctype or None,
        encoding = docinfo.encoding
    )

    txt = []
    for i in s[:limit].strip().split("\n"):
        txt.append(
            urwid.Text(("text", i)),
        )
    trailer(len(content), txt, limit)
    return "XML-like data", txt
|
||||
|
||||
|
||||
def view_html(hdrs, content, limit):
    """
    Pretty-print HTML.  Implicitly returns None (falling back to the
    raw view) when the content doesn't look like markup at all.
    """
    if utils.isXML(content):
        parser = lxml.etree.HTMLParser(strip_cdata=True, remove_blank_text=True)
        d = lxml.html.fromstring(content, parser=parser)
        docinfo = d.getroottree().docinfo
        s = lxml.etree.tostring(d, pretty_print=True, doctype=docinfo.doctype)
        return "HTML", _view_text(s[:limit], len(s), limit)
|
||||
|
||||
|
||||
def view_json(hdrs, content, limit):
    """Pretty-print JSON, stopping once *limit* characters are shown."""
    lines = utils.pretty_json(content)
    if not lines:
        return
    txt = []
    shown = 0
    for line in lines:
        shown += len(line)
        txt.append(
            urwid.Text(("text", line)),
        )
        if shown > limit:
            break
    trailer(sum(len(line) for line in lines), txt, limit)
    return "JSON", txt
|
||||
|
||||
|
||||
def view_multipart(hdrs, content, limit):
    """
    Render a multipart/form-data body as a name/value table.

    Returns None when the content-type header or its boundary parameter
    is missing, so the caller falls back to the raw view.
    """
    v = hdrs.get("content-type")
    if v:
        v = utils.parse_content_type(v[0])
        if not v:
            return
        boundary = v[2].get("boundary")
        if not boundary:
            return

        rx = re.compile(r'\bname="([^"]+)"')
        keys = []
        vals = []

        for i in content.split("--" + boundary):
            parts = i.splitlines()
            # Skip the terminal "--" marker and degenerate segments.
            if len(parts) > 1 and parts[0][0:2] != "--":
                match = rx.search(parts[1])
                if match:
                    keys.append(match.group(1) + ":")
                    # The part body starts after the first blank line
                    # following the part headers.
                    vals.append(utils.cleanBin(
                        "\n".join(parts[3+parts[2:].index(""):])
                    ))
        r = [
            urwid.Text(("highlight", "Form data:\n")),
        ]
        r.extend(common.format_keyvals(
            zip(keys, vals),
            key = "header",
            val = "text"
        ))
        return "Multipart form", r
|
||||
|
||||
|
||||
def view_urlencoded(hdrs, content, limit):
    """Render an x-www-form-urlencoded body as a key/value table."""
    pairs = utils.urldecode(content)
    if not pairs:
        return
    body = common.format_keyvals(
        [(k + ":", v) for (k, v) in pairs],
        key="header",
        val="text"
    )
    return "URLEncoded form", body
|
||||
|
||||
|
||||
def view_javascript(hdrs, content, limit):
    """Beautify JavaScript with the bundled jsbeautifier."""
    opts = jsbeautifier.default_options()
    opts.indent_size = 2
    try:
        res = jsbeautifier.beautify(content[:limit], opts)
    # begin nocover
    except:
        # Bugs in jsbeautifier mean that it can throw arbitrary errors;
        # the bare except is deliberate -- fall back to the raw view.
        return None
    # end nocover
    return "JavaScript", _view_text(res, len(content), limit)
|
||||
|
||||
|
||||
def view_image(hdrs, content, limit):
    """
    Show image metadata (format, size, mode, info and EXIF tags) rather
    than the binary data.  Returns None if PIL can't parse the content.
    """
    try:
        img = Image.open(cStringIO.StringIO(content))
    except IOError:
        return None
    parts = [
        ("Format", str(img.format_description)),
        ("Size", "%s x %s px"%img.size),
        ("Mode", str(img.mode)),
    ]
    for i in sorted(img.info.keys()):
        # EXIF is handled separately below, with decoded tag names.
        if i != "exif":
            parts.append(
                (str(i), str(img.info[i]))
            )
    if hasattr(img, "_getexif"):
        ex = img._getexif()
        if ex:
            for i in sorted(ex.keys()):
                tag = TAGS.get(i, i)
                parts.append(
                    (str(tag), str(ex[i]))
                )
    # Scrub non-printable bytes before handing the strings to urwid.
    clean = []
    for i in parts:
        clean.append([utils.cleanBin(i[0]), utils.cleanBin(i[1])])
    fmt = common.format_keyvals(
        clean,
        key = "header",
        val = "text"
    )
    return "%s image"%img.format, fmt
|
||||
|
||||
|
||||
PRETTY_FUNCTION_MAP = {
|
||||
VIEW_XML: view_xml,
|
||||
VIEW_HTML: view_html,
|
||||
VIEW_JSON: view_json,
|
||||
VIEW_URLENCODED: view_urlencoded,
|
||||
VIEW_MULTIPART: view_multipart,
|
||||
VIEW_JAVASCRIPT: view_javascript,
|
||||
VIEW_IMAGE: view_image,
|
||||
VIEW_HEX: view_hex,
|
||||
VIEW_RAW: view_raw,
|
||||
}
|
||||
|
||||
def get_view_func(viewmode, hdrs, content):
    """Resolve *viewmode* (possibly VIEW_AUTO) to a view function."""
    if viewmode == VIEW_AUTO:
        ctype = hdrs.get("content-type")
        if ctype:
            ctype = ctype[0]
        ct = utils.parse_content_type(ctype) if ctype else None
        if ct:
            viewmode = CONTENT_TYPES_MAP.get("%s/%s" % (ct[0], ct[1]))
        # Unknown content-type but XML-looking body: prefer the XML view.
        if not viewmode and utils.isXML(content):
            viewmode = VIEW_XML
    return PRETTY_FUNCTION_MAP.get(viewmode, view_raw)
|
||||
|
||||
|
||||
def get_content_view(viewmode, hdrItems, content, limit):
    """
    Render *content* and return a (description, widget-list) tuple.

    Transparently decodes content-encoding first; if the chosen view
    can't parse the data we fall back to the raw view and say so in
    the description.
    """
    msg = []

    hdrs = flow.ODictCaseless([list(i) for i in hdrItems])

    enc = hdrs.get("content-encoding")
    if enc and enc[0] != "identity":
        decoded = encoding.decode(enc[0], content)
        # NOTE(review): an empty-but-successful decode is treated the
        # same as a failed one here -- confirm that's intended.
        if decoded:
            content = decoded
            msg.append("[decoded %s]"%enc[0])
    func = get_view_func(viewmode, hdrs, content)
    ret = func(hdrs, content, limit)
    if not ret:
        viewmode = VIEW_RAW
        ret = view_raw(hdrs, content, limit)
        msg.append("Couldn't parse: falling back to Raw")
    else:
        msg.append(ret[0])
    return " ".join(msg), ret[1]
|
||||
102
libmproxy/console/flowdetailview.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urwid
|
||||
import common
|
||||
|
||||
footer = [
|
||||
('heading_key', "q"), ":back ",
|
||||
]
|
||||
|
||||
class FlowDetailsView(urwid.ListBox):
    """Read-only listing of connection-level details for one flow."""

    def __init__(self, master, flow, state):
        # *state* is the (statusbar, body, header) triple to restore
        # when the user backs out with "q".
        self.master, self.flow, self.state = master, flow, state
        urwid.ListBox.__init__(
            self,
            self.flowtext()
        )

    def keypress(self, size, key):
        key = common.shortcuts(key)
        if key == "q":
            # Restore the saved view components and rebuild the frame.
            self.master.statusbar = self.state[0]
            self.master.body = self.state[1]
            self.master.header = self.state[2]
            self.master.make_view()
            return None
        elif key == "?":
            # Swallow "?" so the help view isn't stacked on top of us.
            key = None
        return urwid.ListBox.keypress(self, size, key)

    def flowtext(self):
        """Build the widget list: title, server cert, client connection."""
        text = []

        title = urwid.Text("Flow details")
        title = urwid.Padding(title, align="left", width=("relative", 100))
        title = urwid.AttrWrap(title, "heading")
        text.append(title)

        if self.flow.response:
            c = self.flow.response.get_cert()
            if c:
                text.append(urwid.Text([("head", "Server Certificate:")]))
                parts = [
                    ["Type", "%s, %s bits"%c.keyinfo],
                    ["SHA1 digest", c.digest("sha1")],
                    ["Valid to", str(c.notafter)],
                    ["Valid from", str(c.notbefore)],
                    ["Serial", str(c.serial)],
                ]

                # Subject/issuer are themselves key/value lists, shown
                # as nested boxed tables.
                parts.append(
                    [
                        "Subject",
                        urwid.BoxAdapter(
                            urwid.ListBox(common.format_keyvals(c.subject, key="highlight", val="text")),
                            len(c.subject)
                        )
                    ]
                )

                parts.append(
                    [
                        "Issuer",
                        urwid.BoxAdapter(
                            urwid.ListBox(common.format_keyvals(c.issuer, key="highlight", val="text")),
                            len(c.issuer)
                        )
                    ]
                )

                if c.altnames:
                    parts.append(
                        [
                            "Alt names",
                            ", ".join(c.altnames)
                        ]
                    )
                text.extend(common.format_keyvals(parts, key="key", val="text", indent=4))

        if self.flow.request.client_conn:
            text.append(urwid.Text([("head", "Client Connection:")]))
            cc = self.flow.request.client_conn
            parts = [
                ["Address", "%s:%s"%tuple(cc.address)],
                ["Requests", "%s"%cc.requestcount],
                ["Closed", "%s"%cc.close],
            ]
            text.extend(common.format_keyvals(parts, key="key", val="text", indent=4))

        return text
|
||||
==== New file: libmproxy/console/flowlist.py (+218 lines) ====
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urwid
|
||||
import common
|
||||
|
||||
def _mkhelp():
    """
    Build the help text (key/binding table) shown for the flow list view.
    """
    text = []
    keys = [
        ("A", "accept all intercepted flows"),
        # Fixed grammar: "this intercepted flows" -> "this intercepted flow"
        # (matches the wording used in flowview's help table).
        ("a", "accept this intercepted flow"),
        ("C", "clear flow list or eventlog"),
        ("d", "delete flow"),
        ("D", "duplicate flow"),
        ("e", "toggle eventlog"),
        ("l", "set limit filter pattern"),
        ("L", "load saved flows"),
        ("r", "replay request"),
        ("V", "revert changes to request"),
        ("w", "save all flows matching current limit"),
        ("W", "save this flow"),
        ("X", "kill and delete flow, even if it's mid-intercept"),
        ("tab", "tab between eventlog and flow list"),
        ("enter", "view flow"),
        ("|", "run script on this flow"),
    ]
    text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
    return text
help_context = _mkhelp()
|
||||
|
||||
footer = [
|
||||
('heading_key', "?"), ":help ",
|
||||
]
|
||||
|
||||
class EventListBox(urwid.ListBox):
    """ListBox over the master's event log; "C" clears the log."""
    def __init__(self, master):
        self.master = master
        urwid.ListBox.__init__(self, master.eventlist)

    def keypress(self, size, key):
        mapped = common.shortcuts(key)
        if mapped != "C":
            return urwid.ListBox.keypress(self, size, mapped)
        # Clear the event log and consume the key.
        self.master.clear_events()
        return urwid.ListBox.keypress(self, size, None)
|
||||
|
||||
|
||||
class BodyPile(urwid.Pile):
    """
    Vertical split of the flow list (top) and the event log (bottom).
    "tab" cycles focus between the two halves; the event log header
    style reflects which half is active.
    """
    def __init__(self, master):
        h = urwid.Text("Event log")
        h = urwid.Padding(h, align="left", width=("relative", 100))

        self.inactive_header = urwid.AttrWrap(h, "heading_inactive")
        self.active_header = urwid.AttrWrap(h, "heading")

        urwid.Pile.__init__(
            self,
            [
                FlowListBox(master),
                urwid.Frame(EventListBox(master), header = self.inactive_header)
            ]
        )
        self.master = master
        # Index of the focused pile element: 0 = flow list, 1 = event log.
        self.focus = 0

    def keypress(self, size, key):
        if key == "tab":
            # Cycle focus and restyle the event log header to show it.
            self.focus = (self.focus + 1)%len(self.widget_list)
            self.set_focus(self.focus)
            if self.focus == 1:
                self.widget_list[1].header = self.active_header
            else:
                self.widget_list[1].header = self.inactive_header
            key = None
        elif key == "v":
            self.master.toggle_eventlog()
            key = None

        # This is essentially a copypasta from urwid.Pile's keypress handler.
        # So much for "closed for modification, but open for extension".
        item_rows = None
        if len(size)==2:
            item_rows = self.get_item_rows( size, focus=True )
        i = self.widget_list.index(self.focus_item)
        tsize = self.get_item_size(size,i,True,item_rows)
        return self.focus_item.keypress( tsize, key )
|
||||
|
||||
|
||||
class ConnectionItem(common.WWrap):
    """
    One row in the flow list.  Renders the flow summary and handles
    per-flow key bindings (accept, delete, duplicate, replay, save,
    kill, open, run-script).
    """
    def __init__(self, master, state, flow, focus):
        self.master, self.state, self.flow = master, state, flow
        # focus: whether this row is the currently focused list entry.
        self.focus = focus
        w = self.get_text()
        common.WWrap.__init__(self, w)

    def get_text(self):
        return common.format_flow(self.flow, self.focus)

    def selectable(self):
        return True

    # NOTE: Python 2 tuple-parameter unpacking — size arrives as (maxcol,).
    def keypress(self, (maxcol,), key):
        key = common.shortcuts(key)
        if key == "a":
            self.flow.accept_intercept()
            self.master.sync_list_view()
        elif key == "d":
            self.flow.kill(self.master)
            self.state.delete_flow(self.flow)
            self.master.sync_list_view()
        elif key == "D":
            f = self.master.duplicate_flow(self.flow)
            self.master.currentflow = f
            self.master.focus_current()
        elif key == "r":
            # Back up before replay so the flow can be reverted later.
            self.flow.backup()
            r = self.master.replay_request(self.flow)
            if r:
                self.master.statusbar.message(r)
            self.master.sync_list_view()
        elif key == "V":
            if not self.flow.modified():
                self.master.statusbar.message("Flow not modified.")
                return
            self.state.revert(self.flow)
            self.master.sync_list_view()
            self.master.statusbar.message("Reverted.")
        elif key == "w":
            self.master.path_prompt(
                "Save flows: ",
                self.state.last_saveload,
                self.master.save_flows
            )
        elif key == "W":
            self.master.path_prompt(
                "Save this flow: ",
                self.state.last_saveload,
                self.master.save_one_flow,
                self.flow
            )
        elif key == "X":
            # Kill unconditionally, even mid-intercept.
            self.flow.kill(self.master)
        elif key == "enter":
            if self.flow.request:
                self.master.view_flow(self.flow)
        elif key == "|":
            self.master.path_prompt(
                "Send flow to script: ",
                self.state.last_script,
                self.master.run_script_once,
                self.flow
            )
        else:
            # Unhandled: let the containing ListBox process the key.
            return key
|
||||
|
||||
|
||||
class FlowListWalker(urwid.ListWalker):
    """
    urwid ListWalker backed by the console State object; wraps each
    flow in a ConnectionItem widget on demand.
    """
    def __init__(self, master, state):
        self.master, self.state = master, state
        if self.state.flow_count():
            self.set_focus(0)

    def _item(self, f, focused):
        # Wrap a flow in a row widget; None passes through unchanged.
        if not f:
            return None
        return ConnectionItem(self.master, self.state, f, focused)

    def get_focus(self):
        f, i = self.state.get_focus()
        return self._item(f, True), i

    def set_focus(self, focus):
        return self.state.set_focus(focus)

    def get_next(self, pos):
        f, i = self.state.get_next(pos)
        return self._item(f, False), i

    def get_prev(self, pos):
        f, i = self.state.get_prev(pos)
        return self._item(f, False), i
|
||||
|
||||
|
||||
class FlowListBox(urwid.ListBox):
    """Main flow-list widget; handles list-level key bindings."""
    def __init__(self, master):
        self.master = master
        urwid.ListBox.__init__(self, master.flow_list_walker)

    def keypress(self, size, key):
        key = common.shortcuts(key)
        if key == "A":
            self.master.accept_all()
            self.master.sync_list_view()
            return
        if key == "C":
            self.master.clear_flows()
            return
        if key == "e":
            self.master.toggle_eventlog()
            return
        if key == "l":
            self.master.prompt("Limit: ", self.master.state.limit_txt, self.master.set_limit)
            return
        if key == "L":
            self.master.path_prompt(
                "Load flows: ",
                self.master.state.last_saveload,
                self.master.load_flows_callback
            )
            return
        # Anything else is scrolling/navigation handled by the ListBox.
        return urwid.ListBox.keypress(self, size, key)
|
||||
==== New file: libmproxy/console/flowview.py (+554 lines) ====
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os, sys
|
||||
import urwid
|
||||
import common, grideditor, contentview
|
||||
from .. import utils, flow
|
||||
|
||||
def _mkhelp():
    """
    Build the help text (key/binding table) for the single-flow view.
    The (None, ...) entries document the display-mode submenu reached
    via "m".
    """
    text = []
    keys = [
        ("A", "accept all intercepted flows"),
        ("a", "accept this intercepted flow"),
        ("b", "save request/response body"),
        ("d", "delete flow"),
        ("D", "duplicate flow"),
        ("e", "edit request/response"),
        ("f", "load full body data"),
        ("m", "change body display mode for this entity"),
        (None,
            common.highlight_key("automatic", "a") +
            [("text", ": automatic detection")]
        ),
        (None,
            common.highlight_key("hex", "h") +
            [("text", ": Hex")]
        ),
        (None,
            common.highlight_key("image", "i") +
            [("text", ": Image")]
        ),
        (None,
            common.highlight_key("javascript", "j") +
            [("text", ": JavaScript")]
        ),
        (None,
            common.highlight_key("json", "s") +
            [("text", ": JSON")]
        ),
        (None,
            common.highlight_key("urlencoded", "u") +
            [("text", ": URL-encoded data")]
        ),
        (None,
            common.highlight_key("raw", "r") +
            [("text", ": raw data")]
        ),
        (None,
            common.highlight_key("xml", "x") +
            [("text", ": XML")]
        ),
        ("M", "change default body display mode"),
        ("p", "previous flow"),
        ("r", "replay request"),
        ("V", "revert changes to request"),
        ("v", "view body in external viewer"),
        ("w", "save all flows matching current limit"),
        ("W", "save this flow"),
        ("X", "view flow details"),
        ("z", "encode/decode a request/response"),
        ("tab", "toggle request/response view"),
        ("space", "next flow"),
        ("|", "run script on this flow"),
    ]
    text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
    return text
help_context = _mkhelp()
|
||||
|
||||
footer = [
|
||||
('heading_key', "?"), ":help ",
|
||||
('heading_key', "q"), ":back ",
|
||||
]
|
||||
|
||||
|
||||
class FlowViewHeader(common.WWrap):
    """Header widget showing a summary of the flow being viewed."""
    def __init__(self, master, f):
        self.master, self.flow = master, f
        self.w = self._render(f)

    def _render(self, f):
        # Extended single-flow summary, unfocused, no leading padding.
        return common.format_flow(f, False, extended=True, padding=0)

    def refresh_flow(self, f):
        # Only re-render when the update concerns the flow we display.
        if f == self.flow:
            self.w = self._render(f)
|
||||
|
||||
|
||||
class CallbackCache:
    """
    Memoizes method dispatches made through callback().  The LRU
    decorator wraps _callback, so the visible cache key is
    (method, *args, **kwargs).
    NOTE(review): self.obj is swapped in before every call and is not
    an argument to _callback — presumably utils.LRUCache accounts for
    object identity, otherwise a hit for a different obj could return
    a stale result.  Confirm against utils.LRUCache.
    """
    @utils.LRUCache(200)
    def _callback(self, method, *args, **kwargs):
        return getattr(self.obj, method)(*args, **kwargs)

    def callback(self, obj, method, *args, **kwargs):
        # obj varies!
        self.obj = obj
        return self._callback(method, *args, **kwargs)
# Module-level singleton used by FlowView to cache rendered body views.
cache = CallbackCache()
|
||||
|
||||
|
||||
class FlowView(common.WWrap):
|
||||
REQ = 0
|
||||
RESP = 1
|
||||
method_options = [
|
||||
("get", "g"),
|
||||
("post", "p"),
|
||||
("put", "u"),
|
||||
("head", "h"),
|
||||
("trace", "t"),
|
||||
("delete", "d"),
|
||||
("options", "o"),
|
||||
("edit raw", "e"),
|
||||
]
|
||||
def __init__(self, master, state, flow):
|
||||
self.master, self.state, self.flow = master, state, flow
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_RESPONSE:
|
||||
self.view_response()
|
||||
else:
|
||||
self.view_request()
|
||||
|
||||
def _cached_content_view(self, viewmode, hdrItems, content, limit):
|
||||
return contentview.get_content_view(viewmode, hdrItems, content, limit)
|
||||
|
||||
def content_view(self, viewmode, conn):
|
||||
full = self.state.get_flow_setting(
|
||||
self.flow,
|
||||
(self.state.view_flow_mode, "fullcontents"),
|
||||
False
|
||||
)
|
||||
if full:
|
||||
limit = sys.maxint
|
||||
else:
|
||||
limit = contentview.VIEW_CUTOFF
|
||||
return cache.callback(
|
||||
self, "_cached_content_view",
|
||||
viewmode,
|
||||
tuple(tuple(i) for i in conn.headers.lst),
|
||||
conn.content,
|
||||
limit
|
||||
)
|
||||
|
||||
def conn_text(self, conn):
|
||||
txt = common.format_keyvals(
|
||||
[(h+":", v) for (h, v) in conn.headers.lst],
|
||||
key = "header",
|
||||
val = "text"
|
||||
)
|
||||
if conn.content:
|
||||
override = self.state.get_flow_setting(
|
||||
self.flow,
|
||||
(self.state.view_flow_mode, "prettyview"),
|
||||
)
|
||||
viewmode = self.state.default_body_view if override is None else override
|
||||
|
||||
msg, body = self.content_view(viewmode, conn)
|
||||
|
||||
cols = [
|
||||
urwid.Text(
|
||||
[
|
||||
("heading", msg),
|
||||
]
|
||||
)
|
||||
]
|
||||
if override is not None:
|
||||
cols.append(
|
||||
urwid.Text(
|
||||
[
|
||||
" ",
|
||||
('heading', "["),
|
||||
('heading_key', "m"),
|
||||
('heading', (":%s]"%contentview.VIEW_NAMES[viewmode])),
|
||||
],
|
||||
align="right"
|
||||
)
|
||||
)
|
||||
title = urwid.AttrWrap(urwid.Columns(cols), "heading")
|
||||
txt.append(title)
|
||||
txt.extend(body)
|
||||
return urwid.ListBox(txt)
|
||||
|
||||
def _tab(self, content, attr):
|
||||
p = urwid.Text(content)
|
||||
p = urwid.Padding(p, align="left", width=("relative", 100))
|
||||
p = urwid.AttrWrap(p, attr)
|
||||
return p
|
||||
|
||||
def wrap_body(self, active, body):
|
||||
parts = []
|
||||
|
||||
if self.flow.intercepting and not self.flow.request.acked:
|
||||
qt = "Request intercepted"
|
||||
else:
|
||||
qt = "Request"
|
||||
if active == common.VIEW_FLOW_REQUEST:
|
||||
parts.append(self._tab(qt, "heading"))
|
||||
else:
|
||||
parts.append(self._tab(qt, "heading_inactive"))
|
||||
|
||||
if self.flow.intercepting and self.flow.response and not self.flow.response.acked:
|
||||
st = "Response intercepted"
|
||||
else:
|
||||
st = "Response"
|
||||
if active == common.VIEW_FLOW_RESPONSE:
|
||||
parts.append(self._tab(st, "heading"))
|
||||
else:
|
||||
parts.append(self._tab(st, "heading_inactive"))
|
||||
|
||||
h = urwid.Columns(parts)
|
||||
f = urwid.Frame(
|
||||
body,
|
||||
header=h
|
||||
)
|
||||
return f
|
||||
|
||||
def view_request(self):
|
||||
self.state.view_flow_mode = common.VIEW_FLOW_REQUEST
|
||||
body = self.conn_text(self.flow.request)
|
||||
self.w = self.wrap_body(common.VIEW_FLOW_REQUEST, body)
|
||||
self.master.statusbar.redraw()
|
||||
|
||||
def view_response(self):
|
||||
self.state.view_flow_mode = common.VIEW_FLOW_RESPONSE
|
||||
if self.flow.response:
|
||||
body = self.conn_text(self.flow.response)
|
||||
else:
|
||||
body = urwid.ListBox(
|
||||
[
|
||||
urwid.Text(""),
|
||||
urwid.Text(
|
||||
[
|
||||
("highlight", "No response. Press "),
|
||||
("key", "e"),
|
||||
("highlight", " and edit any aspect to add one."),
|
||||
]
|
||||
)
|
||||
]
|
||||
)
|
||||
self.w = self.wrap_body(common.VIEW_FLOW_RESPONSE, body)
|
||||
self.master.statusbar.redraw()
|
||||
|
||||
def refresh_flow(self, c=None):
|
||||
if c == self.flow:
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_RESPONSE and self.flow.response:
|
||||
self.view_response()
|
||||
else:
|
||||
self.view_request()
|
||||
|
||||
def set_method_raw(self, m):
|
||||
if m:
|
||||
self.flow.request.method = m
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def edit_method(self, m):
|
||||
if m == "e":
|
||||
self.master.prompt_edit("Method", self.flow.request.method, self.set_method_raw)
|
||||
else:
|
||||
for i in self.method_options:
|
||||
if i[1] == m:
|
||||
self.flow.request.method = i[0].upper()
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def save_body(self, path):
|
||||
if not path:
|
||||
return
|
||||
self.state.last_saveload = path
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
c = self.flow.request
|
||||
else:
|
||||
c = self.flow.response
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
f = file(path, "wb")
|
||||
f.write(str(c.content))
|
||||
f.close()
|
||||
except IOError, v:
|
||||
self.master.statusbar.message(v.strerror)
|
||||
|
||||
def set_url(self, url):
|
||||
request = self.flow.request
|
||||
if not request.set_url(str(url)):
|
||||
return "Invalid URL."
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def set_resp_code(self, code):
|
||||
response = self.flow.response
|
||||
try:
|
||||
response.code = int(code)
|
||||
except ValueError:
|
||||
return None
|
||||
import BaseHTTPServer
|
||||
if BaseHTTPServer.BaseHTTPRequestHandler.responses.has_key(int(code)):
|
||||
response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[int(code)][0]
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def set_resp_msg(self, msg):
|
||||
response = self.flow.response
|
||||
response.msg = msg
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def set_headers(self, lst, conn):
|
||||
conn.headers = flow.ODictCaseless(lst)
|
||||
|
||||
def set_query(self, lst, conn):
|
||||
conn.set_query(flow.ODict(lst))
|
||||
|
||||
def set_form(self, lst, conn):
|
||||
conn.set_form_urlencoded(flow.ODict(lst))
|
||||
|
||||
def edit_form(self, conn):
|
||||
self.master.view_grideditor(
|
||||
grideditor.URLEncodedFormEditor(self.master, conn.get_form_urlencoded().lst, self.set_form, conn)
|
||||
)
|
||||
|
||||
def edit_form_confirm(self, key, conn):
|
||||
if key == "y":
|
||||
self.edit_form(conn)
|
||||
|
||||
def edit(self, part):
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
conn = self.flow.request
|
||||
else:
|
||||
if not self.flow.response:
|
||||
self.flow.response = flow.Response(self.flow.request, 200, "OK", flow.ODictCaseless(), "")
|
||||
conn = self.flow.response
|
||||
|
||||
self.flow.backup()
|
||||
if part == "r":
|
||||
c = self.master.spawn_editor(conn.content or "")
|
||||
conn.content = c.rstrip("\n")
|
||||
elif part == "f":
|
||||
if not conn.get_form_urlencoded() and conn.content:
|
||||
self.master.prompt_onekey(
|
||||
"Existing body is not a URL-encoded form. Clear and edit?",
|
||||
[
|
||||
("yes", "y"),
|
||||
("no", "n"),
|
||||
],
|
||||
self.edit_form_confirm,
|
||||
conn
|
||||
)
|
||||
else:
|
||||
self.edit_form(conn)
|
||||
elif part == "h":
|
||||
self.master.view_grideditor(grideditor.HeaderEditor(self.master, conn.headers.lst, self.set_headers, conn))
|
||||
elif part == "q":
|
||||
self.master.view_grideditor(grideditor.QueryEditor(self.master, conn.get_query().lst, self.set_query, conn))
|
||||
elif part == "u" and self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
self.master.prompt_edit("URL", conn.get_url(), self.set_url)
|
||||
elif part == "m" and self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
self.master.prompt_onekey("Method", self.method_options, self.edit_method)
|
||||
elif part == "c" and self.state.view_flow_mode == common.VIEW_FLOW_RESPONSE:
|
||||
self.master.prompt_edit("Code", str(conn.code), self.set_resp_code)
|
||||
elif part == "m" and self.state.view_flow_mode == common.VIEW_FLOW_RESPONSE:
|
||||
self.master.prompt_edit("Message", conn.msg, self.set_resp_msg)
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def _view_nextprev_flow(self, np, flow):
|
||||
try:
|
||||
idx = self.state.view.index(flow)
|
||||
except IndexError:
|
||||
return
|
||||
if np == "next":
|
||||
new_flow, new_idx = self.state.get_next(idx)
|
||||
else:
|
||||
new_flow, new_idx = self.state.get_prev(idx)
|
||||
if new_idx is None:
|
||||
self.master.statusbar.message("No more flows!")
|
||||
return
|
||||
self.master.view_flow(new_flow)
|
||||
|
||||
def view_next_flow(self, flow):
|
||||
return self._view_nextprev_flow("next", flow)
|
||||
|
||||
def view_prev_flow(self, flow):
|
||||
return self._view_nextprev_flow("prev", flow)
|
||||
|
||||
def change_this_display_mode(self, t):
|
||||
self.state.add_flow_setting(
|
||||
self.flow,
|
||||
(self.state.view_flow_mode, "prettyview"),
|
||||
contentview.VIEW_SHORTCUTS.get(t)
|
||||
)
|
||||
self.master.refresh_flow(self.flow)
|
||||
|
||||
def keypress(self, size, key):
|
||||
if key == " ":
|
||||
self.view_next_flow(self.flow)
|
||||
return
|
||||
|
||||
key = common.shortcuts(key)
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
conn = self.flow.request
|
||||
else:
|
||||
conn = self.flow.response
|
||||
|
||||
if key == "q":
|
||||
self.master.view_flowlist()
|
||||
key = None
|
||||
elif key == "tab":
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
self.view_response()
|
||||
else:
|
||||
self.view_request()
|
||||
elif key in ("up", "down", "page up", "page down"):
|
||||
# Why doesn't this just work??
|
||||
self.w.keypress(size, key)
|
||||
elif key == "a":
|
||||
self.flow.accept_intercept()
|
||||
self.master.view_flow(self.flow)
|
||||
elif key == "A":
|
||||
self.master.accept_all()
|
||||
self.master.view_flow(self.flow)
|
||||
elif key == "b":
|
||||
if conn:
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
self.master.path_prompt(
|
||||
"Save request body: ",
|
||||
self.state.last_saveload,
|
||||
self.save_body
|
||||
)
|
||||
else:
|
||||
self.master.path_prompt(
|
||||
"Save response body: ",
|
||||
self.state.last_saveload,
|
||||
self.save_body
|
||||
)
|
||||
elif key == "d":
|
||||
if self.state.flow_count() == 1:
|
||||
self.master.view_flowlist()
|
||||
elif self.state.view.index(self.flow) == len(self.state.view)-1:
|
||||
self.view_prev_flow(self.flow)
|
||||
else:
|
||||
self.view_next_flow(self.flow)
|
||||
f = self.flow
|
||||
f.kill(self.master)
|
||||
self.state.delete_flow(f)
|
||||
elif key == "D":
|
||||
f = self.master.duplicate_flow(self.flow)
|
||||
self.master.view_flow(f)
|
||||
self.master.currentflow = f
|
||||
self.master.statusbar.message("Duplicated.")
|
||||
elif key == "e":
|
||||
if self.state.view_flow_mode == common.VIEW_FLOW_REQUEST:
|
||||
self.master.prompt_onekey(
|
||||
"Edit request",
|
||||
(
|
||||
("query", "q"),
|
||||
("form", "f"),
|
||||
("url", "u"),
|
||||
("header", "h"),
|
||||
("raw body", "r"),
|
||||
("method", "m"),
|
||||
),
|
||||
self.edit
|
||||
)
|
||||
else:
|
||||
self.master.prompt_onekey(
|
||||
"Edit response",
|
||||
(
|
||||
("code", "c"),
|
||||
("message", "m"),
|
||||
("header", "h"),
|
||||
("raw body", "r"),
|
||||
),
|
||||
self.edit
|
||||
)
|
||||
key = None
|
||||
elif key == "f":
|
||||
self.master.statusbar.message("Loading all body data...")
|
||||
self.state.add_flow_setting(
|
||||
self.flow,
|
||||
(self.state.view_flow_mode, "fullcontents"),
|
||||
True
|
||||
)
|
||||
self.master.refresh_flow(self.flow)
|
||||
self.master.statusbar.message("")
|
||||
elif key == "m":
|
||||
p = list(contentview.VIEW_PROMPT)
|
||||
p.insert(0, ("clear", "c"))
|
||||
self.master.prompt_onekey(
|
||||
"Display mode",
|
||||
p,
|
||||
self.change_this_display_mode
|
||||
)
|
||||
key = None
|
||||
elif key == "p":
|
||||
self.view_prev_flow(self.flow)
|
||||
elif key == "r":
|
||||
self.flow.backup()
|
||||
r = self.master.replay_request(self.flow)
|
||||
if r:
|
||||
self.master.statusbar.message(r)
|
||||
self.master.refresh_flow(self.flow)
|
||||
elif key == "V":
|
||||
if not self.flow.modified():
|
||||
self.master.statusbar.message("Flow not modified.")
|
||||
return
|
||||
self.state.revert(self.flow)
|
||||
self.master.refresh_flow(self.flow)
|
||||
self.master.statusbar.message("Reverted.")
|
||||
elif key == "W":
|
||||
self.master.path_prompt(
|
||||
"Save this flow: ",
|
||||
self.state.last_saveload,
|
||||
self.master.save_one_flow,
|
||||
self.flow
|
||||
)
|
||||
elif key == "v":
|
||||
if conn and conn.content:
|
||||
t = conn.headers["content-type"] or [None]
|
||||
t = t[0]
|
||||
self.master.spawn_external_viewer(conn.content, t)
|
||||
elif key == "|":
|
||||
self.master.path_prompt(
|
||||
"Send flow to script: ", self.state.last_script,
|
||||
self.master.run_script_once, self.flow
|
||||
)
|
||||
elif key == "X":
|
||||
self.master.view_flowdetails(self.flow)
|
||||
elif key == "z":
|
||||
if conn:
|
||||
self.flow.backup()
|
||||
e = conn.headers["content-encoding"] or ["identity"]
|
||||
if e[0] != "identity":
|
||||
conn.decode()
|
||||
else:
|
||||
self.master.prompt_onekey(
|
||||
"Select encoding: ",
|
||||
(
|
||||
("gzip", "z"),
|
||||
("deflate", "d"),
|
||||
),
|
||||
self.encode_callback,
|
||||
conn
|
||||
)
|
||||
self.master.refresh_flow(self.flow)
|
||||
else:
|
||||
return key
|
||||
|
||||
def encode_callback(self, key, conn):
|
||||
encoding_map = {
|
||||
"z": "gzip",
|
||||
"d": "deflate",
|
||||
}
|
||||
conn.encode(encoding_map[key])
|
||||
self.master.refresh_flow(self.flow)
|
||||
==== New file: libmproxy/console/grideditor.py (+365 lines) ====
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import copy, re
|
||||
import urwid
|
||||
import common
|
||||
from .. import utils, filt
|
||||
|
||||
|
||||
def _mkhelp():
    """
    Build the help text (key/binding table) for the grid editor.
    """
    text = []
    keys = [
        ("A", "insert row before cursor"),
        ("a", "add row after cursor"),
        ("d", "delete row"),
        ("e", "spawn external editor on current field"),
        ("q", "return to flow view"),
        ("esc", "return to flow view/exit field edit mode"),
        ("tab", "next field"),
        ("enter", "edit field"),
    ]
    text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
    return text
help_context = _mkhelp()
|
||||
|
||||
footer = [
|
||||
('heading_key', "enter"), ":edit ",
|
||||
('heading_key', "q"), ":back ",
|
||||
]
|
||||
footer_editing = [
|
||||
('heading_key', "esc"), ":stop editing ",
|
||||
]
|
||||
|
||||
|
||||
class SText(common.WWrap):
    """Read-only grid cell; display attribute reflects focus/error state."""
    def __init__(self, txt, focused, error):
        w = urwid.Text(txt, wrap="any")
        if focused and error:
            attr = "focusfield_error"
        elif focused:
            attr = "focusfield"
        elif error:
            attr = "field_error"
        else:
            attr = None
        if attr is not None:
            w = urwid.AttrWrap(w, attr)
        common.WWrap.__init__(self, w)

    def get_text(self):
        text, attrs = self.w.get_text()
        return text

    def keypress(self, size, key):
        # Static cell: never consume input.
        return key

    def selectable(self):
        return True
|
||||
|
||||
|
||||
class SEdit(common.WWrap):
    """Editable grid cell: a multiline urwid.Edit styled as "editfield"."""
    def __init__(self, txt):
        editor = urwid.Edit(edit_text=txt, wrap="any", multiline=True)
        common.WWrap.__init__(self, urwid.AttrWrap(editor, "editfield"))

    def get_text(self):
        text, attrs = self.w.get_text()
        return text

    def selectable(self):
        return True
|
||||
|
||||
|
||||
class GridRow(common.WWrap):
    """
    One row of the grid editor.  focused is the column index that has
    focus (or None); editing selects whether the focused cell is an
    SEdit.  values is a (row-values, error-offsets) pair as stored by
    GridWalker.
    """
    def __init__(self, focused, editing, editor, values):
        self.focused, self.editing, self.editor = focused, editing, editor

        errors = values[1]
        self.fields = []
        for i, v in enumerate(values[0]):
            if focused == i and editing:
                # self.editing is rebound from a bool to the SEdit widget.
                self.editing = SEdit(v)
                self.fields.append(self.editing)
            else:
                self.fields.append(
                    SText(v, True if focused == i else False, i in errors)
                )

        fspecs = self.fields[:]
        # First column gets a fixed width so the key column lines up.
        fspecs[0] = ("fixed", self.editor.first_width + 2, fspecs[0])
        w = urwid.Columns(
            fspecs,
            dividechars = 2
        )
        if focused is not None:
            w.set_focus_column(focused)
        common.WWrap.__init__(self, w)

    def get_value(self):
        # Re-validate every cell; returns [values, error-offsets].
        vals = []
        errors = set([])
        for i, f in enumerate(self.fields):
            v = f.get_text()
            vals.append(v)
            if self.editor.is_error(i, v):
                errors.add(i)
        return [vals, errors]

    def keypress(self, s, k):
        if self.editing:
            # Route keys to the embedded editor, sized to its column.
            w = self.w.column_widths(s)[self.focused]
            k = self.editing.keypress((w,), k)
        return k

    def selectable(self):
        return True
|
||||
|
||||
|
||||
class GridWalker(urwid.ListWalker):
    """
    Stores rows as a list of (rows, errors) tuples, where rows is a list
    and errors is a set with an entry of each offset in rows that is an
    error.
    """
    def __init__(self, lst, editor):
        self.lst = [(i, set([])) for i in lst]
        self.editor = editor
        # focus/focus_col: currently focused row and column indices.
        self.focus = 0
        self.focus_col = 0
        # editing: the in-progress GridRow being edited, or False.
        self.editing = False

    def _modified(self):
        self.editor.show_empty_msg()
        return urwid.ListWalker._modified(self)

    def get_current_value(self):
        if self.lst:
            return self.lst[self.focus][0][self.focus_col]

    def set_current_value(self, val):
        # Replacing a value clears any recorded errors for the row.
        row = list(self.lst[self.focus][0])
        row[self.focus_col] = val
        self.lst[self.focus] = [tuple(row), set([])]

    def delete_focus(self):
        if self.lst:
            del self.lst[self.focus]
            # Clamp focus when the last row was deleted.
            self.focus = min(len(self.lst)-1, self.focus)
            self._modified()

    def _insert(self, pos):
        # Insert an empty row at pos and immediately start editing it.
        self.focus = pos
        self.lst.insert(self.focus, [[""]*self.editor.columns, set([])])
        self.focus_col = 0
        self.start_edit()

    def insert(self):
        return self._insert(self.focus)

    def add(self):
        return self._insert(min(self.focus + 1, len(self.lst)))

    def start_edit(self):
        if self.lst:
            self.editing = GridRow(self.focus_col, True, self.editor, self.lst[self.focus])
            self.editor.master.statusbar.update(footer_editing)
            self._modified()

    def stop_edit(self):
        if self.editing:
            self.editor.master.statusbar.update(footer)
            # Commit the (re-validated) edited values back into the store.
            self.lst[self.focus] = self.editing.get_value()
            self.editing = False
            self._modified()

    def left(self):
        self.focus_col = max(self.focus_col - 1, 0)
        self._modified()

    def right(self):
        self.focus_col = min(self.focus_col + 1, self.editor.columns-1)
        self._modified()

    def tab_next(self):
        # Advance to the next cell, wrapping to the next row's first
        # column; stops at the last cell of the last row.
        self.stop_edit()
        if self.focus_col < self.editor.columns-1:
            self.focus_col += 1
        elif self.focus != len(self.lst)-1:
            self.focus_col = 0
            self.focus += 1
        self._modified()

    def get_focus(self):
        if self.editing:
            return self.editing, self.focus
        elif self.lst:
            return GridRow(self.focus_col, False, self.editor, self.lst[self.focus]), self.focus
        else:
            return None, None

    def set_focus(self, focus):
        # Moving focus implicitly commits any edit in progress.
        self.stop_edit()
        self.focus = focus

    def get_next(self, pos):
        if pos+1 >= len(self.lst):
            return None, None
        return GridRow(None, False, self.editor, self.lst[pos+1]), pos+1

    def get_prev(self, pos):
        if pos-1 < 0:
            return None, None
        return GridRow(None, False, self.editor, self.lst[pos-1]), pos-1
|
||||
|
||||
|
||||
class GridListBox(urwid.ListBox):
    """A ListBox over a GridWalker; a distinct type for clarity."""
    def __init__(self, lw):
        super(GridListBox, self).__init__(lw)
|
||||
|
||||
|
||||
# Bounds (in screen columns) for the width of the grid editor's first column.
FIRST_WIDTH_MAX = 40
FIRST_WIDTH_MIN = 20
|
||||
class GridEditor(common.WWrap):
    """
    Full-screen editor for a list of value rows (e.g. headers or query
    parameters). Subclasses must define three class attributes:

        title    - heading text shown above the grid
        columns  - number of columns per row
        headings - tuple of column heading strings
    """
    def __init__(self, master, value, callback, *cb_args, **cb_kwargs):
        """
        master: the console master object
        value: list of rows; deep-copied so the caller's data is never
            mutated until the callback fires
        callback: invoked with the edited rows (plus cb_args/cb_kwargs)
            when the user leaves the editor
        """
        value = copy.deepcopy(value)
        self.master, self.value, self.callback = master, value, callback
        self.cb_args, self.cb_kwargs = cb_args, cb_kwargs

        # Size the first column to its widest cell, clamped to
        # [FIRST_WIDTH_MIN, FIRST_WIDTH_MAX].
        first_width = FIRST_WIDTH_MIN
        if value:
            for r in value:
                assert len(r) == self.columns
                # Fix: measure the first cell's text (len(r[0])), not the
                # row's column count (len(r)) - the assert above makes
                # len(r) a constant, so the old code never adapted.
                first_width = max(len(r[0]), first_width)
        self.first_width = min(first_width, FIRST_WIDTH_MAX)

        title = urwid.Text(self.title)
        title = urwid.Padding(title, align="left", width=("relative", 100))
        title = urwid.AttrWrap(title, "heading")

        headings = []
        for i, h in enumerate(self.headings):
            c = urwid.Text(h)
            if i == 0:
                # Use the clamped width so the heading aligns with rows.
                headings.append(("fixed", self.first_width + 2, c))
            else:
                headings.append(c)
        h = urwid.Columns(
            headings,
            dividechars = 2
        )
        h = urwid.AttrWrap(h, "heading")

        self.walker = GridWalker(self.value, self)
        self.lb = GridListBox(self.walker)
        self.w = urwid.Frame(
            self.lb,
            header = urwid.Pile([title, h])
        )
        self.master.statusbar.update("")
        self.show_empty_msg()

    def show_empty_msg(self):
        """Show a footer hint when the grid has no rows, else clear it."""
        if self.walker.lst:
            self.w.set_footer(None)
        else:
            self.w.set_footer(
                urwid.Text(
                    [
                        ("highlight", "No values. Press "),
                        ("key", "a"),
                        ("highlight", " to add some."),
                    ]
                )
            )

    def keypress(self, size, key):
        """
        Dispatch a key press. While editing, only esc (cancel) and tab
        (advance within the row) are intercepted; everything else goes
        to the edit widget. Otherwise handle the editor's key bindings.
        """
        if self.walker.editing:
            if key in ["esc"]:
                self.walker.stop_edit()
            elif key == "tab":
                pf, pfc = self.walker.focus, self.walker.focus_col
                self.walker.tab_next()
                # Re-enter edit mode only if we moved within the same row.
                if self.walker.focus == pf and self.walker.focus_col != pfc:
                    self.walker.start_edit()
            else:
                self.w.keypress(size, key)
            return None

        key = common.shortcuts(key)
        if key in ["q", "esc"]:
            # Leave the editor: hand only non-blank rows to the callback.
            res = []
            for i in self.walker.lst:
                if any([x.strip() for x in i[0]]):
                    res.append(i[0])
            self.callback(res, *self.cb_args, **self.cb_kwargs)
            self.master.pop_view()
        elif key in ["h", "left"]:
            self.walker.left()
        elif key in ["l", "right"]:
            self.walker.right()
        elif key == "tab":
            self.walker.tab_next()
        elif key == "a":
            self.walker.add()
        elif key == "A":
            self.walker.insert()
        elif key == "d":
            self.walker.delete_focus()
        elif key == "e":
            # Edit the focused cell in an external editor.
            o = self.walker.get_current_value()
            if o is not None:
                n = self.master.spawn_editor(o)
                n = utils.clean_hanging_newline(n)
                self.walker.set_current_value(n)
                self.walker._modified()
        elif key in ["enter"]:
            self.walker.start_edit()
        else:
            return self.w.keypress(size, key)

    def is_error(self, col, val):
        """
        Hook for subclasses: return True if `val` is invalid for column
        `col`. The base implementation accepts everything.
        """
        return False
|
||||
|
||||
|
||||
class QueryEditor(GridEditor):
    """Grid editor for a request's query string (key/value pairs)."""
    title = "Editing query"
    columns = 2
    headings = ("Key", "Value")
|
||||
|
||||
|
||||
class HeaderEditor(GridEditor):
    """Grid editor for HTTP headers (key/value pairs)."""
    title = "Editing headers"
    columns = 2
    headings = ("Key", "Value")
|
||||
|
||||
|
||||
class URLEncodedFormEditor(GridEditor):
    """Grid editor for a URL-encoded form body (key/value pairs)."""
    title = "Editing URL-encoded form"
    columns = 2
    headings = ("Key", "Value")
|
||||
|
||||
|
||||
class ReplaceEditor(GridEditor):
    """Grid editor for replacement patterns: filter, regex, replacement."""
    title = "Editing replacement patterns"
    columns = 3
    headings = ("Filter", "Regex", "Replacement")

    def is_error(self, col, val):
        # Column 0 holds a filter expression, column 1 a regex; the
        # replacement column (2) can never be in error.
        if col == 0:
            # filt.parse presumably returns a falsy value for an invalid
            # filter expression - verify against filt module.
            return not filt.parse(val)
        if col == 1:
            try:
                re.compile(val)
            except re.error:
                return True
        return False
|
||||
|
||||
176
libmproxy/console/help.py
Normal file
@@ -0,0 +1,176 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import urwid
|
||||
import common
|
||||
from .. import filt, version
|
||||
|
||||
# Status bar content for the help view: the version banner plus the only
# binding that applies here ("q" to go back).
footer = [
    ("heading", 'mitmproxy v%s '%version.VERSION),
    ('heading_key', "q"), ":back ",
]
|
||||
|
||||
class HelpView(urwid.ListBox):
    """
    Full-screen help listing: keys for the view that opened us, movement
    and global key bindings, and the filter expression language.
    """
    def __init__(self, master, help_context, state):
        """
        master: the console master
        help_context: widgets describing the opening view's own keys
            (may be None)
        state: a (statusbar, body, header) triple restored on exit
        """
        self.master, self.state = master, state
        self.help_context = help_context or []
        urwid.ListBox.__init__(
            self,
            self.helptext()
        )

    def keypress(self, size, key):
        """q restores the saved view; ? (which opened us) is swallowed."""
        key = common.shortcuts(key)
        if key == "q":
            self.master.statusbar = self.state[0]
            self.master.body = self.state[1]
            self.master.header = self.state[2]
            self.master.make_view()
            return None
        elif key == "?":
            key = None
        return urwid.ListBox.keypress(self, size, key)

    def helptext(self):
        """Assemble the help screen as a list of urwid text widgets."""
        text = []
        text.append(urwid.Text([("head", "Keys for this view:\n")]))
        text.extend(self.help_context)

        text.append(urwid.Text([("head", "\n\nMovement:\n")]))
        keys = [
            ("j, k", "up, down"),
            ("h, l", "left, right (in some contexts)"),
            ("space", "page down"),
            ("pg up/down", "page up/down"),
            ("arrows", "up, down, left, right"),
        ]
        text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))

        text.append(urwid.Text([("head", "\n\nGlobal keys:\n")]))
        keys = [
            ("c", "client replay"),
            ("i", "set interception pattern"),
            ("M", "change global default display mode"),
            (None,
                common.highlight_key("automatic", "a") +
                [("text", ": automatic detection")]
            ),
            (None,
                common.highlight_key("hex", "h") +
                [("text", ": Hex")]
            ),
            (None,
                common.highlight_key("image", "i") +
                [("text", ": Image")]
            ),
            (None,
                common.highlight_key("javascript", "j") +
                [("text", ": JavaScript")]
            ),
            (None,
                common.highlight_key("json", "s") +
                [("text", ": JSON")]
            ),
            (None,
                common.highlight_key("urlencoded", "u") +
                [("text", ": URL-encoded data")]
            ),
            (None,
                common.highlight_key("raw", "r") +
                [("text", ": raw data")]
            ),
            (None,
                common.highlight_key("xml", "x") +
                [("text", ": XML")]
            ),
            ("o", "toggle options:"),
            (None,
                common.highlight_key("anticache", "a") +
                [("text", ": prevent cached responses")]
            ),
            (None,
                common.highlight_key("anticomp", "c") +
                [("text", ": prevent compressed responses")]
            ),
            (None,
                common.highlight_key("killextra", "k") +
                [("text", ": kill requests not part of server replay")]
            ),
            (None,
                common.highlight_key("norefresh", "n") +
                [("text", ": disable server replay response refresh")]
            ),
            (None,
                common.highlight_key("upstream certs", "u") +
                [("text", ": sniff cert info from upstream server")]
            ),

            ("q", "quit / return to flow list"),
            ("Q", "quit without confirm prompt"),
            ("P", "set reverse proxy mode"),
            ("R", "edit replacement patterns"),
            ("s", "set/unset script"),
            ("S", "server replay"),
            ("t", "set sticky cookie expression"),
            ("u", "set sticky auth expression"),
        ]
        text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))

        text.append(urwid.Text([("head", "\n\nFilter expressions:\n")]))
        f = []
        for i in filt.filt_unary:
            f.append(
                ("~%s"%i.code, i.help)
            )
        for i in filt.filt_rex:
            f.append(
                ("~%s regex"%i.code, i.help)
            )
        for i in filt.filt_int:
            f.append(
                ("~%s int"%i.code, i.help)
            )
        f.sort()
        f.extend(
            [
                ("!", "unary not"),
                ("&", "and"),
                ("|", "or"),
                ("(...)", "grouping"),
            ]
        )
        text.extend(common.format_keyvals(f, key="key", val="text", indent=4))

        text.append(
            urwid.Text(
                [
                    "\n",
                    ("text", " Regexes are Python-style.\n"),
                    ("text", " Regexes can be specified as quoted strings.\n"),
                    ("text", " Header matching (~h, ~hq, ~hs) is against a string of the form \"name: value\".\n"),
                    ("text", " Expressions with no operators are regex matches against URL.\n"),
                    ("text", " Default binary operator is &.\n"),
                    ("head", "\n Examples:\n"),
                ]
            )
        )
        examples = [
            # Fix: the display string was missing its closing quote mark.
            ("google\.com", "Url containing \"google.com\""),
            ("~q ~b test", "Requests where body contains \"test\""),
            ("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
        ]
        text.extend(common.format_keyvals(examples, key="key", val="text", indent=4))
        return text
|
||||
|
||||
59
libmproxy/console/palettes.py
Normal file
@@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
# Default ("dark") color scheme. Each entry is an urwid palette tuple:
#   (name, foreground, background) or, with high-color attributes,
#   (name, foreground, background, mono, foreground_high, background_high).
dark = [
    ('body', 'black', 'dark cyan'),
    ('foot', 'light gray', 'default'),
    ('title', 'white,bold', 'default',),
    ('editline', 'white', 'default',),

    # Status bar & heading
    ('heading', 'light gray', "dark blue", None, "g85", "dark blue"),
    ('heading_key', 'light cyan', "dark blue", None, "light cyan", "dark blue"),
    ('heading_inactive', 'white', 'dark gray', None, "g58", "g11"),

    # Help
    ('key', 'light cyan', 'default'),
    ('head', 'white,bold', 'default'),
    ('text', 'light gray', 'default'),

    # List and Connections
    ('method', 'dark cyan', 'default'),
    ('focus', 'yellow', 'default'),

    ('code_200', 'light green', 'default'),
    ('code_300', 'light blue', 'default'),
    ('code_400', 'light red', 'default', None, "#f60", "default"),
    ('code_500', 'light red', 'default'),
    ('code_other', 'dark red', 'default'),

    ('error', 'light red', 'default'),

    ('header', 'dark cyan', 'default'),
    ('highlight', 'white,bold', 'default'),
    ('intercept', 'brown', 'default', None, "#f60", "default"),
    ('replay', 'light green', 'default', None, "#0f0", "default"),
    ('ack', 'light red', 'default'),

    # Hex view
    ('offset', 'dark cyan', 'default'),

    # Grid Editor
    ('focusfield', 'black', 'light gray'),
    ('focusfield_error', 'dark red', 'light gray'),
    ('field_error', 'dark red', 'black'),
    ('editfield', 'black', 'light cyan'),
]
|
||||
10
libmproxy/contrib/README
Normal file
@@ -0,0 +1,10 @@
|
||||
|
||||
Contribs:
|
||||
|
||||
pyparsing 1.5.2, MIT license
|
||||
|
||||
jsbeautifier, git checkout 25/03/12, MIT license
|
||||
- Removed test directories
|
||||
- Disabled packers through a single-line modification (see "# CORTESI"
|
||||
comment)
|
||||
|
||||
1153
libmproxy/contrib/jsbeautifier/__init__.py
Normal file
25
libmproxy/contrib/jsbeautifier/unpackers/README.specs.mkd
Normal file
@@ -0,0 +1,25 @@
|
||||
# UNPACKERS SPECIFICATIONS
|
||||
|
||||
Nothing very difficult: an unpacker is a submodule placed in the directory
|
||||
where this file was found. Each unpacker must define three symbols:
|
||||
|
||||
* `PRIORITY` : integer number expressing the priority in applying this
|
||||
unpacker. Lower number means higher priority.
|
||||
Makes sense only if a source file has been packed with
|
||||
more than one packer.
|
||||
* `detect(source)` : returns `True` if source is packed, otherwise, `False`.
|
||||
* `unpack(source)` : takes a `source` string and unpacks it. Must always return
|
||||
valid JavaScript. That is to say, your code should look
|
||||
like:
|
||||
|
||||
```
|
||||
if detect(source):
|
||||
return do_your_fancy_things_with(source)
|
||||
else:
|
||||
return source
|
||||
```
|
||||
|
||||
*You can safely define any other symbol in your module, as it will be ignored.*
|
||||
|
||||
`__init__` code will automatically load new unpackers, without any further step
|
||||
to be accomplished. Simply drop it in this directory.
|
||||
67
libmproxy/contrib/jsbeautifier/unpackers/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
#
|
||||
# General code for JSBeautifier unpackers infrastructure. See README.specs
|
||||
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
|
||||
"""General code for JSBeautifier unpackers infrastructure."""
|
||||
|
||||
import pkgutil
|
||||
import re
|
||||
from jsbeautifier.unpackers import evalbased
|
||||
|
||||
# NOTE: AT THE MOMENT, IT IS DEACTIVATED FOR YOUR SECURITY: it runs js!
|
||||
BLACKLIST = ['jsbeautifier.unpackers.evalbased']
|
||||
|
||||
class UnpackingError(Exception):
    """Badly packed source or general unpacking failure. The exception
    argument is a meaningful description."""


def getunpackers():
    """Scan the unpackers directory and return the available unpackers.

    An unpacker is loaded only if it is a valid python module (its name
    must adhere to naming conventions) and it is not blacklisted (i.e.
    listed in BLACKLIST). The result is sorted by ascending PRIORITY.
    """
    wanted = ['unpack', 'detect', 'PRIORITY']
    unpackers = []
    for _importer, modname, _ispkg in pkgutil.iter_modules(__path__, __name__ + '.'):
        if 'tests' in modname or modname in BLACKLIST:
            continue
        try:
            module = __import__(modname, fromlist=wanted)
        except ImportError:
            raise UnpackingError('Bad unpacker: %s' % modname)
        unpackers.append(module)
    return sorted(unpackers, key=lambda mod: mod.PRIORITY)
|
||||
|
||||
UNPACKERS = getunpackers()
|
||||
|
||||
def run(source, evalcode=False):
    """Runs the applicable unpackers and return unpacked source as a string."""
    # Select the applicable unpackers against the *original* source
    # first, then apply them in sequence.
    applicable = [mod for mod in UNPACKERS if mod.detect(source)]
    for unpacker in applicable:
        source = unpacker.unpack(source)
    if evalcode and evalbased.detect(source):
        source = evalbased.unpack(source)
    return source
|
||||
|
||||
def filtercomments(source):
    """NOT USED: strips leading comments and puts them at the top.

    Repeatedly peels /* ... */ block comments and // markers off the
    front of `source`, collects them, and returns them joined with
    newlines followed by the remaining source.
    """
    trailing_comments = []

    while True:
        if re.search(r'^\s*\/\*', source):
            # Bug fix: the original indexed with source[0, ...] (a tuple,
            # which raises TypeError); a slice is intended here.
            end = source.find('*/')
            if end == -1:
                # Unterminated block comment: nothing sensible to strip.
                break
            comment = source[0:end + 2]
        elif re.search(r'^\s*\/\/', source):
            # NOTE(review): this captures only the whitespace and "//",
            # not the comment text after it - preserved from the
            # original; presumably the full line was intended. TODO confirm.
            comment = re.search(r'^\s*\/\/', source).group(0)
        else:
            break

        source = re.sub(r'^\s+', '', source[len(comment):])
        trailing_comments.append(comment)

    return '\n'.join(trailing_comments) + source
|
||||
39
libmproxy/contrib/jsbeautifier/unpackers/evalbased.py
Normal file
@@ -0,0 +1,39 @@
|
||||
#
|
||||
# Unpacker for eval() based packers, a part of javascript beautifier
|
||||
# by Einar Lielmanis <einar@jsbeautifier.org>
|
||||
#
|
||||
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# if detect(some_string):
|
||||
# unpacked = unpack(some_string)
|
||||
#
|
||||
|
||||
"""Unpacker for eval() based packers: runs JS code and returns result.
|
||||
Works only if a JS interpreter (e.g. Mozilla's Rhino) is installed and
|
||||
properly set up on host."""
|
||||
|
||||
from subprocess import PIPE, Popen

PRIORITY = 3


def detect(source):
    """Detects if source is likely to be eval() packed."""
    normalized = source.strip().lower()
    return normalized.startswith('eval(function(')


def unpack(source):
    """Runs source and return resulting code."""
    if not detect(source):
        return source
    # Drop the leading "eval" and print the packed function's result.
    return jseval('print %s;' % source[4:])


# In case of failure, we'll just return the original, without crashing on user.
def jseval(script):
    """Run code in the JS interpreter and return output."""
    try:
        interpreter = Popen(['js'], stdin=PIPE, stdout=PIPE)
    except OSError:
        # No 'js' interpreter available on this host.
        return script
    output, errors = interpreter.communicate(script)
    if interpreter.poll() or errors:
        # Nonzero exit status (or stderr output): hand the input back.
        return script
    return output
|
||||
@@ -0,0 +1,58 @@
|
||||
#
|
||||
# simple unpacker/deobfuscator for scripts messed up with
|
||||
# javascriptobfuscator.com
|
||||
#
|
||||
# written by Einar Lielmanis <einar@jsbeautifier.org>
|
||||
# rewritten in Python by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# Will always return valid javascript: if `detect()` is false, `code` is
|
||||
# returned, unmodified.
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# if javascriptobfuscator.detect(some_string):
|
||||
# some_string = javascriptobfuscator.unpack(some_string)
|
||||
#
|
||||
|
||||
"""deobfuscator for scripts messed up with JavascriptObfuscator.com"""
|
||||
|
||||
import re

PRIORITY = 1


def smartsplit(code):
    """Split `code` at " symbol, only if it is not escaped."""
    strings = []
    i = 0
    while i < len(code):
        if code[i] == '"':
            pieces = []  # characters making up the current string literal
            i += 1
            while i < len(code):
                if code[i] == '"':
                    break
                if code[i] == '\\':
                    # keep the escape together with the escaped character
                    pieces.append('\\')
                    i += 1
                pieces.append(code[i])
                i += 1
            strings.append('"%s"' % ''.join(pieces))
        i += 1
    return strings


def detect(code):
    """Detects if `code` is JavascriptObfuscator.com packed."""
    # bool() so a true boolean is returned rather than a match object
    return bool(re.match(r'var _0x[a-f0-9]+ ?\= ?\[', code))


def unpack(code):
    """Unpacks JavascriptObfuscator.com packed code."""
    if not detect(code):
        return code
    header = re.search(r'var (_0x[a-f\d]+) ?\= ?\[(.*?)\];', code)
    if header:
        varname = header.group(1)
        dictionary = smartsplit(header.group(2))
        code = code[len(header.group(0)):]
        for index, literal in enumerate(dictionary):
            code = code.replace('%s[%s]' % (varname, index), literal)
    return code
|
||||
86
libmproxy/contrib/jsbeautifier/unpackers/myobfuscate.py
Normal file
@@ -0,0 +1,86 @@
|
||||
#
|
||||
# deobfuscator for scripts messed up with myobfuscate.com
|
||||
# by Einar Lielmanis <einar@jsbeautifier.org>
|
||||
#
|
||||
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# if detect(some_string):
|
||||
# unpacked = unpack(some_string)
|
||||
#
|
||||
|
||||
# CAVEAT by Einar Lielmanis
|
||||
|
||||
#
|
||||
# You really don't want to obfuscate your scripts there: they're tracking
|
||||
# your unpackings, your script gets turned into something like this,
|
||||
# as of 2011-08-26:
|
||||
#
|
||||
# var _escape = 'your_script_escaped';
|
||||
# var _111 = document.createElement('script');
|
||||
# _111.src = 'http://api.www.myobfuscate.com/?getsrc=ok' +
|
||||
# '&ref=' + encodeURIComponent(document.referrer) +
|
||||
# '&url=' + encodeURIComponent(document.URL);
|
||||
# var 000 = document.getElementsByTagName('head')[0];
|
||||
# 000.appendChild(_111);
|
||||
# document.write(unescape(_escape));
|
||||
#
|
||||
|
||||
"""Deobfuscator for scripts messed up with MyObfuscate.com"""
|
||||
|
||||
import re
|
||||
import base64
|
||||
|
||||
# Python 2 retrocompatibility
|
||||
# pylint: disable=F0401
|
||||
# pylint: disable=E0611
|
||||
try:
|
||||
from urllib import unquote
|
||||
except ImportError:
|
||||
from urllib.parse import unquote
|
||||
|
||||
from jsbeautifier.unpackers import UnpackingError
|
||||
|
||||
PRIORITY = 1

CAVEAT = """//
// Unpacker warning: be careful when using myobfuscate.com for your projects:
// scripts obfuscated by the free online version call back home.
//

"""

SIGNATURE = (r'["\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4A\x4B\x4C\x4D\x4E\x4F'
             r'\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5A\x61\x62\x63\x64\x65'
             r'\x66\x67\x68\x69\x6A\x6B\x6C\x6D\x6E\x6F\x70\x71\x72\x73\x74\x75'
             r'\x76\x77\x78\x79\x7A\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x2B'
             r'\x2F\x3D","","\x63\x68\x61\x72\x41\x74","\x69\x6E\x64\x65\x78'
             r'\x4F\x66","\x66\x72\x6F\x6D\x43\x68\x61\x72\x43\x6F\x64\x65","'
             r'\x6C\x65\x6E\x67\x74\x68"]')


def detect(source):
    """Detects MyObfuscate.com packer."""
    return source.find(SIGNATURE) != -1


def unpack(source):
    """Unpacks js code packed with MyObfuscate.com"""
    if not detect(source):
        return source
    payload = unquote(_filter(source))
    script = re.search(r"^var _escape\='<script>(.*)<\/script>'",
                       payload, re.DOTALL)
    if script:
        return CAVEAT + script.group(1)
    return CAVEAT + source


def _filter(source):
    """Extracts and decode payload (original file) from `source`"""
    try:
        varname = re.search(r'eval\(\w+\(\w+\((\w+)\)\)\);', source).group(1)
        # The payload is stored reversed; undo that before decoding.
        encoded = re.search(r"var +%s *\= *'(.*)';" % varname, source).group(1)
    except AttributeError:
        raise UnpackingError('Malformed MyObfuscate data.')
    try:
        return base64.b64decode(encoded[::-1].encode('utf8')).decode('utf8')
    except TypeError:
        raise UnpackingError('MyObfuscate payload is not base64-encoded.')
|
||||
104
libmproxy/contrib/jsbeautifier/unpackers/packer.py
Normal file
@@ -0,0 +1,104 @@
|
||||
#
|
||||
# Unpacker for Dean Edward's p.a.c.k.e.r, a part of javascript beautifier
|
||||
# by Einar Lielmanis <einar@jsbeautifier.org>
|
||||
#
|
||||
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# if detect(some_string):
|
||||
# unpacked = unpack(some_string)
|
||||
#
|
||||
|
||||
"""Unpacker for Dean Edward's p.a.c.k.e.r"""
|
||||
|
||||
import re
|
||||
import string
|
||||
from jsbeautifier.unpackers import UnpackingError
|
||||
|
||||
PRIORITY = 1


def detect(source):
    """Detects whether `source` is P.A.C.K.E.R. coded."""
    condensed = source.replace(' ', '')
    return condensed.startswith('eval(function(p,a,c,k,e,r')


def unpack(source):
    """Unpacks P.A.C.K.E.R. packed js code."""
    payload, symtab, radix, count = _filterargs(source)

    if len(symtab) != count:
        raise UnpackingError('Malformed p.a.c.k.e.r. symtab.')

    try:
        unbase = Unbaser(radix)
    except TypeError:
        raise UnpackingError('Unknown p.a.c.k.e.r. encoding.')

    def lookup(match):
        """Look up symbols in the synthetic symtab."""
        word = match.group(0)
        replacement = symtab[unbase(word)]
        return replacement or word

    return _replacestrings(re.sub(r'\b\w+\b', lookup, payload))


def _filterargs(source):
    """Juice from a source file the four args needed by decoder."""
    argsregex = (r"}\('(.*)', *(\d+), *(\d+), *'(.*)'\."
                 r"split\('\|'\), *(\d+), *(.*)\)\)")
    args = re.search(argsregex, source, re.DOTALL).groups()

    payload, radix, count, symtab = args[0], args[1], args[2], args[3]
    try:
        return payload, symtab.split('|'), int(radix), int(count)
    except ValueError:
        raise UnpackingError('Corrupted p.a.c.k.e.r. data.')


def _replacestrings(source):
    """Strip string lookup table (list) and replace values in source."""
    match = re.search(r'var *(_\w+)\=\["(.*?)"\];', source, re.DOTALL)
    if not match:
        return source
    varname, strings = match.groups()
    startpoint = len(match.group(0))
    for index, value in enumerate(strings.split('","')):
        source = source.replace('%s[%d]' % (varname, index), '"%s"' % value)
    return source[startpoint:]


class Unbaser(object):
    """Functor for a given base. Will efficiently convert
    strings to natural numbers."""
    ALPHABET = {
        62 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',
        95 : (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
              '[\]^_`abcdefghijklmnopqrstuvwxyz{|}~')
    }

    def __init__(self, base):
        self.base = base

        if 2 <= base <= 36:
            # int() handles these bases natively.
            self.unbase = lambda digits: int(digits, base)
        else:
            # Build a symbol -> value cache for the custom alphabet.
            try:
                alphabet = self.ALPHABET[base]
            except KeyError:
                raise TypeError('Unsupported base encoding.')
            self.dictionary = dict(
                (symbol, value) for value, symbol in enumerate(alphabet))
            self.unbase = self._dictunbaser

    def __call__(self, string):
        return self.unbase(string)

    def _dictunbaser(self, string):
        """Decodes a value to an integer."""
        total = 0
        for power, symbol in enumerate(string[::-1]):
            total += (self.base ** power) * self.dictionary[symbol]
        return total
|
||||
34
libmproxy/contrib/jsbeautifier/unpackers/urlencode.py
Normal file
@@ -0,0 +1,34 @@
|
||||
#
|
||||
# Trivial bookmarklet/escaped script detector for the javascript beautifier
|
||||
# written by Einar Lielmanis <einar@jsbeautifier.org>
|
||||
# rewritten in Python by Stefano Sanfilippo <a.little.coder@gmail.com>
|
||||
#
|
||||
# Will always return valid javascript: if `detect()` is false, `code` is
|
||||
# returned, unmodified.
|
||||
#
|
||||
# usage:
|
||||
#
|
||||
# some_string = urlencode.unpack(some_string)
|
||||
#
|
||||
|
||||
"""Bookmarklet/escaped script unpacker."""
|
||||
|
||||
# Python 2 retrocompatibility
# pylint: disable=F0401
# pylint: disable=E0611
try:
    from urllib import unquote_plus
except ImportError:
    from urllib.parse import unquote_plus

PRIORITY = 0


def detect(code):
    """Detects if a scriptlet is urlencoded."""
    # No literal spaces at all, but "%20" (or a lot of % escapes)
    # present, is a sufficient hint for now.
    if ' ' in code:
        return False
    return '%20' in code or code.count('%') > 3


def unpack(code):
    """URL decode `code` source string."""
    if detect(code):
        return unquote_plus(code)
    return code
|
||||
@@ -1,22 +1,21 @@
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import Queue, threading
|
||||
|
||||
exit = False
|
||||
should_exit = False
|
||||
|
||||
#begin nocover
|
||||
|
||||
@@ -25,18 +24,19 @@ class Msg:
|
||||
self.q = Queue.Queue()
|
||||
self.acked = False
|
||||
|
||||
def ack(self, data=False):
|
||||
self.acked = True
|
||||
if data is None:
|
||||
self.q.put(data)
|
||||
else:
|
||||
self.q.put(data or self)
|
||||
def _ack(self, data=False):
|
||||
if not self.acked:
|
||||
self.acked = True
|
||||
if data is None:
|
||||
self.q.put(data)
|
||||
else:
|
||||
self.q.put(data or self)
|
||||
|
||||
def send(self, masterq):
|
||||
def _send(self, masterq):
|
||||
self.acked = False
|
||||
try:
|
||||
masterq.put(self, timeout=3)
|
||||
while not exit:
|
||||
while not should_exit:
|
||||
try:
|
||||
g = self.q.get(timeout=0.5)
|
||||
except Queue.Empty:
|
||||
@@ -81,10 +81,10 @@ class Master:
|
||||
return changed
|
||||
|
||||
def run(self):
|
||||
if self.server:
|
||||
slave = Slave(self.masterq, self.server)
|
||||
slave.start()
|
||||
while not exit:
|
||||
global should_exit
|
||||
should_exit = False
|
||||
self.server.start_slave(Slave, self.masterq)
|
||||
while not should_exit:
|
||||
self.tick(self.masterq)
|
||||
self.shutdown()
|
||||
|
||||
@@ -94,11 +94,11 @@ class Master:
|
||||
if m:
|
||||
m(msg)
|
||||
else:
|
||||
msg.ack()
|
||||
msg._ack()
|
||||
|
||||
def shutdown(self):
|
||||
global exit
|
||||
if not exit:
|
||||
exit = True
|
||||
global should_exit
|
||||
if not should_exit:
|
||||
should_exit = True
|
||||
if self.server:
|
||||
self.server.shutdown()
|
||||
|
||||
@@ -1,4 +1,19 @@
|
||||
import sys, os, traceback
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os
|
||||
import flow, filt, utils
|
||||
|
||||
class DumpError(Exception): pass
|
||||
@@ -7,16 +22,19 @@ class DumpError(Exception): pass
|
||||
class Options(object):
|
||||
__slots__ = [
|
||||
"anticache",
|
||||
"anticomp",
|
||||
"client_replay",
|
||||
"eventlog",
|
||||
"keepserving",
|
||||
"kill",
|
||||
"no_server",
|
||||
"nopop",
|
||||
"refresh_server_playback",
|
||||
"request_script",
|
||||
"response_script",
|
||||
"replacements",
|
||||
"rfile",
|
||||
"rheaders",
|
||||
"server_replay",
|
||||
"script",
|
||||
"stickycookie",
|
||||
"stickyauth",
|
||||
"verbosity",
|
||||
@@ -42,7 +60,7 @@ def str_request(req):
|
||||
c = req.client_conn.address[0]
|
||||
else:
|
||||
c = "[replay]"
|
||||
r = "%s %s %s"%(c, req.method, req.url())
|
||||
r = "%s %s %s"%(c, req.method, req.get_url())
|
||||
if req.stickycookie:
|
||||
r = "[stickycookie] " + r
|
||||
return r
|
||||
@@ -54,6 +72,8 @@ class DumpMaster(flow.FlowMaster):
|
||||
self.outfile = outfile
|
||||
self.o = options
|
||||
self.anticache = options.anticache
|
||||
self.anticomp = options.anticomp
|
||||
self.eventlog = options.eventlog
|
||||
self.refresh_server_playback = options.refresh_server_playback
|
||||
|
||||
if filtstr:
|
||||
@@ -61,11 +81,6 @@ class DumpMaster(flow.FlowMaster):
|
||||
else:
|
||||
self.filt = None
|
||||
|
||||
if self.o.response_script:
|
||||
self.set_response_script(self.o.response_script)
|
||||
if self.o.request_script:
|
||||
self.set_request_script(self.o.request_script)
|
||||
|
||||
if options.stickycookie:
|
||||
self.set_stickycookie(options.stickycookie)
|
||||
|
||||
@@ -80,13 +95,29 @@ class DumpMaster(flow.FlowMaster):
|
||||
except IOError, v:
|
||||
raise DumpError(v.strerror)
|
||||
|
||||
if options.replacements:
|
||||
for i in options.replacements:
|
||||
self.replacehooks.add(*i)
|
||||
|
||||
if options.server_replay:
|
||||
self.start_server_playback(
|
||||
self._readflow(options.server_replay),
|
||||
options.kill, options.rheaders,
|
||||
not options.keepserving,
|
||||
options.nopop
|
||||
)
|
||||
|
||||
if options.client_replay:
|
||||
self.start_client_playback(
|
||||
self._readflow(options.client_replay),
|
||||
not options.keepserving
|
||||
)
|
||||
|
||||
if options.script:
|
||||
err = self.load_script(options.script)
|
||||
if err:
|
||||
raise DumpError(err)
|
||||
|
||||
if options.rfile:
|
||||
path = os.path.expanduser(options.rfile)
|
||||
try:
|
||||
@@ -94,13 +125,10 @@ class DumpMaster(flow.FlowMaster):
|
||||
freader = flow.FlowReader(f)
|
||||
except IOError, v:
|
||||
raise DumpError(v.strerror)
|
||||
self.load_flows(freader)
|
||||
|
||||
if options.client_replay:
|
||||
self.start_client_playback(
|
||||
self._readflow(options.client_replay),
|
||||
not options.keepserving
|
||||
)
|
||||
try:
|
||||
self.load_flows(freader)
|
||||
except flow.FlowReadError, v:
|
||||
raise DumpError(v)
|
||||
|
||||
|
||||
def _readflow(self, path):
|
||||
@@ -112,24 +140,14 @@ class DumpMaster(flow.FlowMaster):
|
||||
raise DumpError(v.strerror)
|
||||
return flows
|
||||
|
||||
def _runscript(self, f, script):
|
||||
try:
|
||||
ret = f.run_script(script)
|
||||
if self.o.verbosity > 0:
|
||||
print >> self.outfile, ret
|
||||
except flow.RunException, e:
|
||||
if e.errout:
|
||||
eout = "Script output:\n" + self.indent(4, e.errout) + "\n"
|
||||
else:
|
||||
eout = ""
|
||||
raise DumpError(
|
||||
"%s: %s\n%s"%(script, e.args[0], eout)
|
||||
)
|
||||
def add_event(self, e, level="info"):
|
||||
if self.eventlog:
|
||||
print >> self.outfile, e
|
||||
|
||||
def handle_request(self, r):
|
||||
f = flow.FlowMaster.handle_request(self, r)
|
||||
if f:
|
||||
r.ack()
|
||||
r._ack()
|
||||
return f
|
||||
|
||||
def indent(self, n, t):
|
||||
@@ -138,7 +156,7 @@ class DumpMaster(flow.FlowMaster):
|
||||
|
||||
def _process_flow(self, f):
|
||||
if self.filt and not f.match(self.filt):
|
||||
return
|
||||
return
|
||||
|
||||
if f.response:
|
||||
sz = utils.pretty_size(len(f.response.content))
|
||||
@@ -168,7 +186,7 @@ class DumpMaster(flow.FlowMaster):
|
||||
print >> self.outfile
|
||||
print >> self.outfile, result
|
||||
print >> self.outfile, "\n"
|
||||
elif self.o.verbosity == 3:
|
||||
elif self.o.verbosity >= 3:
|
||||
print >> self.outfile, str_request(f.request)
|
||||
print >> self.outfile, self.indent(4, f.request.headers)
|
||||
if utils.isBin(f.request.content):
|
||||
@@ -178,6 +196,7 @@ class DumpMaster(flow.FlowMaster):
|
||||
print >> self.outfile
|
||||
print >> self.outfile, result
|
||||
print >> self.outfile, "\n"
|
||||
|
||||
self.state.delete_flow(f)
|
||||
if self.o.wfile:
|
||||
self.fwriter.add(f)
|
||||
@@ -185,24 +204,24 @@ class DumpMaster(flow.FlowMaster):
|
||||
def handle_response(self, msg):
|
||||
f = flow.FlowMaster.handle_response(self, msg)
|
||||
if f:
|
||||
msg.ack()
|
||||
msg._ack()
|
||||
self._process_flow(f)
|
||||
return f
|
||||
|
||||
def handle_error(self, msg):
|
||||
f = flow.FlowMaster.handle_error(self, msg)
|
||||
if f:
|
||||
msg.ack()
|
||||
self._process_flow(f)
|
||||
return f
|
||||
|
||||
|
||||
# begin nocover
|
||||
def run(self):
|
||||
if self.o.rfile and not self.o.keepserving:
|
||||
if self.script:
|
||||
self.load_script(None)
|
||||
return
|
||||
try:
|
||||
return flow.FlowMaster.run(self)
|
||||
except BaseException, v:
|
||||
except BaseException:
|
||||
self.shutdown()
|
||||
raise
|
||||
|
||||
88
libmproxy/encoding.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Utility functions for decoding response bodies.
|
||||
"""
|
||||
import cStringIO
|
||||
import gzip, zlib
|
||||
|
||||
__ALL__ = ["ENCODINGS"]
|
||||
|
||||
ENCODINGS = set(["identity", "gzip", "deflate"])
|
||||
|
||||
def decode(e, content):
|
||||
encoding_map = {
|
||||
"identity": identity,
|
||||
"gzip": decode_gzip,
|
||||
"deflate": decode_deflate,
|
||||
}
|
||||
if e not in encoding_map:
|
||||
return None
|
||||
return encoding_map[e](content)
|
||||
|
||||
def encode(e, content):
|
||||
encoding_map = {
|
||||
"identity": identity,
|
||||
"gzip": encode_gzip,
|
||||
"deflate": encode_deflate,
|
||||
}
|
||||
if e not in encoding_map:
|
||||
return None
|
||||
return encoding_map[e](content)
|
||||
|
||||
def identity(content):
|
||||
"""
|
||||
Returns content unchanged. Identity is the default value of
|
||||
Accept-Encoding headers.
|
||||
"""
|
||||
return content
|
||||
|
||||
def decode_gzip(content):
|
||||
gfile = gzip.GzipFile(fileobj=cStringIO.StringIO(content))
|
||||
try:
|
||||
return gfile.read()
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
def encode_gzip(content):
|
||||
s = cStringIO.StringIO()
|
||||
gf = gzip.GzipFile(fileobj=s, mode='wb')
|
||||
gf.write(content)
|
||||
gf.close()
|
||||
return s.getvalue()
|
||||
|
||||
def decode_deflate(content):
|
||||
"""
|
||||
Returns decompressed data for DEFLATE. Some servers may respond with
|
||||
compressed data without a zlib header or checksum. An undocumented
|
||||
feature of zlib permits the lenient decompression of data missing both
|
||||
values.
|
||||
|
||||
http://bugs.python.org/issue5784
|
||||
"""
|
||||
try:
|
||||
try:
|
||||
return zlib.decompress(content)
|
||||
except zlib.error:
|
||||
return zlib.decompress(content, -15)
|
||||
except zlib.error:
|
||||
return None
|
||||
|
||||
def encode_deflate(content):
|
||||
"""
|
||||
Returns compressed content, always including zlib header and checksum.
|
||||
"""
|
||||
return zlib.compress(content)
|
||||
@@ -1,16 +1,15 @@
|
||||
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
@@ -19,7 +18,7 @@
|
||||
|
||||
~q Request
|
||||
~s Response
|
||||
|
||||
|
||||
Headers:
|
||||
|
||||
Patterns are matched against "name: value" strings. Field names are
|
||||
@@ -34,9 +33,10 @@
|
||||
~bq rex Expression in the body of response
|
||||
~t rex Shortcut for content-type header.
|
||||
|
||||
~m rex Method
|
||||
~u rex URL
|
||||
~c CODE Response code.
|
||||
rex Equivalent to ~u rex
|
||||
rex Equivalent to ~u rex
|
||||
"""
|
||||
import re, sys
|
||||
import contrib.pyparsing as pp
|
||||
@@ -56,19 +56,27 @@ class _Action(_Token):
|
||||
return klass(*toks[1:])
|
||||
|
||||
|
||||
class FErr(_Action):
|
||||
code = "e"
|
||||
help = "Match error"
|
||||
def __call__(self, f):
|
||||
return True if f.error else False
|
||||
|
||||
|
||||
class FReq(_Action):
|
||||
code = "q"
|
||||
help = "Match request"
|
||||
def __call__(self, conn):
|
||||
return not conn.is_response()
|
||||
help = "Match request with no response"
|
||||
def __call__(self, f):
|
||||
if not f.response:
|
||||
return True
|
||||
|
||||
|
||||
class FResp(_Action):
|
||||
code = "s"
|
||||
help = "Match response"
|
||||
def __call__(self, conn):
|
||||
return conn.is_response()
|
||||
|
||||
def __call__(self, f):
|
||||
return True if f.response else False
|
||||
|
||||
|
||||
class _Rex(_Action):
|
||||
def __init__(self, expr):
|
||||
@@ -78,82 +86,75 @@ class _Rex(_Action):
|
||||
except:
|
||||
raise ValueError, "Cannot compile expression."
|
||||
|
||||
|
||||
def _check_content_type(expr, o):
|
||||
val = o.headers.get("content-type")
|
||||
val = o.headers["content-type"]
|
||||
if val and re.search(expr, val[0]):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
|
||||
class FContentType(_Rex):
|
||||
code = "t"
|
||||
help = "Content-type header"
|
||||
def __call__(self, o):
|
||||
if _check_content_type(self.expr, o):
|
||||
def __call__(self, f):
|
||||
if _check_content_type(self.expr, f.request):
|
||||
return True
|
||||
elif o.is_response() and _check_content_type(self.expr, o.request):
|
||||
elif f.response and _check_content_type(self.expr, f.response):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
class FRequestContentType(_Rex):
|
||||
code = "tq"
|
||||
help = "Request Content-Type header"
|
||||
def __call__(self, o):
|
||||
if o.is_response():
|
||||
return _check_content_type(self.expr, o.request)
|
||||
else:
|
||||
return _check_content_type(self.expr, o)
|
||||
def __call__(self, f):
|
||||
return _check_content_type(self.expr, f.request)
|
||||
|
||||
|
||||
class FResponseContentType(_Rex):
|
||||
code = "ts"
|
||||
help = "Request Content-Type header"
|
||||
def __call__(self, o):
|
||||
if o.is_response():
|
||||
return _check_content_type(self.expr, o)
|
||||
else:
|
||||
return False
|
||||
def __call__(self, f):
|
||||
if f.response:
|
||||
return _check_content_type(self.expr, f.response)
|
||||
return False
|
||||
|
||||
|
||||
class FHead(_Rex):
|
||||
code = "h"
|
||||
help = "Header"
|
||||
def __call__(self, o):
|
||||
val = o.headers.match_re(self.expr)
|
||||
if not val and o.is_response():
|
||||
val = o.request.headers.match_re(self.expr)
|
||||
return val
|
||||
|
||||
def __call__(self, f):
|
||||
if f.request.headers.match_re(self.expr):
|
||||
return True
|
||||
elif f.response and f.response.headers.match_re(self.expr):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class FHeadRequest(_Rex):
|
||||
code = "hq"
|
||||
help = "Request header"
|
||||
def __call__(self, o):
|
||||
if o.is_response():
|
||||
h = o.request.headers
|
||||
else:
|
||||
h = o.headers
|
||||
return h.match_re(self.expr)
|
||||
def __call__(self, f):
|
||||
if f.request.headers.match_re(self.expr):
|
||||
return True
|
||||
|
||||
|
||||
class FHeadResponse(_Rex):
|
||||
code = "hs"
|
||||
help = "Response header"
|
||||
def __call__(self, o):
|
||||
if not o.is_response():
|
||||
return False
|
||||
return o.headers.match_re(self.expr)
|
||||
def __call__(self, f):
|
||||
if f.response and f.response.headers.match_re(self.expr):
|
||||
return True
|
||||
|
||||
|
||||
class FBod(_Rex):
|
||||
code = "b"
|
||||
help = "Body"
|
||||
def __call__(self, o):
|
||||
if o.content and re.search(self.expr, o.content):
|
||||
def __call__(self, f):
|
||||
if f.request.content and re.search(self.expr, f.request.content):
|
||||
return True
|
||||
elif o.is_response() and o.request.content and re.search(self.expr, o.request.content):
|
||||
elif f.response and f.response.content and re.search(self.expr, f.response.content):
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -161,24 +162,25 @@ class FBod(_Rex):
|
||||
class FBodRequest(_Rex):
|
||||
code = "bq"
|
||||
help = "Request body"
|
||||
def __call__(self, o):
|
||||
if o.is_response() and o.request.content and re.search(self.expr, o.request.content):
|
||||
def __call__(self, f):
|
||||
if f.request.content and re.search(self.expr, f.request.content):
|
||||
return True
|
||||
elif not o.is_response() and o.content and re.search(self.expr, o.content):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class FBodResponse(_Rex):
|
||||
code = "bs"
|
||||
help = "Response body"
|
||||
def __call__(self, o):
|
||||
if not o.is_response():
|
||||
return False
|
||||
elif o.content and re.search(self.expr, o.content):
|
||||
def __call__(self, f):
|
||||
if f.response and f.response.content and re.search(self.expr, f.response.content):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
|
||||
class FMethod(_Rex):
|
||||
code = "m"
|
||||
help = "Method"
|
||||
def __call__(self, f):
|
||||
return bool(re.search(self.expr, f.request.method, re.IGNORECASE))
|
||||
|
||||
|
||||
class FUrl(_Rex):
|
||||
code = "u"
|
||||
@@ -190,12 +192,8 @@ class FUrl(_Rex):
|
||||
toks = toks[1:]
|
||||
return klass(*toks)
|
||||
|
||||
def __call__(self, o):
|
||||
if o.is_response():
|
||||
c = o.request
|
||||
else:
|
||||
c = o
|
||||
return re.search(self.expr, c.url())
|
||||
def __call__(self, f):
|
||||
return re.search(self.expr, f.request.get_url())
|
||||
|
||||
|
||||
class _Int(_Action):
|
||||
@@ -206,10 +204,9 @@ class _Int(_Action):
|
||||
class FCode(_Int):
|
||||
code = "c"
|
||||
help = "HTTP response code"
|
||||
def __call__(self, o):
|
||||
if o.is_response():
|
||||
return o.code == self.num
|
||||
return False
|
||||
def __call__(self, f):
|
||||
if f.response and f.response.code == self.num:
|
||||
return True
|
||||
|
||||
|
||||
class FAnd(_Token):
|
||||
@@ -221,8 +218,8 @@ class FAnd(_Token):
|
||||
for i in self.lst:
|
||||
i.dump(indent+1, fp)
|
||||
|
||||
def __call__(self, o):
|
||||
return all([i(o) for i in self.lst])
|
||||
def __call__(self, f):
|
||||
return all(i(f) for i in self.lst)
|
||||
|
||||
|
||||
class FOr(_Token):
|
||||
@@ -234,8 +231,8 @@ class FOr(_Token):
|
||||
for i in self.lst:
|
||||
i.dump(indent+1, fp)
|
||||
|
||||
def __call__(self, o):
|
||||
return any([i(o) for i in self.lst])
|
||||
def __call__(self, f):
|
||||
return any(i(f) for i in self.lst)
|
||||
|
||||
|
||||
class FNot(_Token):
|
||||
@@ -246,12 +243,14 @@ class FNot(_Token):
|
||||
print >> fp, "\t"*indent, self.__class__.__name__
|
||||
self.itm.dump(indent + 1, fp)
|
||||
|
||||
def __call__(self, o):
|
||||
return not self.itm(o)
|
||||
def __call__(self, f):
|
||||
return not self.itm(f)
|
||||
|
||||
|
||||
filt_unary = [
|
||||
FReq,
|
||||
FResp
|
||||
FResp,
|
||||
FErr
|
||||
]
|
||||
filt_rex = [
|
||||
FHeadRequest,
|
||||
@@ -260,6 +259,7 @@ filt_rex = [
|
||||
FBodRequest,
|
||||
FBodResponse,
|
||||
FBod,
|
||||
FMethod,
|
||||
FUrl,
|
||||
FRequestContentType,
|
||||
FResponseContentType,
|
||||
@@ -277,7 +277,7 @@ def _make():
|
||||
f.setParseAction(klass.make)
|
||||
parts.append(f)
|
||||
|
||||
simplerex = "".join([c for c in pp.printables if c not in "()~'\""])
|
||||
simplerex = "".join(c for c in pp.printables if c not in "()~'\"")
|
||||
rex = pp.Word(simplerex) |\
|
||||
pp.QuotedString("\"", escChar='\\') |\
|
||||
pp.QuotedString("'", escChar='\\')
|
||||
@@ -315,6 +315,6 @@ def parse(s):
|
||||
return bnf.parseString(s, parseAll=True)[0]
|
||||
except pp.ParseException:
|
||||
return None
|
||||
except ValueError, e:
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
1338
libmproxy/flow.py
@@ -1,151 +0,0 @@
|
||||
"""
|
||||
Netstring is a module for encoding and decoding netstring streams.
|
||||
See http://cr.yp.to/proto/netstrings.txt for more information on netstrings.
|
||||
Author: Will McGugan (http://www.willmcgugan.com)
|
||||
"""
|
||||
from cStringIO import StringIO
|
||||
|
||||
|
||||
def header(data):
|
||||
return str(len(data))+":"
|
||||
|
||||
|
||||
class FileEncoder(object):
|
||||
def __init__(self, file_out):
|
||||
""""
|
||||
file_out -- A writable file object
|
||||
"""
|
||||
self.file_out = file_out
|
||||
|
||||
def write(self, data):
|
||||
"""
|
||||
Encodes a netstring and writes it to the file object.
|
||||
|
||||
data -- A string to be encoded and written
|
||||
"""
|
||||
write = self.file_out.write
|
||||
write(header(data))
|
||||
write(data)
|
||||
write(',')
|
||||
return self
|
||||
|
||||
|
||||
class DecoderError(Exception):
|
||||
PRECEDING_ZERO_IN_SIZE = 0
|
||||
MAX_SIZE_REACHED = 1
|
||||
ILLEGAL_DIGIT_IN_SIZE = 2
|
||||
ILLEGAL_DIGIT = 3
|
||||
error_text = {
|
||||
PRECEDING_ZERO_IN_SIZE: "PRECEDING_ZERO_IN_SIZE",
|
||||
MAX_SIZE_REACHED: "MAX_SIZE_REACHED",
|
||||
ILLEGAL_DIGIT_IN_SIZE: "ILLEGAL_DIGIT_IN_SIZE",
|
||||
ILLEGAL_DIGIT: "ILLEGAL_DIGIT"
|
||||
}
|
||||
def __init__(self, code, text):
|
||||
Exception.__init__(self)
|
||||
self.code = code
|
||||
self.text = text
|
||||
|
||||
def __str__(self):
|
||||
return "%s (#%i), %s" % (DecoderError.error_text[self.code], self.code, self.text)
|
||||
|
||||
|
||||
class Decoder(object):
|
||||
"""
|
||||
A netstring decoder.
|
||||
Turns a netstring stream in to a number of discreet strings.
|
||||
"""
|
||||
def __init__(self, max_size=None):
|
||||
"""
|
||||
Create a netstring-stream decoder object.
|
||||
|
||||
max_size -- The maximum size of a netstring encoded string, after which
|
||||
a DecoderError will be throw. A value of None (the default) indicates
|
||||
that there should be no maximum string size.
|
||||
"""
|
||||
self.max_size = max_size
|
||||
self.data_pos = 0
|
||||
self.string_start = 0
|
||||
self.expecting_terminator = False
|
||||
self.size_string = ""
|
||||
self.data_size = None
|
||||
self.remaining_bytes = 0
|
||||
self.data_out = StringIO()
|
||||
self.yield_data = ""
|
||||
|
||||
def feed(self, data):
|
||||
"""
|
||||
A generator that yields 0 or more strings from the given data.
|
||||
|
||||
data -- A string containing complete or partial netstring data
|
||||
"""
|
||||
self.data_pos = 0
|
||||
self.string_start = 0
|
||||
while self.data_pos < len(data):
|
||||
if self.expecting_terminator:
|
||||
c = data[self.data_pos]
|
||||
self.data_pos += 1
|
||||
if c != ',':
|
||||
raise DecoderError(DecoderError.ILLEGAL_DIGIT, "Illegal digit (%s) at end of data"%repr(c))
|
||||
yield self.yield_data
|
||||
self.yield_data = ""
|
||||
self.expecting_terminator = False
|
||||
elif self.data_size is None:
|
||||
c = data[self.data_pos]
|
||||
self.data_pos += 1
|
||||
|
||||
if not len(self.size_string):
|
||||
self.string_start = self.data_pos-1
|
||||
|
||||
if c in "0123456789":
|
||||
if self.size_string == '0':
|
||||
raise DecoderError(DecoderError.PRECEDING_ZERO_IN_SIZE, "Preceding zeros in size field illegal")
|
||||
self.size_string += c
|
||||
if self.max_size is not None and int(self.size_string) > self.max_size:
|
||||
raise DecoderError(DecoderError.MAX_SIZE_REACHED, "Maximum size of netstring exceeded")
|
||||
|
||||
elif c == ":":
|
||||
if not len(self.size_string):
|
||||
raise DecoderError(DecoderError.ILLEGAL_DIGIT_IN_SIZE, "Illegal digit (%s) in size field"%repr(c))
|
||||
self.data_size = int(self.size_string)
|
||||
self.remaining_bytes = self.data_size
|
||||
|
||||
else:
|
||||
raise DecoderError(DecoderError.ILLEGAL_DIGIT_IN_SIZE, "Illegal digit (%s) in size field"%repr(c))
|
||||
|
||||
elif self.data_size is not None:
|
||||
get_bytes = min(self.remaining_bytes, len(data)-self.data_pos)
|
||||
chunk = data[self.data_pos:self.data_pos+get_bytes]
|
||||
whole_string = len(chunk) == self.data_size
|
||||
if not whole_string:
|
||||
self.data_out.write(chunk)
|
||||
self.data_pos += get_bytes
|
||||
self.remaining_bytes -= get_bytes
|
||||
if self.remaining_bytes == 0:
|
||||
if whole_string:
|
||||
self.yield_data = chunk
|
||||
else:
|
||||
self.yield_data = self.data_out.getvalue()
|
||||
self.data_out.reset()
|
||||
self.data_out.truncate()
|
||||
self.data_size = None
|
||||
self.size_string = ""
|
||||
self.remaining_bytes = 0
|
||||
self.expecting_terminator = True
|
||||
|
||||
|
||||
def decode_file(file_in, buffer_size=1024):
|
||||
"""
|
||||
Generates 0 or more strings from a netstring file.
|
||||
|
||||
file_in -- A readable file-like object containing netstring data
|
||||
buffer_size -- The number of bytes to attempt to read in each iteration
|
||||
(default = 1024).
|
||||
"""
|
||||
decoder = Decoder()
|
||||
while True:
|
||||
data = file_in.read(buffer_size)
|
||||
if not len(data):
|
||||
return
|
||||
for s in decoder.feed(data):
|
||||
yield s
|
||||
@@ -1,15 +1,27 @@
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
A simple proxy server implementation, which always reads all of a server
|
||||
response into memory, performs some transformation, and then writes it back
|
||||
to the client.
|
||||
|
||||
Development started from Neil Schemenauer's munchy.py
|
||||
to the client.
|
||||
"""
|
||||
import sys, os, string, socket, urlparse, re, select, copy, base64, time, Cookie
|
||||
from email.utils import parsedate_tz, formatdate, mktime_tz
|
||||
import shutil, tempfile
|
||||
import sys, os, string, socket, time
|
||||
import shutil, tempfile, threading
|
||||
import optparse, SocketServer, ssl
|
||||
import utils, controller
|
||||
import utils, flow, certutils
|
||||
|
||||
NAME = "mitmproxy"
|
||||
|
||||
@@ -22,28 +34,66 @@ class ProxyError(Exception):
|
||||
return "ProxyError(%s, %s)"%(self.code, self.msg)
|
||||
|
||||
|
||||
class SSLConfig:
|
||||
def __init__(self, certfile = None, ciphers = None, cacert = None, cert_wait_time=None):
|
||||
class ProxyConfig:
|
||||
def __init__(self, certfile = None, ciphers = None, cacert = None, cert_wait_time=0, upstream_cert=False, body_size_limit = None, reverse_proxy=None):
|
||||
self.certfile = certfile
|
||||
self.ciphers = ciphers
|
||||
self.cacert = cacert
|
||||
self.certdir = None
|
||||
self.cert_wait_time = cert_wait_time
|
||||
self.upstream_cert = upstream_cert
|
||||
self.body_size_limit = body_size_limit
|
||||
self.reverse_proxy = reverse_proxy
|
||||
|
||||
|
||||
def read_chunked(fp):
|
||||
content = ""
|
||||
def read_headers(fp):
|
||||
"""
|
||||
Read a set of headers from a file pointer. Stop once a blank line
|
||||
is reached. Return a ODict object.
|
||||
"""
|
||||
ret = []
|
||||
name = ''
|
||||
while 1:
|
||||
line = fp.readline()
|
||||
if not line or line == '\r\n' or line == '\n':
|
||||
break
|
||||
if line[0] in ' \t':
|
||||
# continued header
|
||||
ret[-1][1] = ret[-1][1] + '\r\n ' + line.strip()
|
||||
else:
|
||||
i = line.find(':')
|
||||
# We're being liberal in what we accept, here.
|
||||
if i > 0:
|
||||
name = line[:i]
|
||||
value = line[i+1:].strip()
|
||||
ret.append([name, value])
|
||||
return flow.ODictCaseless(ret)
|
||||
|
||||
|
||||
def read_chunked(fp, limit):
|
||||
content = ""
|
||||
total = 0
|
||||
while 1:
|
||||
line = fp.readline(128)
|
||||
if line == "":
|
||||
raise IOError("Connection closed")
|
||||
if line == '\r\n' or line == '\n':
|
||||
continue
|
||||
length = int(line,16)
|
||||
try:
|
||||
length = int(line,16)
|
||||
except ValueError:
|
||||
# FIXME: Not strictly correct - this could be from the server, in which
|
||||
# case we should send a 502.
|
||||
raise ProxyError(400, "Invalid chunked encoding length: %s"%line)
|
||||
if not length:
|
||||
break
|
||||
total += length
|
||||
if limit is not None and total > limit:
|
||||
msg = "HTTP Body too large."\
|
||||
" Limit is %s, chunked content length was at least %s"%(limit, total)
|
||||
raise ProxyError(509, msg)
|
||||
content += fp.read(length)
|
||||
line = fp.readline()
|
||||
line = fp.readline(5)
|
||||
if line != '\r\n':
|
||||
raise IOError("Malformed chunked body")
|
||||
while 1:
|
||||
@@ -53,45 +103,31 @@ def read_chunked(fp):
|
||||
if line == '\r\n' or line == '\n':
|
||||
break
|
||||
return content
|
||||
|
||||
|
||||
def read_http_body(rfile, connection, headers, all):
|
||||
if headers.has_key('transfer-encoding'):
|
||||
if not ",".join(headers["transfer-encoding"]) == "chunked":
|
||||
|
||||
def read_http_body(rfile, connection, headers, all, limit):
|
||||
if 'transfer-encoding' in headers:
|
||||
if not ",".join(headers["transfer-encoding"]).lower() == "chunked":
|
||||
raise IOError('Invalid transfer-encoding')
|
||||
content = read_chunked(rfile)
|
||||
elif headers.has_key("content-length"):
|
||||
content = rfile.read(int(headers["content-length"][0]))
|
||||
content = read_chunked(rfile, limit)
|
||||
elif "content-length" in headers:
|
||||
try:
|
||||
l = int(headers["content-length"][0])
|
||||
except ValueError:
|
||||
# FIXME: Not strictly correct - this could be from the server, in which
|
||||
# case we should send a 502.
|
||||
raise ProxyError(400, "Invalid content-length header: %s"%headers["content-length"])
|
||||
if limit is not None and l > limit:
|
||||
raise ProxyError(509, "HTTP Body too large. Limit is %s, content-length was %s"%(limit, l))
|
||||
content = rfile.read(l)
|
||||
elif all:
|
||||
content = rfile.read()
|
||||
content = rfile.read(limit if limit else None)
|
||||
connection.close = True
|
||||
else:
|
||||
content = ""
|
||||
return content
|
||||
|
||||
|
||||
def parse_url(url):
|
||||
"""
|
||||
Returns a (scheme, host, port, path) tuple, or None on error.
|
||||
"""
|
||||
scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
|
||||
if not scheme:
|
||||
return None
|
||||
if ':' in netloc:
|
||||
host, port = string.split(netloc, ':')
|
||||
port = int(port)
|
||||
else:
|
||||
host = netloc
|
||||
if scheme == "https":
|
||||
port = 443
|
||||
else:
|
||||
port = 80
|
||||
path = urlparse.urlunparse(('', '', path, params, query, fragment))
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
return scheme, host, port, path
|
||||
|
||||
|
||||
def parse_request_line(request):
|
||||
"""
|
||||
Parse a proxy request line. Return (method, scheme, host, port, path, minor).
|
||||
@@ -113,7 +149,7 @@ def parse_request_line(request):
|
||||
if url.startswith("/") or url == "*":
|
||||
scheme, port, host, path = None, None, None, url
|
||||
else:
|
||||
parts = parse_url(url)
|
||||
parts = utils.parse_url(url)
|
||||
if not parts:
|
||||
raise ProxyError(400, "Invalid url: %s"%url)
|
||||
scheme, host, port, path = parts
|
||||
@@ -127,339 +163,6 @@ def parse_request_line(request):
|
||||
return method, scheme, host, port, path, minor
|
||||
|
||||
|
||||
class Request(controller.Msg):
|
||||
FMT = '%s %s HTTP/1.1\r\n%s\r\n%s'
|
||||
FMT_PROXY = '%s %s://%s:%s%s HTTP/1.1\r\n%s\r\n%s'
|
||||
def __init__(self, client_conn, host, port, scheme, method, path, headers, content, timestamp=None):
|
||||
self.client_conn = client_conn
|
||||
self.host, self.port, self.scheme = host, port, scheme
|
||||
self.method, self.path, self.headers, self.content = method, path, headers, content
|
||||
self.timestamp = timestamp or utils.timestamp()
|
||||
self.close = False
|
||||
controller.Msg.__init__(self)
|
||||
|
||||
# Have this request's cookies been modified by sticky cookies or auth?
|
||||
self.stickycookie = False
|
||||
self.stickyauth = False
|
||||
|
||||
def anticache(self):
|
||||
"""
|
||||
Modifies this request to remove headers that might produce a cached
|
||||
response. That is, we remove ETags and If-Modified-Since headers.
|
||||
"""
|
||||
delheaders = [
|
||||
"if-modified-since",
|
||||
"if-none-match",
|
||||
]
|
||||
for i in delheaders:
|
||||
if i in self.headers:
|
||||
del self.headers[i]
|
||||
|
||||
def set_replay(self):
|
||||
self.client_conn = None
|
||||
|
||||
def is_replay(self):
|
||||
if self.client_conn:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
def load_state(self, state):
|
||||
if state["client_conn"]:
|
||||
if self.client_conn:
|
||||
self.client_conn.load_state(state["client_conn"])
|
||||
else:
|
||||
self.client_conn = ClientConnect.from_state(state["client_conn"])
|
||||
else:
|
||||
self.client_conn = None
|
||||
self.host = state["host"]
|
||||
self.port = state["port"]
|
||||
self.scheme = state["scheme"]
|
||||
self.method = state["method"]
|
||||
self.path = state["path"]
|
||||
self.headers = utils.Headers.from_state(state["headers"])
|
||||
self.content = base64.decodestring(state["content"])
|
||||
self.timestamp = state["timestamp"]
|
||||
|
||||
def get_state(self):
|
||||
return dict(
|
||||
client_conn = self.client_conn.get_state() if self.client_conn else None,
|
||||
host = self.host,
|
||||
port = self.port,
|
||||
scheme = self.scheme,
|
||||
method = self.method,
|
||||
path = self.path,
|
||||
headers = self.headers.get_state(),
|
||||
content = base64.encodestring(self.content),
|
||||
timestamp = self.timestamp,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_state(klass, state):
|
||||
return klass(
|
||||
ClientConnect.from_state(state["client_conn"]),
|
||||
state["host"],
|
||||
state["port"],
|
||||
state["scheme"],
|
||||
state["method"],
|
||||
state["path"],
|
||||
utils.Headers.from_state(state["headers"]),
|
||||
base64.decodestring(state["content"]),
|
||||
state["timestamp"]
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return id(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.get_state() == other.get_state()
|
||||
|
||||
def copy(self):
    # Shallow-copy the request, but give the copy its own headers object
    # so header edits on the copy do not leak back to the original.
    c = copy.copy(self)
    c.headers = self.headers.copy()
    return c
|
||||
|
||||
def hostport(self):
    """Return "host" or "host:port", omitting the scheme's default port."""
    is_default_port = (self.port, self.scheme) in [(80, "http"), (443, "https")]
    if is_default_port:
        return self.host
    return "%s:%s" % (self.host, self.port)
|
||||
|
||||
def url(self):
    """Return the fully-qualified URL for this request."""
    return self.scheme + "://" + self.hostport() + self.path
|
||||
|
||||
def set_url(self, url):
    """
    Parse *url* and update scheme, host, port and path accordingly.

    Returns True on success, False if the URL could not be parsed.
    """
    parsed = parse_url(url)
    if parsed:
        self.scheme, self.host, self.port, self.path = parsed
        return True
    return False
|
||||
|
||||
def is_response(self):
    # Requests answer False; Response.is_response returns True. Used to
    # tell the two message types apart when they share a queue.
    return False
|
||||
|
||||
def assemble(self, _proxy = False):
    """
    Assembles the request for transmission to the server. We make some
    modifications to make sure interception works properly.

    When _proxy is True, emit an absolute-URI request line (FMT_PROXY)
    suitable for sending through an upstream proxy.
    """
    headers = self.headers.copy()
    # Strip hop-by-hop and negotiation headers: connection semantics and
    # content length are managed explicitly below.
    utils.try_del(headers, 'accept-encoding')
    utils.try_del(headers, 'proxy-connection')
    utils.try_del(headers, 'keep-alive')
    utils.try_del(headers, 'connection')
    utils.try_del(headers, 'content-length')
    utils.try_del(headers, 'transfer-encoding')
    # has_key() is Python-2-only and deprecated; the rest of this file
    # already tests membership with the `in` operator on Headers objects.
    if 'host' not in headers:
        headers["host"] = [self.hostport()]
    content = self.content
    if content is not None:
        headers["content-length"] = [str(len(content))]
    else:
        content = ""
    if self.close:
        headers["connection"] = ["close"]
    if not _proxy:
        return self.FMT % (self.method, self.path, str(headers), content)
    else:
        return self.FMT_PROXY % (self.method, self.scheme, self.host, self.port, self.path, str(headers), content)
|
||||
|
||||
|
||||
class Response(controller.Msg):
    # Assembly format: status line, header block, body.
    FMT = '%s\r\n%s\r\n%s'

    def __init__(self, request, code, msg, headers, content, timestamp=None):
        # request: the Request this response answers.
        # code, msg: HTTP status code and reason phrase.
        # headers, content: response headers object and raw body string.
        # timestamp: creation time; defaults to "now".
        self.request = request
        self.code, self.msg = code, msg
        self.headers, self.content = headers, content
        self.timestamp = timestamp or utils.timestamp()
        controller.Msg.__init__(self)
        # True once this response has been replayed from within mitmproxy.
        self.replay = False

    def _refresh_cookie(self, c, delta):
        """
        Takes a cookie string c and a time delta in seconds, and returns
        a refreshed cookie string.
        """
        c = Cookie.SimpleCookie(str(c))
        for i in c.values():
            if "expires" in i:
                d = parsedate_tz(i["expires"])
                if d:
                    d = mktime_tz(d) + delta
                    i["expires"] = formatdate(d)
                else:
                    # This can happen when the expires tag is invalid.
                    # reddit.com sends an expires tag like this: "Thu, 31 Dec
                    # 2037 23:59:59 GMT", which is valid RFC 1123, but not
                    # strictly correct according to the cookie spec. Browsers
                    # appear to parse this tolerantly - maybe we should too.
                    # For now, we just ignore this.
                    del i["expires"]
        return c.output(header="").strip()

    def refresh(self, now=None):
        """
        This fairly complex and heuristic function refreshes a server
        response for replay.

        - It adjusts date, expires and last-modified headers.
        - It adjusts cookie expiration.
        """
        if not now:
            now = time.time()
        # Seconds elapsed since the response was originally captured.
        delta = now - self.timestamp
        refresh_headers = [
            "date",
            "expires",
            "last-modified",
        ]
        for i in refresh_headers:
            if i in self.headers:
                d = parsedate_tz(self.headers[i][0])
                if d:
                    new = mktime_tz(d) + delta
                    self.headers[i] = [formatdate(new)]
        c = []
        for i in self.headers.get("set-cookie", []):
            c.append(self._refresh_cookie(i, delta))
        if c:
            self.headers["set-cookie"] = c

    def set_replay(self):
        # Flag this response as a replay; see is_replay().
        self.replay = True

    def is_replay(self):
        # True iff set_replay() was called on this response.
        return self.replay

    def load_state(self, state):
        # Restore code/msg/headers/content/timestamp from a state dict
        # produced by get_state(). Content is stored base64-encoded.
        self.code = state["code"]
        self.msg = state["msg"]
        self.headers = utils.Headers.from_state(state["headers"])
        self.content = base64.decodestring(state["content"])
        self.timestamp = state["timestamp"]

    def get_state(self):
        # Serialize to a plain dict; inverse of load_state()/from_state().
        return dict(
            code = self.code,
            msg = self.msg,
            headers = self.headers.get_state(),
            timestamp = self.timestamp,
            content = base64.encodestring(self.content)
        )

    @classmethod
    def from_state(klass, request, state):
        # Reconstruct a Response for *request* from a get_state() dict.
        return klass(
            request,
            state["code"],
            state["msg"],
            utils.Headers.from_state(state["headers"]),
            base64.decodestring(state["content"]),
            state["timestamp"],
        )

    def __eq__(self, other):
        # Responses compare equal when their serialized states match.
        return self.get_state() == other.get_state()

    def copy(self):
        # Shallow copy with an independent headers object, so header edits
        # on the copy do not affect the original.
        c = copy.copy(self)
        c.headers = self.headers.copy()
        return c

    def is_response(self):
        # Distinguishes responses from requests on the shared queue.
        return True

    def assemble(self):
        """
        Assembles the response for transmission to the client. We make some
        modifications to make sure interception works properly.
        """
        headers = self.headers.copy()
        # Strip hop-by-hop / negotiation headers; connection semantics and
        # content length are managed explicitly below.
        utils.try_del(headers, 'accept-encoding')
        utils.try_del(headers, 'proxy-connection')
        utils.try_del(headers, 'connection')
        utils.try_del(headers, 'keep-alive')
        utils.try_del(headers, 'transfer-encoding')
        content = self.content
        if content is not None:
            headers["content-length"] = [str(len(content))]
        else:
            content = ""
        if self.request.client_conn.close:
            headers["connection"] = ["close"]
        proto = "HTTP/1.1 %s %s"%(self.code, str(self.msg))
        data = (proto, str(headers), content)
        return self.FMT%data
|
||||
|
||||
|
||||
class ClientDisconnect(controller.Msg):
    # Message sent through the master queue when a client connection closes.
    def __init__(self, client_conn):
        # client_conn: the ClientConnect object that is going away.
        controller.Msg.__init__(self)
        self.client_conn = client_conn
|
||||
|
||||
|
||||
class ClientConnect(controller.Msg):
    def __init__(self, address):
        """
        address is an (address, port) tuple, or None if this connection has
        been replayed from within mitmproxy.
        """
        self.address = address
        # Set to True when the client connection should be torn down after
        # the current request/response cycle.
        self.close = False
        controller.Msg.__init__(self)

    def __eq__(self, other):
        # Connections compare equal when their serialized states match.
        return self.get_state() == other.get_state()

    def load_state(self, state):
        # The serialized state is just the address (list or None).
        self.address = state

    def get_state(self):
        # Serialize as a plain list, or None for replayed connections.
        return list(self.address) if self.address else None

    @classmethod
    def from_state(klass, state):
        # A falsy state means "no connection" (e.g. a replayed flow).
        if state:
            return klass(state)
        else:
            return None

    def copy(self):
        # Shallow copy is sufficient: state is an address plus flags.
        return copy.copy(self)
|
||||
|
||||
|
||||
class Error(controller.Msg):
    """A proxy error, paired with the request that triggered it."""

    def __init__(self, request, msg, timestamp=None):
        self.request = request
        self.msg = msg
        self.timestamp = timestamp or utils.timestamp()
        controller.Msg.__init__(self)

    def load_state(self, state):
        """Restore message text and timestamp from a serialized state."""
        self.msg = state["msg"]
        self.timestamp = state["timestamp"]

    def copy(self):
        """Return a shallow duplicate of this error."""
        return copy.copy(self)

    def get_state(self):
        """Serialize to a plain dict; the request is deliberately omitted."""
        return {
            "msg": self.msg,
            "timestamp": self.timestamp,
        }

    @classmethod
    def from_state(klass, state):
        """Rebuild an Error from get_state() output; the request is unknown."""
        return klass(None, state["msg"], state["timestamp"])

    def __eq__(self, other):
        """Errors are equal when their serialized states match."""
        return self.get_state() == other.get_state()
|
||||
|
||||
|
||||
class FileLike:
|
||||
def __init__(self, o):
|
||||
self.o = o
|
||||
@@ -482,10 +185,14 @@ class FileLike:
|
||||
result += data
|
||||
return result
|
||||
|
||||
def readline(self):
|
||||
def readline(self, size = None):
|
||||
result = ''
|
||||
bytes_read = 0
|
||||
while True:
|
||||
if size is not None and bytes_read >= size:
|
||||
break
|
||||
ch = self.read(1)
|
||||
bytes_read += 1
|
||||
if not ch:
|
||||
break
|
||||
else:
|
||||
@@ -496,13 +203,33 @@ class FileLike:
|
||||
|
||||
|
||||
#begin nocover
class RequestReplayThread(threading.Thread):
    # Replays a flow's request against the upstream server on a background
    # thread, feeding the response (or an Error) back through masterq.
    def __init__(self, config, flow, masterq):
        # config: proxy configuration; flow: the flow whose request is
        # replayed; masterq: queue back to the master.
        self.config, self.flow, self.masterq = config, flow, masterq
        threading.Thread.__init__(self)

    def run(self):
        try:
            server = ServerConnection(self.config, self.flow.request)
            server.send()
            response = server.read_response()
            response._send(self.masterq)
        except ProxyError, v:
            # Report the failure to the master instead of raising: this
            # code runs on a worker thread.
            err = flow.Error(self.flow.request, v.msg)
            err._send(self.masterq)
|
||||
|
||||
|
||||
class ServerConnection:
|
||||
def __init__(self, request):
|
||||
self.host = request.host
|
||||
self.port = request.port
|
||||
self.scheme = request.scheme
|
||||
def __init__(self, config, request):
|
||||
self.config, self.request = config, request
|
||||
if config.reverse_proxy:
|
||||
self.scheme, self.host, self.port = config.reverse_proxy
|
||||
else:
|
||||
self.host = request.host
|
||||
self.port = request.port
|
||||
self.scheme = request.scheme
|
||||
self.close = False
|
||||
self.cert = None
|
||||
self.server, self.rfile, self.wfile = None, None, None
|
||||
self.connect()
|
||||
|
||||
@@ -513,19 +240,20 @@ class ServerConnection:
|
||||
if self.scheme == "https":
|
||||
server = ssl.wrap_socket(server)
|
||||
server.connect((addr, self.port))
|
||||
if self.scheme == "https":
|
||||
self.cert = server.getpeercert(True)
|
||||
except socket.error, err:
|
||||
raise ProxyError(504, 'Error connecting to "%s": %s' % (self.host, err))
|
||||
raise ProxyError(502, 'Error connecting to "%s": %s' % (self.host, err))
|
||||
self.server = server
|
||||
self.rfile, self.wfile = server.makefile('rb'), server.makefile('wb')
|
||||
|
||||
def send_request(self, request):
|
||||
self.request = request
|
||||
request.close = self.close
|
||||
def send(self):
|
||||
self.request.close = self.close
|
||||
try:
|
||||
self.wfile.write(request.assemble())
|
||||
self.wfile.write(self.request._assemble())
|
||||
self.wfile.flush()
|
||||
except socket.error, err:
|
||||
raise ProxyError(504, 'Error sending data to "%s": %s' % (request.host, err))
|
||||
raise ProxyError(502, 'Error sending data to "%s": %s' % (self.request.host, err))
|
||||
|
||||
def read_response(self):
|
||||
line = self.rfile.readline()
|
||||
@@ -534,19 +262,23 @@ class ServerConnection:
|
||||
if not line:
|
||||
raise ProxyError(502, "Blank server response.")
|
||||
parts = line.strip().split(" ", 2)
|
||||
if len(parts) == 2: # handle missing message gracefully
|
||||
parts.append("")
|
||||
if not len(parts) == 3:
|
||||
raise ProxyError(502, "Invalid server response: %s."%line)
|
||||
proto, code, msg = parts
|
||||
code = int(code)
|
||||
headers = utils.Headers()
|
||||
headers.read(self.rfile)
|
||||
try:
|
||||
code = int(code)
|
||||
except ValueError:
|
||||
raise ProxyError(502, "Invalid server response: %s."%line)
|
||||
headers = read_headers(self.rfile)
|
||||
if code >= 100 and code <= 199:
|
||||
return self.read_response()
|
||||
if self.request.method == "HEAD" or code == 204 or code == 304:
|
||||
content = ""
|
||||
else:
|
||||
content = read_http_body(self.rfile, self, headers, True)
|
||||
return Response(self.request, code, msg, headers, content)
|
||||
content = read_http_body(self.rfile, self, headers, True, self.config.body_size_limit)
|
||||
return flow.Response(self.request, code, msg, headers, content, self.cert)
|
||||
|
||||
def terminate(self):
|
||||
try:
|
||||
@@ -564,59 +296,74 @@ class ProxyHandler(SocketServer.StreamRequestHandler):
|
||||
SocketServer.StreamRequestHandler.__init__(self, request, client_address, server)
|
||||
|
||||
def handle(self):
|
||||
cc = ClientConnect(self.client_address)
|
||||
cc.send(self.mqueue)
|
||||
cc = flow.ClientConnect(self.client_address)
|
||||
cc._send(self.mqueue)
|
||||
while not cc.close:
|
||||
self.handle_request(cc)
|
||||
cd = ClientDisconnect(cc)
|
||||
cd.send(self.mqueue)
|
||||
cd = flow.ClientDisconnect(cc)
|
||||
cd._send(self.mqueue)
|
||||
self.finish()
|
||||
|
||||
def handle_request(self, cc):
|
||||
server, request, err = None, None, None
|
||||
try:
|
||||
request = self.read_request(cc)
|
||||
try:
|
||||
request = self.read_request(cc)
|
||||
except IOError, v:
|
||||
raise IOError, "Reading request: %s"%v
|
||||
if request is None:
|
||||
cc.close = True
|
||||
return
|
||||
request = request.send(self.mqueue)
|
||||
cc.requestcount += 1
|
||||
request = request._send(self.mqueue)
|
||||
if request is None:
|
||||
cc.close = True
|
||||
return
|
||||
|
||||
if request.is_response():
|
||||
if isinstance(request, flow.Response):
|
||||
response = request
|
||||
request = False
|
||||
response = response.send(self.mqueue)
|
||||
response = response._send(self.mqueue)
|
||||
else:
|
||||
server = ServerConnection(request)
|
||||
server.send_request(request)
|
||||
response = server.read_response()
|
||||
response = response.send(self.mqueue)
|
||||
server = ServerConnection(self.config, request)
|
||||
server.send()
|
||||
try:
|
||||
response = server.read_response()
|
||||
except IOError, v:
|
||||
raise IOError, "Reading response: %s"%v
|
||||
response = response._send(self.mqueue)
|
||||
if response is None:
|
||||
server.terminate()
|
||||
if response is None:
|
||||
cc.close = True
|
||||
return
|
||||
self.send_response(response)
|
||||
except IOError:
|
||||
except IOError, v:
|
||||
cc.connection_error = v
|
||||
cc.close = True
|
||||
except ProxyError, e:
|
||||
err = Error(request, e.msg)
|
||||
err.send(self.mqueue)
|
||||
cc.close = True
|
||||
self.send_error(e.code, e.msg)
|
||||
cc.connection_error = "%s: %s"%(e.code, e.msg)
|
||||
if request:
|
||||
err = flow.Error(request, e.msg)
|
||||
err._send(self.mqueue)
|
||||
self.send_error(e.code, e.msg)
|
||||
if server:
|
||||
server.terminate()
|
||||
|
||||
def find_cert(self, host):
|
||||
def find_cert(self, host, port):
|
||||
if self.config.certfile:
|
||||
return self.config.certfile
|
||||
else:
|
||||
ret = utils.dummy_cert(self.config.certdir, self.config.cacert, host)
|
||||
sans = []
|
||||
if self.config.upstream_cert:
|
||||
cert = certutils.get_remote_cert(host, port)
|
||||
sans = cert.altnames
|
||||
host = cert.cn
|
||||
ret = certutils.dummy_cert(self.config.certdir, self.config.cacert, host, sans)
|
||||
time.sleep(self.config.cert_wait_time)
|
||||
if not ret:
|
||||
raise ProxyError(400, "mitmproxy: Unable to generate dummy cert.")
|
||||
raise ProxyError(502, "mitmproxy: Unable to generate dummy cert.")
|
||||
return ret
|
||||
|
||||
def read_request(self, client_conn):
|
||||
@@ -640,11 +387,11 @@ class ProxyHandler(SocketServer.StreamRequestHandler):
|
||||
)
|
||||
self.wfile.flush()
|
||||
kwargs = dict(
|
||||
certfile = self.find_cert(host),
|
||||
certfile = self.find_cert(host, port),
|
||||
keyfile = self.config.certfile or self.config.cacert,
|
||||
server_side = True,
|
||||
ssl_version = ssl.PROTOCOL_SSLv23,
|
||||
do_handshake_on_connect = False
|
||||
do_handshake_on_connect = True,
|
||||
)
|
||||
if sys.version_info[1] > 6:
|
||||
kwargs["ciphers"] = self.config.ciphers
|
||||
@@ -654,9 +401,8 @@ class ProxyHandler(SocketServer.StreamRequestHandler):
|
||||
method, scheme, host, port, path, httpminor = parse_request_line(self.rfile.readline())
|
||||
if scheme is None:
|
||||
scheme = "https"
|
||||
headers = utils.Headers()
|
||||
headers.read(self.rfile)
|
||||
if host is None and headers.has_key("host"):
|
||||
headers = read_headers(self.rfile)
|
||||
if host is None and "host" in headers:
|
||||
netloc = headers["host"][0]
|
||||
if ':' in netloc:
|
||||
host, port = string.split(netloc, ':')
|
||||
@@ -669,8 +415,13 @@ class ProxyHandler(SocketServer.StreamRequestHandler):
|
||||
port = 80
|
||||
port = int(port)
|
||||
if host is None:
|
||||
raise ProxyError(400, 'Invalid request: %s'%request)
|
||||
if headers.has_key('expect'):
|
||||
if self.config.reverse_proxy:
|
||||
scheme, host, port = self.config.reverse_proxy
|
||||
else:
|
||||
# FIXME: We only specify the first part of the invalid request in this error.
|
||||
# We should gather up everything read from the socket, and specify it all.
|
||||
raise ProxyError(400, 'Invalid request: %s'%line)
|
||||
if "expect" in headers:
|
||||
expect = ",".join(headers['expect'])
|
||||
if expect == "100-continue" and httpminor >= 1:
|
||||
self.wfile.write('HTTP/1.1 100 Continue\r\n')
|
||||
@@ -681,18 +432,18 @@ class ProxyHandler(SocketServer.StreamRequestHandler):
|
||||
raise ProxyError(417, 'Unmet expect: %s'%expect)
|
||||
if httpminor == 0:
|
||||
client_conn.close = True
|
||||
if headers.has_key('connection'):
|
||||
if "connection" in headers:
|
||||
for value in ",".join(headers['connection']).split(","):
|
||||
value = value.strip()
|
||||
if value == "close":
|
||||
client_conn.close = True
|
||||
if value == "keep-alive":
|
||||
client_conn.close = False
|
||||
content = read_http_body(self.rfile, client_conn, headers, False)
|
||||
return Request(client_conn, host, port, scheme, method, path, headers, content)
|
||||
content = read_http_body(self.rfile, client_conn, headers, False, self.config.body_size_limit)
|
||||
return flow.Request(client_conn, host, port, scheme, method, path, headers, content)
|
||||
|
||||
def send_response(self, response):
|
||||
self.wfile.write(response.assemble())
|
||||
self.wfile.write(response._assemble())
|
||||
self.wfile.flush()
|
||||
|
||||
def terminate(self, connection, wfile, rfile):
|
||||
@@ -730,6 +481,7 @@ ServerBase.daemon_threads = True # Terminate workers when main thread ter
|
||||
class ProxyServer(ServerBase):
|
||||
request_queue_size = 20
|
||||
allow_reuse_address = True
|
||||
bound = True
|
||||
def __init__(self, config, port, address=''):
|
||||
"""
|
||||
Raises ProxyServerError if there's a startup problem.
|
||||
@@ -743,6 +495,10 @@ class ProxyServer(ServerBase):
|
||||
self.certdir = tempfile.mkdtemp(prefix="mitmproxy")
|
||||
config.certdir = self.certdir
|
||||
|
||||
def start_slave(self, klass, masterq):
|
||||
slave = klass(masterq, self)
|
||||
slave.start()
|
||||
|
||||
def set_mqueue(self, q):
|
||||
self.masterq = q
|
||||
|
||||
@@ -750,8 +506,23 @@ class ProxyServer(ServerBase):
|
||||
self.RequestHandlerClass(self.config, request, client_address, self, self.masterq)
|
||||
|
||||
def shutdown(self):
|
||||
shutil.rmtree(self.certdir)
|
||||
ServerBase.shutdown(self)
|
||||
try:
|
||||
shutil.rmtree(self.certdir)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
class DummyServer:
    """Stand-in for ProxyServer when no port is actually bound.

    Mirrors the ProxyServer interface; the `bound` flag lets callers tell
    the two apart.
    """
    bound = False

    def __init__(self, config):
        self.config = config

    def start_slave(self, klass, masterq):
        # No listening server, so there is nothing to start.
        pass

    def shutdown(self):
        # Nothing to clean up.
        pass
|
||||
|
||||
|
||||
# Command-line utils
|
||||
@@ -770,8 +541,7 @@ def certificate_option_group(parser):
|
||||
parser.add_option_group(group)
|
||||
|
||||
|
||||
def process_certificate_option_group(parser, options):
|
||||
conf = {}
|
||||
def process_proxy_options(parser, options):
|
||||
if options.cert:
|
||||
options.cert = os.path.expanduser(options.cert)
|
||||
if not os.path.exists(options.cert):
|
||||
@@ -780,12 +550,24 @@ def process_certificate_option_group(parser, options):
|
||||
cacert = os.path.join(options.confdir, "mitmproxy-ca.pem")
|
||||
cacert = os.path.expanduser(cacert)
|
||||
if not os.path.exists(cacert):
|
||||
utils.dummy_ca(cacert)
|
||||
certutils.dummy_ca(cacert)
|
||||
if getattr(options, "cache", None) is not None:
|
||||
options.cache = os.path.expanduser(options.cache)
|
||||
return SSLConfig(
|
||||
body_size_limit = utils.parse_size(options.body_size_limit)
|
||||
|
||||
if options.reverse_proxy:
|
||||
rp = utils.parse_proxy_spec(options.reverse_proxy)
|
||||
if not rp:
|
||||
parser.error("Invalid reverse proxy specification: %s"%options.reverse_proxy)
|
||||
else:
|
||||
rp = None
|
||||
|
||||
return ProxyConfig(
|
||||
certfile = options.cert,
|
||||
cacert = cacert,
|
||||
ciphers = options.ciphers,
|
||||
cert_wait_time = options.cert_wait_time
|
||||
cert_wait_time = options.cert_wait_time,
|
||||
body_size_limit = body_size_limit,
|
||||
upstream_cert = options.upstream_cert,
|
||||
reverse_proxy = rp
|
||||
)
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
[ req ]
|
||||
prompt = no
|
||||
distinguished_name = req_distinguished_name
|
||||
x509_extensions = v3_ca
|
||||
req_extensions = v3_ca_req
|
||||
|
||||
[ req_distinguished_name ]
|
||||
organizationName = mitmproxy
|
||||
commonName = mitmproxy
|
||||
|
||||
[ v3_ca ]
|
||||
basicConstraints = critical,CA:true
|
||||
keyUsage = cRLSign, keyCertSign
|
||||
extendedKeyUsage=serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC
|
||||
nsCertType = sslCA
|
||||
|
||||
[ v3_ca_req ]
|
||||
basicConstraints = critical,CA:true
|
||||
keyUsage = cRLSign, keyCertSign
|
||||
extendedKeyUsage=serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC
|
||||
nsCertType = sslCA
|
||||
|
||||
[ v3_cert ]
|
||||
basicConstraints = CA:false
|
||||
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||
extendedKeyUsage=serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC
|
||||
nsCertType = server
|
||||
|
||||
[ v3_cert_req ]
|
||||
basicConstraints = CA:false
|
||||
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||
extendedKeyUsage=serverAuth,clientAuth,emailProtection,timeStamping,msCodeInd,msCodeCom,msCTLSign,msSGC,msEFS,nsSGC
|
||||
nsCertType = server
|
||||
@@ -1,30 +0,0 @@
|
||||
[ req ]
|
||||
prompt = no
|
||||
distinguished_name = req_distinguished_name
|
||||
x509_extensions = v3_cert
|
||||
req_extensions = v3_cert_req
|
||||
|
||||
[ req_distinguished_name ]
|
||||
organizationName = mitmproxy
|
||||
commonName = %(commonname)s
|
||||
|
||||
[ v3_ca ]
|
||||
basicConstraints = critical,CA:true
|
||||
keyUsage = cRLSign, keyCertSign
|
||||
nsCertType = sslCA
|
||||
|
||||
[ v3_ca_req ]
|
||||
basicConstraints = critical,CA:true
|
||||
keyUsage = cRLSign, keyCertSign
|
||||
nsCertType = sslCA
|
||||
|
||||
[ v3_cert ]
|
||||
basicConstraints = CA:false
|
||||
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||
nsCertType = server
|
||||
|
||||
[ v3_cert_req ]
|
||||
basicConstraints = CA:false
|
||||
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
|
||||
nsCertType = server
|
||||
|
||||
@@ -1,27 +1,70 @@
|
||||
"""
|
||||
The mitmproxy scripting interface is simple - a serialized representation
|
||||
of a flow is passed to the script on stdin, and a possibly modified flow is
|
||||
then read by mitmproxy from the scripts stdout. This module provides two
|
||||
convenience functions to make loading and returning data from scripts
|
||||
simple.
|
||||
"""
|
||||
import sys, base64
|
||||
import flow
|
||||
# Copyright (C) 2012 Aldo Cortesi
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import os, traceback
|
||||
|
||||
class ScriptError(Exception):
    # Raised by Script.load()/run(); the argument is an error message that
    # may be a formatted traceback.
    pass
|
||||
|
||||
|
||||
#begin nocover
|
||||
def load_flow():
|
||||
class Script:
|
||||
"""
|
||||
Load a flow from the stdin. Returns a Flow object.
|
||||
"""
|
||||
data = sys.stdin.read()
|
||||
return flow.Flow.script_deserialize(data)
|
||||
The instantiator should do something along this vein:
|
||||
|
||||
|
||||
def return_flow(f):
|
||||
s = Script(path, master)
|
||||
s.load()
|
||||
s.run("start")
|
||||
"""
|
||||
Print a flow to stdout.
|
||||
"""
|
||||
print >> sys.stdout, f.script_serialize()
|
||||
|
||||
def __init__(self, path, ctx):
    # path: filesystem path of the script; ctx: object passed as the first
    # argument to every script hook invoked via run().
    self.path, self.ctx = path, ctx
    # Populated by load(): the script's module namespace dict.
    self.ns = None
|
||||
|
||||
def load(self):
    """
    Loads a module.

    Raises ScriptError on failure, with argument equal to an error
    message that may be a formatted traceback.
    """
    path = os.path.expanduser(self.path)
    if not os.path.exists(path):
        raise ScriptError("No such file: %s"%self.path)
    if not os.path.isfile(path):
        raise ScriptError("Not a file: %s"%self.path)
    ns = {}
    try:
        # One dict serves as both globals and locals, so the script's
        # top-level definitions all land in ns.
        execfile(path, ns, ns)
    except Exception, v:
        raise ScriptError(traceback.format_exc(v))
    self.ns = ns
|
||||
|
||||
def run(self, name, *args, **kwargs):
    """
    Runs a plugin method.

    Returns:

        (True, retval) on success.
        (False, None) on nonexistent method.
        (False, (exc, traceback string)) if there was an exception.
    """
    f = self.ns.get(name)
    if f:
        try:
            # Hooks always receive the context object first.
            return (True, f(self.ctx, *args, **kwargs))
        except Exception, v:
            return (False, (v, traceback.format_exc(v)))
    else:
        return (False, None)
|
||||
|
||||
398
libmproxy/tnetstring.py
Normal file
@@ -0,0 +1,398 @@
|
||||
# imported from the tnetstring project: https://github.com/rfk/tnetstring
|
||||
#
|
||||
# Copyright (c) 2011 Ryan Kelly
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
"""
|
||||
tnetstring: data serialization using typed netstrings
|
||||
======================================================
|
||||
|
||||
|
||||
This is a data serialization library. It's a lot like JSON but it uses a
|
||||
new syntax called "typed netstrings" that Zed has proposed for use in the
|
||||
Mongrel2 webserver. It's designed to be simpler and easier to implement
|
||||
than JSON, with a happy consequence of also being faster in many cases.
|
||||
|
||||
An ordinary netstring is a blob of data prefixed with its length and postfixed
|
||||
with a sanity-checking comma. The string "hello world" encodes like this::
|
||||
|
||||
11:hello world,
|
||||
|
||||
Typed netstrings add other datatypes by replacing the comma with a type tag.
|
||||
Here's the integer 12345 encoded as a tnetstring::
|
||||
|
||||
5:12345#
|
||||
|
||||
And here's the list [12345,True,0] which mixes integers and bools::
|
||||
|
||||
19:5:12345#4:true!1:0#]
|
||||
|
||||
Simple enough? This module gives you the following functions:
|
||||
|
||||
:dump: dump an object as a tnetstring to a file
|
||||
:dumps: dump an object as a tnetstring to a string
|
||||
:load: load a tnetstring-encoded object from a file
|
||||
:loads: load a tnetstring-encoded object from a string
|
||||
:pop: pop a tnetstring-encoded object from the front of a string
|
||||
|
||||
Note that since parsing a tnetstring requires reading all the data into memory
|
||||
at once, there's no efficiency gain from using the file-based versions of these
|
||||
functions. They're only here so you can use load() to read precisely one
|
||||
item from a file or socket without consuming any extra data.
|
||||
|
||||
By default tnetstrings work only with byte strings, not unicode. If you want
|
||||
unicode strings then pass an optional encoding to the various functions,
|
||||
like so::
|
||||
|
||||
>>> print repr(tnetstring.loads("2:\\xce\\xb1,"))
|
||||
'\\xce\\xb1'
|
||||
>>>
|
||||
>>> print repr(tnetstring.loads("2:\\xce\\xb1,","utf8"))
|
||||
u'\u03b1'
|
||||
|
||||
"""
|
||||
|
||||
__ver_major__ = 0
|
||||
__ver_minor__ = 2
|
||||
__ver_patch__ = 0
|
||||
__ver_sub__ = ""
|
||||
__version__ = "%d.%d.%d%s" % (__ver_major__,__ver_minor__,__ver_patch__,__ver_sub__)
|
||||
|
||||
|
||||
from collections import deque
|
||||
|
||||
|
||||
def dumps(value,encoding=None):
    """dumps(object,encoding=None) -> string

    This function dumps a python object as a tnetstring.

    encoding is required only if *value* contains unicode strings.
    """
    # This uses a deque to collect output fragments in reverse order,
    # then joins them together at the end.  It's measurably faster
    # than creating all the intermediate strings.
    # If you're reading this to get a handle on the tnetstring format,
    # consider the _gdumps() function instead; it's a standard top-down
    # generator that's simpler to understand but much less efficient.
    q = deque()
    _rdumpq(q,0,value,encoding)
    return "".join(q)
|
||||
|
||||
|
||||
def dump(value,file,encoding=None):
    """dump(object,file,encoding=None)

    This function dumps a python object as a tnetstring and writes it to
    the given file.
    """
    # Serialize fully in memory first: tnetstrings are length-prefixed,
    # so the output cannot be streamed incrementally.
    file.write(dumps(value,encoding))
|
||||
|
||||
|
||||
def _rdumpq(q,size,value,encoding=None):
    """Dump value as a tnetstring, to a deque instance, last chunks first.

    This function generates the tnetstring representation of the given value,
    pushing chunks of the output onto the given deque instance.  It pushes
    the last chunk first, then recursively generates more chunks.

    When passed in the current size of the string in the queue, it will return
    the new size of the string in the queue.

    Operating last-chunk-first makes it easy to calculate the size written
    for recursive structures without having to build their representation as
    a string.  This is measurably faster than generating the intermediate
    strings, especially on deeply nested structures.
    """
    write = q.appendleft
    if value is None:
        write("0:~")
        return size + 3
    if value is True:
        write("4:true!")
        return size + 7
    if value is False:
        write("5:false!")
        return size + 8
    if isinstance(value,(int,long)):
        data = str(value)
        ldata = len(data)
        span = str(ldata)
        # Emit type tag, payload, then "length:" - reversed, because each
        # write prepends via appendleft.
        write("#")
        write(data)
        write(":")
        write(span)
        return size + 2 + len(span) + ldata
    if isinstance(value,(float,)):
        # Use repr() for float rather than str().
        # It round-trips more accurately.
        # Probably unnecessary in later python versions that
        # use David Gay's ftoa routines.
        data = repr(value)
        ldata = len(data)
        span = str(ldata)
        write("^")
        write(data)
        write(":")
        write(span)
        return size + 2 + len(span) + ldata
    if isinstance(value,str):
        lvalue = len(value)
        span = str(lvalue)
        write(",")
        write(value)
        write(":")
        write(span)
        return size + 2 + len(span) + lvalue
    if isinstance(value,(list,tuple,)):
        write("]")
        init_size = size = size + 1
        # Children are dumped in reverse so they appear in order once
        # everything has been prepended.
        for item in reversed(value):
            size = _rdumpq(q,size,item,encoding)
        span = str(size - init_size)
        write(":")
        write(span)
        return size + 1 + len(span)
    if isinstance(value,dict):
        write("}")
        init_size = size = size + 1
        for (k,v) in value.iteritems():
            # Value before key, so that the key precedes its value in the
            # final (prepended) output.
            size = _rdumpq(q,size,v,encoding)
            size = _rdumpq(q,size,k,encoding)
        span = str(size - init_size)
        write(":")
        write(span)
        return size + 1 + len(span)
    if isinstance(value,unicode):
        if encoding is None:
            raise ValueError("must specify encoding to dump unicode strings")
        value = value.encode(encoding)
        lvalue = len(value)
        span = str(lvalue)
        write(",")
        write(value)
        write(":")
        write(span)
        return size + 2 + len(span) + lvalue
    raise ValueError("unserializable object")
|
||||
|
||||
|
||||
def _gdumps(value,encoding):
|
||||
"""Generate fragments of value dumped as a tnetstring.
|
||||
|
||||
This is the naive dumping algorithm, implemented as a generator so that
|
||||
it's easy to pass to "".join() without building a new list.
|
||||
|
||||
This is mainly here for comparison purposes; the _rdumpq version is
|
||||
measurably faster as it doesn't have to build intermediate strins.
|
||||
"""
|
||||
if value is None:
|
||||
yield "0:~"
|
||||
elif value is True:
|
||||
yield "4:true!"
|
||||
elif value is False:
|
||||
yield "5:false!"
|
||||
elif isinstance(value,(int,long)):
|
||||
data = str(value)
|
||||
yield str(len(data))
|
||||
yield ":"
|
||||
yield data
|
||||
yield "#"
|
||||
elif isinstance(value,(float,)):
|
||||
data = repr(value)
|
||||
yield str(len(data))
|
||||
yield ":"
|
||||
yield data
|
||||
yield "^"
|
||||
elif isinstance(value,(str,)):
|
||||
yield str(len(value))
|
||||
yield ":"
|
||||
yield value
|
||||
yield ","
|
||||
elif isinstance(value,(list,tuple,)):
|
||||
sub = []
|
||||
for item in value:
|
||||
sub.extend(_gdumps(item))
|
||||
sub = "".join(sub)
|
||||
yield str(len(sub))
|
||||
yield ":"
|
||||
yield sub
|
||||
yield "]"
|
||||
elif isinstance(value,(dict,)):
|
||||
sub = []
|
||||
for (k,v) in value.iteritems():
|
||||
sub.extend(_gdumps(k))
|
||||
sub.extend(_gdumps(v))
|
||||
sub = "".join(sub)
|
||||
yield str(len(sub))
|
||||
yield ":"
|
||||
yield sub
|
||||
yield "}"
|
||||
elif isinstance(value,(unicode,)):
|
||||
if encoding is None:
|
||||
raise ValueError("must specify encoding to dump unicode strings")
|
||||
value = value.encode(encoding)
|
||||
yield str(len(value))
|
||||
yield ":"
|
||||
yield value
|
||||
yield ","
|
||||
else:
|
||||
raise ValueError("unserializable object")
|
||||
|
||||
|
||||
def loads(string,encoding=None):
    """loads(string,encoding=None) -> object

    Parse a complete tnetstring into a python object.
    """
    # No point duplicating effort here.  In the C-extension version,
    # loads() is measurably faster than pop() since it can avoid the
    # overhead of building a second string; here we just delegate and
    # discard the (empty) remainder.
    obj, _ = pop(string, encoding)
    return obj
|
||||
|
||||
|
||||
def load(file,encoding=None):
    """load(file,encoding=None) -> object

    This function reads a tnetstring from a file and parses it into a
    python object. The file must support the read() method, and this
    function promises not to read more data than necessary.
    """
    # Read the length prefix one char at a time.
    # Note that the netstring spec explicitly forbids padding zeros.
    c = file.read(1)
    if not c.isdigit():
        raise ValueError("not a tnetstring: missing or invalid length prefix")
    datalen = ord(c) - ord("0")
    c = file.read(1)
    # A leading "0" must be the entire prefix ("0:..."), so only keep
    # accumulating digits when the first digit was non-zero.
    if datalen != 0:
        while c.isdigit():
            datalen = (10 * datalen) + (ord(c) - ord("0"))
            # Cap the prefix at 9 digits to avoid absurd allocations.
            if datalen > 999999999:
                errmsg = "not a tnetstring: absurdly large length prefix"
                raise ValueError(errmsg)
            c = file.read(1)
    # The digit run must terminate with the ":" separator.
    if c != ":":
        raise ValueError("not a tnetstring: missing or invalid length prefix")
    # Now we can read and parse the payload.
    # This repeats the dispatch logic of pop() so we can avoid
    # re-constructing the outermost tnetstring.
    data = file.read(datalen)
    if len(data) != datalen:
        raise ValueError("not a tnetstring: length prefix too big")
    # A single trailing character identifies the payload type.
    type = file.read(1)
    if type == ",":
        # String payload; optionally decode to unicode.
        if encoding is not None:
            return data.decode(encoding)
        return data
    if type == "#":
        try:
            return int(data)
        except ValueError:
            raise ValueError("not a tnetstring: invalid integer literal")
    if type == "^":
        try:
            return float(data)
        except ValueError:
            raise ValueError("not a tnetstring: invalid float literal")
    if type == "!":
        if data == "true":
            return True
        elif data == "false":
            return False
        else:
            raise ValueError("not a tnetstring: invalid boolean literal")
    if type == "~":
        # Null carries no payload at all.
        if data:
            raise ValueError("not a tnetstring: invalid null literal")
        return None
    if type == "]":
        # List: the payload is a concatenation of nested tnetstrings,
        # parsed in-memory via pop().
        l = []
        while data:
            (item,data) = pop(data,encoding)
            l.append(item)
        return l
    if type == "}":
        # Dict: payload alternates key and value tnetstrings.
        d = {}
        while data:
            (key,data) = pop(data,encoding)
            (val,data) = pop(data,encoding)
            d[key] = val
        return d
    raise ValueError("unknown type tag")
|
||||
|
||||
|
||||
|
||||
def pop(string,encoding=None):
    """pop(string,encoding=None) -> (object, remain)

    Parse a single tnetstring off the front of the given string.

    Returns a tuple of the parsed object and any unparsed trailing data.
    """
    # Peel off the "<length>:" prefix and convert it to an int.
    try:
        prefix, rest = string.split(":", 1)
        length = int(prefix)
    except ValueError:
        raise ValueError("not a tnetstring: missing or invalid length prefix")
    # Carve the remainder into payload, one-char type tag, and leftover.
    try:
        payload, tag, remain = rest[:length], rest[length], rest[length+1:]
    except IndexError:
        # This fires when len(rest) < length, meaning we don't need to
        # further validate that the payload is the right length.
        raise ValueError("not a tnetstring: invalid length prefix")
    # Dispatch on the type tag.
    if tag == ",":
        if encoding is not None:
            return (payload.decode(encoding), remain)
        return (payload, remain)
    elif tag == "#":
        try:
            return (int(payload), remain)
        except ValueError:
            raise ValueError("not a tnetstring: invalid integer literal")
    elif tag == "^":
        try:
            return (float(payload), remain)
        except ValueError:
            raise ValueError("not a tnetstring: invalid float literal")
    elif tag == "!":
        if payload == "true":
            return (True, remain)
        if payload == "false":
            return (False, remain)
        raise ValueError("not a tnetstring: invalid boolean literal")
    elif tag == "~":
        if payload:
            raise ValueError("not a tnetstring: invalid null literal")
        return (None, remain)
    elif tag == "]":
        # List payload: repeatedly pop nested items until exhausted.
        items = []
        while payload:
            item, payload = pop(payload, encoding)
            items.append(item)
        return (items, remain)
    elif tag == "}":
        # Dict payload: alternating key and value tnetstrings.
        mapping = {}
        while payload:
            key, payload = pop(payload, encoding)
            val, payload = pop(payload, encoding)
            mapping[key] = val
        return (mapping, remain)
    raise ValueError("unknown type tag")
|
||||
@@ -1,20 +1,20 @@
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
import re, os, subprocess, datetime, textwrap, errno, sys, time, functools
|
||||
|
||||
CERT_SLEEP_TIME = 1
|
||||
import os, datetime, urlparse, string, urllib
|
||||
import time, functools, cgi
|
||||
import json
|
||||
|
||||
def timestamp():
|
||||
"""
|
||||
@@ -54,61 +54,45 @@ def isXML(s):
|
||||
return False
|
||||
|
||||
|
||||
def cleanBin(s):
|
||||
def cleanBin(s, fixspacing=False):
|
||||
"""
|
||||
Cleans binary data to make it safe to display. If fixspacing is True,
|
||||
tabs, newlines and so forth will be maintained, if not, they will be
|
||||
replaced with a placeholder.
|
||||
"""
|
||||
parts = []
|
||||
for i in s:
|
||||
o = ord(i)
|
||||
if o > 31 and o < 127:
|
||||
if (o > 31 and o < 127):
|
||||
parts.append(i)
|
||||
elif i in "\n\r\t" and not fixspacing:
|
||||
parts.append(i)
|
||||
else:
|
||||
if i not in "\n\r\t":
|
||||
parts.append(".")
|
||||
parts.append(".")
|
||||
return "".join(parts)
|
||||
|
||||
|
||||
TAG = r"""
|
||||
<\s*
|
||||
(?!\s*[!"])
|
||||
(?P<close>\s*\/)?
|
||||
(?P<name>\w+)
|
||||
(
|
||||
[^'"\t >]+ |
|
||||
"[^\"]*"['\"]* |
|
||||
'[^']*'['\"]* |
|
||||
\s+
|
||||
)*
|
||||
(?P<selfcont>\s*\/\s*)?
|
||||
\s*>
|
||||
"""
|
||||
UNI = set(["br", "hr", "img", "input", "area", "link"])
|
||||
INDENT = " "*4
|
||||
def pretty_xmlish(s):
|
||||
|
||||
def pretty_json(s):
    """
    Pretty-print s as JSON, returned as a list of lines.

    Returns None if s is not valid JSON.
    """
    try:
        parsed = json.loads(s)
    except ValueError:
        return None
    formatted = json.dumps(parsed, sort_keys=True, indent=4)
    return formatted.split("\n")
|
||||
|
||||
|
||||
def urldecode(s):
|
||||
"""
|
||||
A robust pretty-printer for XML-ish data.
|
||||
Returns a list of lines.
|
||||
Takes a urlencoded string and returns a list of (key, value) tuples.
|
||||
"""
|
||||
s = cleanBin(s)
|
||||
data, offset, indent, prev = [], 0, 0, None
|
||||
for i in re.finditer(TAG, s, re.VERBOSE|re.MULTILINE):
|
||||
start, end = i.span()
|
||||
name = i.group("name")
|
||||
if start > offset:
|
||||
txt = []
|
||||
for x in textwrap.dedent(s[offset:start]).split("\n"):
|
||||
if x.strip():
|
||||
txt.append(indent*INDENT + x)
|
||||
data.extend(txt)
|
||||
if i.group("close") and not (name in UNI and name==prev):
|
||||
indent = max(indent - 1, 0)
|
||||
data.append(indent*INDENT + i.group().strip())
|
||||
offset = end
|
||||
if not any([i.group("close"), i.group("selfcont"), name in UNI]):
|
||||
indent += 1
|
||||
prev = name
|
||||
trail = s[offset:]
|
||||
if trail.strip():
|
||||
data.append(s[offset:])
|
||||
return data
|
||||
return cgi.parse_qsl(s)
|
||||
|
||||
|
||||
def urlencode(s):
    """
    Takes a list of (key, value) tuples and returns a urlencoded string.
    """
    pairs = [tuple(pair) for pair in s]
    return urllib.urlencode(pairs, False)
|
||||
|
||||
|
||||
def hexdump(s):
|
||||
@@ -120,191 +104,20 @@ def hexdump(s):
|
||||
for i in range(0, len(s), 16):
|
||||
o = "%.10x"%i
|
||||
part = s[i:i+16]
|
||||
x = " ".join(["%.2x"%ord(i) for i in part])
|
||||
x = " ".join("%.2x"%ord(i) for i in part)
|
||||
if len(part) < 16:
|
||||
x += " "
|
||||
x += " ".join([" " for i in range(16-len(part))])
|
||||
x += " ".join(" " for i in range(16 - len(part)))
|
||||
parts.append(
|
||||
(o, x, cleanBin(part))
|
||||
(o, x, cleanBin(part, True))
|
||||
)
|
||||
return parts
|
||||
|
||||
|
||||
def isStringLike(anobj):
    """
    Return 1 if anobj behaves like a string (supports slicing and
    concatenation with ''), 0 otherwise.
    """
    try:
        # Avoid succeeding expensively if anobj is large.
        anobj[:0]+''
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        return 0
    else:
        return 1
|
||||
|
||||
|
||||
def isSequenceLike(anobj):
    """
    Is anobj a non-string sequence type (list, tuple, iterator, or
    similar)? Crude, but mostly effective.
    """
    # Python 2 iterators expose a .next method and qualify directly.
    if not hasattr(anobj, "next"):
        if isStringLike(anobj):
            return 0
        try:
            # Sliceability is the cheapest duck-type test for a sequence.
            anobj[:0]
        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            return 0
    return 1
|
||||
|
||||
|
||||
def _caseless(s):
|
||||
return s.lower()
|
||||
|
||||
|
||||
def try_del(dict, key):
    """Delete dict[key] if present; a missing key is silently ignored."""
    try:
        del dict[key]
    except KeyError:
        # Key wasn't there -- nothing to do.
        pass
|
||||
|
||||
|
||||
class MultiDict:
    """
    Simple wrapper around a dictionary to make holding multiple objects per
    key easier.

    Note that this class assumes that keys are strings.

    Keys have no order, but the order in which values are added to a key is
    preserved.
    """
    # This ridiculous bit of subterfuge is needed to prevent the class from
    # treating this as a bound method.
    _helper = (str,)
    def __init__(self):
        # Maps normalized key -> list of values.
        self._d = dict()

    def copy(self):
        # Shallow copy: the value lists are shared with the original.
        m = self.__class__()
        m._d = self._d.copy()
        return m

    def clear(self):
        # Remove all keys and values.
        return self._d.clear()

    def get(self, key, d=None):
        # Keys are normalized via _helper (e.g. lower-cased in Headers).
        key = self._helper[0](key)
        return self._d.get(key, d)

    def __contains__(self, key):
        key = self._helper[0](key)
        return self._d.__contains__(key)

    def __eq__(self, other):
        # Compared via plain-dict conversion of key -> value-list pairs.
        return dict(self) == dict(other)

    def __delitem__(self, key):
        # NOTE(review): key is not normalized here, unlike the other
        # accessors -- possibly an oversight; confirm against callers.
        self._d.__delitem__(key)

    def __getitem__(self, key):
        key = self._helper[0](key)
        return self._d.__getitem__(key)

    def __setitem__(self, key, value):
        # Values must always be stored as sequences (lists of items).
        if not isSequenceLike(value):
            raise ValueError, "Cannot insert non-sequence."
        key = self._helper[0](key)
        return self._d.__setitem__(key, value)

    def has_key(self, key):
        key = self._helper[0](key)
        return self._d.has_key(key)

    def setdefault(self, key, default=None):
        key = self._helper[0](key)
        return self._d.setdefault(key, default)

    def keys(self):
        return self._d.keys()

    def extend(self, key, value):
        # Append each item of value under key, creating the list if needed.
        if not self.has_key(key):
            self[key] = []
        self[key].extend(value)

    def append(self, key, value):
        # Add a single value under key.
        self.extend(key, [value])

    def itemPairs(self):
        """
        Yield all possible pairs of items.
        """
        for i in self.keys():
            for j in self[i]:
                yield (i, j)

    def get_state(self):
        # Serializable snapshot: a flat list of (key, value) pairs.
        return list(self.itemPairs())

    @classmethod
    def from_state(klass, state):
        # Inverse of get_state(): rebuild an instance from the pair list.
        md = klass()
        for i in state:
            md.append(*i)
        return md
|
||||
|
||||
|
||||
class Headers(MultiDict):
    """
    A dictionary-like class for keeping track of HTTP headers.

    It is case insensitive, and __repr__ formats the headers correctly for
    output to the server.
    """
    # Keys are normalized to lower case, making lookups case-insensitive.
    _helper = (_caseless,)
    def __repr__(self):
        """
        Returns a string containing a formatted header string.
        """
        headerElements = []
        for key in sorted(self.keys()):
            for val in self[key]:
                headerElements.append(key + ": " + val)
        # The trailing empty element yields a final CRLF terminator.
        headerElements.append("")
        return "\r\n".join(headerElements)

    def match_re(self, expr):
        """
        Match the regular expression against each header (key, value) pair.
        """
        for k, v in self.itemPairs():
            s = "%s: %s"%(k, v)
            if re.search(expr, s):
                return True
        return False

    def read(self, fp):
        """
        Read a set of headers from a file pointer. Stop once a blank line
        is reached.
        """
        name = ''
        while 1:
            line = fp.readline()
            if not line or line == '\r\n' or line == '\n':
                # Blank line (or EOF) terminates the header block.
                break
            if line[0] in ' \t':
                # continued header: fold onto the previous value for `name`
                self[name][-1] = self[name][-1] + '\r\n ' + line.strip()
            else:
                i = line.find(':')
                # We're being liberal in what we accept, here.
                if i > 0:
                    name = line[:i]
                    value = line[i+1:].strip()
                    if self.has_key(name):
                        # merge value
                        self.append(name, value)
                    else:
                        self[name] = [value]
|
||||
def del_all(dict, keys):
    """Remove every key in keys from dict, skipping absent ones."""
    for key in keys:
        if key not in dict:
            continue
        del dict[key]
|
||||
|
||||
|
||||
def pretty_size(size):
|
||||
@@ -340,164 +153,7 @@ class Data:
|
||||
if not os.path.exists(fullpath):
|
||||
raise ValueError, "dataPath: %s does not exist."%fullpath
|
||||
return fullpath
|
||||
data = Data(__name__)
|
||||
|
||||
|
||||
def dummy_ca(path):
    """
    Creates a dummy CA, and writes it to path.

    This function also creates the necessary directories if they don't exist.

    Returns True if operation succeeded, False if not.
    """
    dirname = os.path.dirname(path)
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    # Derive the base name used for the exported -cert.p12/-cert.pem files.
    if path.endswith(".pem"):
        basename, _ = os.path.splitext(path)
    else:
        basename = path

    # Generate a self-signed CA; cert and key are both written to `path`.
    cmd = [
        "openssl",
        "req",
        "-new",
        "-x509",
        "-config", data.path("resources/ca.cnf"),
        "-nodes",
        "-days", "9999",
        "-out", path,
        "-newkey", "rsa:1024",
        "-keyout", path,
    ]
    ret = subprocess.call(
        cmd,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE
    )
    # begin nocover
    if ret:
        return False
    # end nocover

    # Export the certificate (no key) in PKCS12 form.
    # NOTE(review): basename may already include dirname, in which case
    # os.path.join returns basename unchanged -- confirm intended layout.
    cmd = [
        "openssl",
        "pkcs12",
        "-export",
        "-password", "pass:",
        "-nokeys",
        "-in", path,
        "-out", os.path.join(dirname, basename + "-cert.p12")
    ]
    ret = subprocess.call(
        cmd,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE
    )
    # begin nocover
    if ret:
        return False
    # end nocover
    # Also export just the certificate in PEM form.
    cmd = [
        "openssl",
        "x509",
        "-in", path,
        "-out", os.path.join(dirname, basename + "-cert.pem")
    ]
    ret = subprocess.call(
        cmd,
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE
    )
    # begin nocover
    if ret:
        return False
    # end nocover

    return True
|
||||
|
||||
|
||||
def dummy_cert(certdir, ca, commonname):
    """
    Generate (or re-use) a dummy SSL certificate for a host.

    certdir: Certificate directory.
    ca: Path to the certificate authority file, or None.
    commonname: Common name for the generated certificate.

    Returns cert path if operation succeeded, None if not.
    """
    certpath = os.path.join(certdir, commonname + ".pem")
    if os.path.exists(certpath):
        # Certificate was generated on a previous run; re-use it.
        return certpath

    confpath = os.path.join(certdir, commonname + ".cnf")
    reqpath = os.path.join(certdir, commonname + ".req")

    template = open(data.path("resources/cert.cnf")).read()
    # Bug fix: the original did `f = open(...).write(...)`, which bound f
    # to None and never closed the handle -- the config might not be
    # flushed to disk before openssl reads it.  Close explicitly.
    f = open(confpath, "w")
    try:
        f.write(template%(dict(commonname=commonname)))
    finally:
        f.close()

    if ca:
        # Create a dummy signed certificate. Uses same key as the signing CA
        cmd = [
            "openssl",
            "req",
            "-new",
            "-config", confpath,
            "-out", reqpath,
            "-key", ca,
        ]
        ret = subprocess.call(
            cmd,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE
        )
        if ret:
            return None
        # Sign the request with the CA, applying the v3_cert extensions.
        cmd = [
            "openssl",
            "x509",
            "-req",
            "-in", reqpath,
            "-days", "9999",
            "-out", certpath,
            "-CA", ca,
            "-CAcreateserial",
            "-extfile", confpath,
            "-extensions", "v3_cert",
        ]
        ret = subprocess.call(
            cmd,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE
        )
        if ret:
            return None
    else:
        # Create a new selfsigned certificate + key
        cmd = [
            "openssl",
            "req",
            "-new",
            "-x509",
            "-config", confpath,
            "-nodes",
            "-days", "9999",
            "-out", certpath,
            "-newkey", "rsa:1024",
            "-keyout", certpath,
        ]
        ret = subprocess.call(
            cmd,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE
        )
        if ret:
            return None
    return certpath
|
||||
pkg_data = Data(__name__)
|
||||
|
||||
|
||||
class LRUCache:
|
||||
@@ -536,3 +192,120 @@ class LRUCache:
|
||||
cache.pop(d)
|
||||
return ret
|
||||
return wrap
|
||||
|
||||
|
||||
def parse_url(url):
    """
    Returns a (scheme, host, port, path) tuple, or None on error.
    """
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    if not scheme:
        return None
    if ':' in netloc:
        # Explicit port in the netloc; split on the last colon.
        host, port = netloc.rsplit(':', 1)
        try:
            port = int(port)
        except ValueError:
            return None
    else:
        host = netloc
        # No explicit port -- use the scheme's default.
        port = 443 if scheme == "https" else 80
    path = urlparse.urlunparse(('', '', path, params, query, fragment))
    if not path.startswith("/"):
        path = "/" + path
    return scheme, host, port, path
|
||||
|
||||
|
||||
def parse_proxy_spec(url):
    """
    Parse a proxy specification URL into (scheme, host, port), or None
    if the URL is invalid or has no host.
    """
    parsed = parse_url(url)
    if not parsed or not parsed[1]:
        return None
    return parsed[:3]
|
||||
|
||||
|
||||
def parse_content_type(c):
    """
    A simple parser for content-type values. Returns a (type, subtype,
    parameters) tuple, where type and subtype are strings, and parameters
    is a dict. If the string could not be parsed, return None.

    E.g. the following string:

        text/html; charset=UTF-8

    Returns:

        ("text", "html", {"charset": "UTF-8"})
    """
    head, sep, rest = c.partition(";")
    typeparts = head.split("/", 1)
    if len(typeparts) != 2:
        return None
    params = {}
    if sep:
        # Parse "key=value" clauses from the parameter section.
        for piece in rest.split(";"):
            kv = piece.split("=", 1)
            if len(kv) == 2:
                params[kv[0].strip()] = kv[1].strip()
    return typeparts[0].lower(), typeparts[1].lower(), params
|
||||
|
||||
|
||||
def hostport(scheme, host, port):
    """
    Returns the host component, with a port specification if needed.
    """
    # Omit the port when it is the scheme's default.
    is_default = (port, scheme) in [(80, "http"), (443, "https")]
    if is_default:
        return host
    return "%s:%s"%(host, port)
|
||||
|
||||
|
||||
def unparse_url(scheme, host, port, path=""):
    """
    Returns a URL string, constructed from the specified components.
    """
    netpart = hostport(scheme, host, port)
    return "%s://%s%s"%(scheme, netpart, path)
|
||||
|
||||
|
||||
def clean_hanging_newline(t):
    """
    Many editors will silently add a newline to the final line of a
    document (I'm looking at you, Vim). This function fixes this common
    problem at the risk of removing a hanging newline in the rare cases
    where the user actually intends it.
    """
    # Guard against the empty string: t[-1] would raise IndexError.
    if t and t[-1] == "\n":
        return t[:-1]
    return t
|
||||
|
||||
|
||||
def parse_size(s):
    """
    Parses a size specification. Valid specifications are:

        123: bytes
        123k: kilobytes
        123m: megabytes
        123g: gigabytes
    """
    if not s:
        return None
    # Map the optional suffix to a byte multiplier.
    multipliers = {"k": 1024 ** 1, "m": 1024 ** 2, "g": 1024 ** 3}
    mult = multipliers.get(s[-1].lower())
    if mult:
        # Strip the recognized suffix before parsing the number.
        s = s[:-1]
    else:
        mult = 1
    try:
        return int(s) * mult
    except ValueError:
        raise ValueError("Invalid size specification: %s"%s)
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
IVERSION = (0, 5)
|
||||
VERSION = ".".join([str(i) for i in IVERSION])
|
||||
IVERSION = (0, 8, 1)
|
||||
VERSION = ".".join(str(i) for i in IVERSION)
|
||||
|
||||
23
mitmdump
@@ -1,24 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os.path
|
||||
from libmproxy import proxy, dump, utils, cmdline
|
||||
import sys
|
||||
from libmproxy import proxy, dump, cmdline
|
||||
from libmproxy.version import VERSION
|
||||
from optparse import OptionParser, OptionGroup
|
||||
from optparse import OptionParser
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
@@ -38,18 +38,21 @@ if __name__ == '__main__':
|
||||
if options.quiet:
|
||||
options.verbose = 0
|
||||
|
||||
config = proxy.process_certificate_option_group(parser, options)
|
||||
proxyconfig = proxy.process_proxy_options(parser, options)
|
||||
if options.no_server:
|
||||
server = None
|
||||
server = proxy.DummyServer(proxyconfig)
|
||||
else:
|
||||
try:
|
||||
server = proxy.ProxyServer(config, options.port, options.addr)
|
||||
server = proxy.ProxyServer(proxyconfig, options.port, options.addr)
|
||||
except proxy.ProxyServerError, v:
|
||||
print >> sys.stderr, "mitmdump:", v.args[0]
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
dumpopts = dump.Options(**cmdline.get_common_options(options))
|
||||
try:
|
||||
dumpopts = dump.Options(**cmdline.get_common_options(options))
|
||||
except cmdline.OptionException, v:
|
||||
parser.error(v.message)
|
||||
dumpopts.keepserving = options.keepserving
|
||||
|
||||
if args:
|
||||
|
||||
26
mitmproxy
@@ -1,22 +1,22 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (C) 2010 Aldo Cortesi
|
||||
#
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys, os.path
|
||||
from libmproxy import proxy, controller, console, utils, flow, cmdline
|
||||
import sys
|
||||
from libmproxy import proxy, console, cmdline
|
||||
from libmproxy.version import VERSION
|
||||
from optparse import OptionParser, OptionGroup
|
||||
|
||||
@@ -35,11 +35,6 @@ if __name__ == '__main__':
|
||||
"Filters",
|
||||
"See help in mitmproxy for filter expression syntax."
|
||||
)
|
||||
group.add_option(
|
||||
"-l", "--limit", action="store",
|
||||
type = "str", dest="limit", default=None,
|
||||
help = "Limit filter expression."
|
||||
)
|
||||
group.add_option(
|
||||
"-i", "--intercept", action="store",
|
||||
type = "str", dest="intercept", default=None,
|
||||
@@ -48,10 +43,10 @@ if __name__ == '__main__':
|
||||
parser.add_option_group(group)
|
||||
options, args = parser.parse_args()
|
||||
|
||||
config = proxy.process_certificate_option_group(parser, options)
|
||||
config = proxy.process_proxy_options(parser, options)
|
||||
|
||||
if options.no_server:
|
||||
server = None
|
||||
server = proxy.DummyServer(config)
|
||||
else:
|
||||
try:
|
||||
server = proxy.ProxyServer(config, options.port, options.addr)
|
||||
@@ -59,12 +54,13 @@ if __name__ == '__main__':
|
||||
print >> sys.stderr, "mitmproxy:", v.args[0]
|
||||
sys.exit(1)
|
||||
|
||||
opts = console.Options(**cmdline.get_common_options(options))
|
||||
try:
|
||||
opts = console.Options(**cmdline.get_common_options(options))
|
||||
except cmdline.OptionException, v:
|
||||
parser.error(v.message)
|
||||
opts.intercept = options.intercept
|
||||
opts.limit = options.limit
|
||||
opts.debug = options.debug
|
||||
m = console.ConsoleMaster(server, opts)
|
||||
|
||||
m.run()
|
||||
|
||||
|
||||
|
||||
20
setup.py
@@ -27,7 +27,7 @@ def findPackages(path, dataExclude=[]):
|
||||
that only data _directories_ and their contents are returned -
|
||||
non-Python files at module scope are not, and should be manually
|
||||
included.
|
||||
|
||||
|
||||
dataExclude is a list of fnmatch-compatible expressions for files and
|
||||
directories that should not be included in pakcage_data.
|
||||
|
||||
@@ -65,14 +65,12 @@ def findPackages(path, dataExclude=[]):
|
||||
return packages, package_data
|
||||
|
||||
|
||||
|
||||
|
||||
long_description = file("README.mkd").read()
|
||||
long_description = file("README.txt").read()
|
||||
packages, package_data = findPackages("libmproxy")
|
||||
setup(
|
||||
name = "mitmproxy",
|
||||
version = version.VERSION,
|
||||
description = "An interactive SSL-capable intercepting HTTP proxy for penetration testers and software developers.",
|
||||
description = "An interactive, SSL-capable, man-in-the-middle HTTP proxy for penetration testers and software developers.",
|
||||
long_description = long_description,
|
||||
author = "Aldo Cortesi",
|
||||
author_email = "aldo@corte.si",
|
||||
@@ -81,10 +79,18 @@ setup(
|
||||
package_data = package_data,
|
||||
scripts = ["mitmproxy", "mitmdump"],
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Environment :: Console",
|
||||
"Environment :: Console :: Curses",
|
||||
"Operating System :: MacOS :: MacOS X",
|
||||
"Operating System :: POSIX",
|
||||
"Programming Language :: Python",
|
||||
"Topic :: Security",
|
||||
"Topic :: Internet",
|
||||
"Topic :: Internet :: WWW/HTTP",
|
||||
"Topic :: Internet :: Proxy Servers",
|
||||
"Topic :: Software Development :: Testing"
|
||||
]
|
||||
],
|
||||
install_requires=['urwid==1.0.1', 'pyasn1', 'pyopenssl>=0.12', "PIL", "lxml"],
|
||||
)
|
||||
|
||||
@@ -2,4 +2,5 @@ base = ..
|
||||
coverage = ../libmproxy
|
||||
exclude = .
|
||||
../libmproxy/contrib
|
||||
../libmproxy/tnetstring.py
|
||||
|
||||
|
||||
BIN
test/data/dercert
Normal file
BIN
test/data/image-err1.jpg
Normal file
|
After Width: | Height: | Size: 81 KiB |
BIN
test/data/image.gif
Normal file
|
After Width: | Height: | Size: 2.3 KiB |
BIN
test/data/image.ico
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
test/data/image.jpg
Normal file
|
After Width: | Height: | Size: 1.5 KiB |