Compare commits

..

73 Commits

Author SHA1 Message Date
Maximilian Hils
170568ded0 fix text selection hint 2015-07-22 00:41:02 +02:00
Maximilian Hils
cacf767b5c bump version 2015-07-22 00:16:38 +02:00
Maximilian Hils
ec02eda0b4 preliminary release script 2015-07-21 19:03:25 +02:00
Maximilian Hils
7fcbbb86cc streamline build process 2015-07-21 17:51:59 +02:00
Maximilian Hils
a08172f6cc fix slack badge 2015-07-17 20:49:58 +02:00
Maximilian Hils
1abd2647b4 Add Slack to README. 2015-07-15 04:20:04 +02:00
Maximilian Hils
822bd10465 Merge pull request #677 from isra17/json_error
Use `ensure_ascii` in web.app#broadcast
2015-07-14 00:53:32 +02:00
Maximilian Hils
3dcc05ca97 Merge pull request #676 from isra17/master
Add source address filter
2015-07-14 00:42:48 +02:00
isra17
c33df9dd1c Use ensure_ascii in web.app#broadcast
Otherwise, a non-unicode character in a flow causes mitmweb to crash.
2015-07-13 18:19:33 -04:00
isra17
471e196e08 Add ~src ~dst REGEX filters
This filter allows matching on the request source and destination address
in the form of `<IP>:<Port>`.

Also fixed the parsing grammar to add a `WordEnd` after each filter
name. That way, `~src` doesn't match `~s` instead and keeps the behavior
consistent with `~hq` != `~h`.
2015-07-13 18:11:14 -04:00
Maximilian Hils
2af2e60f1f Merge pull request #673 from jlhonora/master
Match g/G shortcuts to less
2015-07-08 18:36:49 +02:00
jlhonora
1ebdda7902 Match g/G shortcuts to less, fix #631 2015-07-08 12:10:03 -03:00
Maximilian Hils
f8243086f6 improve dependency management in setup.py 2015-07-04 19:38:41 +02:00
Maximilian Hils
5a2b121f50 fix #648 2015-07-04 19:11:02 +02:00
Maximilian Hils
08476e988f clarify docs 2015-07-04 17:44:52 +02:00
Maximilian Hils
9960565359 fix #653 2015-07-04 15:46:45 +02:00
Maximilian Hils
4c831992aa fix #593, fix #656, coverage++ 2015-07-03 02:47:12 +02:00
Maximilian Hils
9bffd9cf03 fix windows compatibility 2015-07-03 02:46:06 +02:00
Aldo Cortesi
5ad6773e78 Merge pull request #661 from kyle-m/master
Enabling upstream server verification.
2015-06-30 10:51:46 +12:00
Kyle Morton
f0ad1f334c Enabling upstream server verification. Added flags --verify_upstream_cert,
--upstream-trusted-cadir, and --upstream-trusted-ca.
2015-06-29 11:00:20 -07:00
Maximilian Hils
aebad44d55 synchronize metadata files across projects 2015-06-26 23:43:19 +02:00
Maximilian Hils
c8f6bf66fb Merge branch 'appveyor2' 2015-06-26 23:24:31 +02:00
Aldo Cortesi
5f277408cf Merge pull request #654 from mitmproxy/remove_certforward
Remove Certforward Feature
2015-06-27 09:22:41 +12:00
Maximilian Hils
7990503eaf docs: fix certinstall image max-width 2015-06-26 23:04:15 +02:00
Maximilian Hils
643accd5f8 add appveyor 2015-06-26 18:33:31 +02:00
Maximilian Hils
b369962cbe remove certforward feature
The certforward feature was implemented to support #gotofail,
which only works on unpatched iOS devices. Given that many apps don't
support iOS 7 anymore, jailbreak+ssl killswitch is usually the better option.
By removing certforward, we can make netlib a pure python module again,
which significantly simplifies distribution.
2015-06-26 13:27:40 +02:00
Aldo Cortesi
876252eba8 Remove stray print & massage whitespace a bit 2015-06-25 10:27:22 +12:00
Aldo Cortesi
becc790d83 Merge pull request #645 from elitest/hardfailvenv
Add a check to see if virtualenv is installed.
2015-06-24 15:10:41 +12:00
Jim Shaver
080e453425 Merge branch 'master' into hardfailvenv
Conflicts:
	dev
2015-06-23 21:48:05 -05:00
Jim Shaver
db5c0b210b merged with upstream changes in dev 2015-06-23 20:50:51 -05:00
Jim Shaver
854dc004ee revert change, already done on master. 2015-06-23 20:47:36 -05:00
Jim Shaver
951fe2f7ed Add a check to see if virtualenv is installed. 2015-06-23 20:17:57 -05:00
Aldo Cortesi
074d8d7c74 Merge pull request #640 from Kriechi/ssl-version-handling
unify SSL version/method handling
2015-06-23 22:20:10 +12:00
Thomas Kriechbaumer
14e49f4fc7 unify SSL version/method handling 2015-06-23 12:05:34 +02:00
Aldo Cortesi
2c928181e8 Merge branch 'master' of ssh.github.com:mitmproxy/mitmproxy 2015-06-23 14:01:50 +12:00
Aldo Cortesi
237e05c823 Travis to Slack 2015-06-23 14:01:31 +12:00
Aldo Cortesi
e2069d52a8 Merge pull request #638 from iroiro123/http-transparent
HTTP Transparent Proxy
2015-06-23 09:40:25 +12:00
iroiro123
fbb23b5c9f changed error handling (ssl spoof mode) 2015-06-23 01:49:22 +09:00
iroiro123
5c7fa7a594 add unit tests for spoof mode 2015-06-23 00:57:33 +09:00
iroiro123
fd90367329 SSL Spoof mode 2015-06-21 00:51:56 +09:00
iroiro123
378aa78324 Spoof mode 2015-06-20 21:43:50 +09:00
Maximilian Hils
159543dd2b Update README.mkd 2015-06-18 18:10:06 +02:00
iroiro123
59ec291b6c HTTP Transparent Proxy 2015-06-18 23:53:27 +09:00
Thomas Kriechbaumer
e9fecbf830 add landscape/prospector config 2015-06-18 10:31:27 +02:00
Maximilian Hils
36d7e3fcd6 Update README.mkd 2015-06-16 03:05:21 +02:00
Thomas Kriechbaumer
5fea5d7813 fix pep8 config 2015-06-15 12:01:42 +02:00
Aldo Cortesi
7890450b0c Handle invalid IDNA encoding in hostnames
Fixes #622
2015-06-12 16:00:16 +12:00
Aldo Cortesi
fcc1558180 Fix typo in docs - thanks to Jim_Showalter@intuit.com 2015-06-12 14:15:26 +12:00
Aldo Cortesi
85e1ae7543 Merge pull request #624 from drahosj/master
Added flow marking functionality in the console
2015-06-12 13:53:07 +12:00
Jake Drahos
946030367f Added unmark all functionality
- 'U' to unmark all marked flows
2015-06-11 20:31:54 -05:00
Jake Drahos
2a6698bf5a Moved marking from flow to console
No longer taints the flow primitive
2015-06-11 20:27:33 -05:00
Jake Drahos
f2d7a6aa57 Merge remote-tracking branch 'origin/master'
Conflicts:
	libmproxy/console/flowlist.py
2015-06-11 16:34:53 -05:00
Jake Drahos
dd1e401e01 Changed mark symbol
Smaller symbol now, still just as easy to see while scrolling
2015-06-11 16:33:46 -05:00
Jake Drahos
486177edc7 Added functionality to write marked flows to file
w (write) -> m (marked)
2015-06-11 16:25:28 -05:00
Jake Drahos
d45d0ce22a Added functionality to write marked flows to file
w (write) -> m (marked)
2015-06-11 16:15:24 -05:00
Jake Drahos
122ee88021 Unmark duplicated flows
If a marked flow is duplicated, the duplicate will now be unmarked
2015-06-11 12:52:15 -05:00
Jake Drahos
13e71eba10 Changed symbols and colors
Added a better symbol for the mark, and changed the color to red. This helps it
stand out more easily.
2015-06-11 12:02:15 -05:00
Jake Drahos
a34eeb9a28 Fixed console rendering bug
Clearing all flows now works properly
2015-06-11 11:49:23 -05:00
Jake Drahos
e53a2426c1 Marked flows not deleted on clear all
Marked flows survive a clear all unless all current flows are marked.

Bug: They don't show up until another flow is added
2015-06-11 11:40:03 -05:00
Jake Drahos
8b998cfbea Implemented basic marking of flows
- Press m to toggle flow mark
- Flow mark is set in libmproxy/console/common.py. Currently set to "==="
2015-06-11 10:27:48 -05:00
Maximilian Hils
d389b9c59d fix #615 2015-06-08 19:01:32 +02:00
Maximilian Hils
083b404bba fix #618 2015-06-08 18:26:02 +02:00
Thomas Kriechbaumer
c59f045bd9 import travis config from netlib
this enables ALPN support with OpenSSL 1.0.2
2015-06-08 13:39:46 +02:00
David Weinstein
9b70d4bd2a Merge pull request #613 from dweinstein/patch/docker-tweaks
match name of automated build base img
2015-06-07 21:43:26 -04:00
David Weinstein
84b4ca6a65 match name of automated build base img 2015-06-07 11:06:47 -04:00
Aldo Cortesi
60ee1dd3aa Merge pull request #611 from dweinstein/feature/dockerfiles
initial add of docker mitmproxy
2015-06-07 10:12:49 +12:00
David Weinstein
3bd36b58e5 initial add of docker mitmproxy 2015-06-06 00:14:53 -04:00
Aldo Cortesi
bf7b76a997 Merge pull request #610 from tekii/fix-607
Fix 607
2015-06-06 11:10:33 +12:00
Marcelo Glezer
1befa9477c fix #607 fix message 2015-06-05 15:33:36 -03:00
Marcelo Glezer
17b34de28d fix #607 decode data before sending it to pyperclip 2015-06-05 15:19:57 -03:00
Aldo Cortesi
783e904b92 Merge pull request #603 from Kriechi/remove-contrib
replace contrib libraries with pypi dependencies
2015-06-04 13:30:18 +12:00
Aldo Cortesi
249bb4a2ac Bump version 2015-06-04 11:15:29 +12:00
Thomas Kriechbaumer
fbb2633dd3 replace contrib libraries with pypi dependencies 2015-06-02 10:00:34 +02:00
52 changed files with 1046 additions and 4823 deletions

11
.appveyor.yml Normal file
View File

@@ -0,0 +1,11 @@
version: '{build}'
shallow_clone: true
environment:
matrix:
- PYTHON: "C:\\Python27"
install:
- "%PYTHON%\\Scripts\\pip install --src . -r requirements.txt"
- "%PYTHON%\\python -c \"from OpenSSL import SSL; print(SSL.SSLeay_version(SSL.SSLEAY_VERSION))\""
build: off # Not a C# project
test_script:
- "%PYTHON%\\Scripts\\nosetests"

1
.dockerignore Normal file
View File

@@ -0,0 +1 @@
.git

16
.landscape.yml Normal file
View File

@@ -0,0 +1,16 @@
max-line-length: 120
pylint:
options:
dummy-variables-rgx: _$|.+_$|dummy_.+
disable:
- missing-docstring
- protected-access
- too-few-public-methods
- too-many-arguments
- too-many-instance-attributes
- too-many-locals
- too-many-public-methods
- too-many-return-statements
- too-many-statements
- unpacking-non-sequence

View File

@@ -1,25 +1,71 @@
language: python
sudo: false
python:
- "2.7"
- pypy
# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors
matrix:
include:
- python: 2.7
env: OPENSSL=1.0.2
addons:
apt:
sources:
# Debian sid currently holds OpenSSL 1.0.2
# change this with future releases!
- debian-sid
packages:
- libssl-dev
- python: pypy
env: OPENSSL=1.0.2
addons:
apt:
sources:
# Debian sid currently holds OpenSSL 1.0.2
# change this with future releases!
- debian-sid
packages:
- libssl-dev
install:
- "pip install --src . -r requirements.txt"
# command to run tests, e.g. python setup.py test
script:
before_script:
- "openssl version -a"
script:
- "nosetests --with-cov --cov-report term-missing"
after_success:
after_success:
- coveralls
notifications:
irc:
channels:
- "irc.oftc.net#mitmproxy"
on_success: change
on_failure: always
slack:
rooms:
- mitmproxy:YaDGC9Gt9TEM7o8zkC2OLNsu
on_success: :change
on_failure: always
# exclude cryptography from cache
# it depends on libssl-dev version
# which needs to be compiled specifically to each version
before_cache:
- pip uninstall -y cryptography
- rm -rf /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/cryptography/
- rm -rf /home/travis/virtualenv/pypy-2.5.0/site-packages/cryptography/
- rm /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py
- rm /home/travis/virtualenv/pypy-2.5.0/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py
cache:
directories:
- /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages
- /home/travis/virtualenv/python2.7.9/bin
- /home/travis/virtualenv/pypy-2.5.0/site-packages
- /home/travis/virtualenv/pypy-2.5.0/bin
- /home/travis/virtualenv/python2.7.9/lib/python2.7/site-packages
- /home/travis/virtualenv/python2.7.9/bin
- /home/travis/virtualenv/pypy-2.5.0/site-packages
- /home/travis/virtualenv/pypy-2.5.0/bin

View File

@@ -1,56 +1,51 @@
1067 Aldo Cortesi
542 Maximilian Hils
76 Marcelo Glezer
1112 Aldo Cortesi
569 Maximilian Hils
79 Marcelo Glezer
28 Jim Shaver
18 Henrik Nordstrom
13 Thomas Roth
12 Pedro Worcel
11 Stephen Altamirano
11 Jake Drahos
11 Justus Wingert
11 Jim Shaver
11 Stephen Altamirano
10 András Veres-Szentkirályi
9 Legend Tang
8 Rouli
8 Jason A. Novak
8 Rouli
7 Alexis Hildebrandt
5 Matthias Urlichs
6 Thomas Kriechbaumer
5 Brad Peabody
5 Matthias Urlichs
5 Tomaz Muraus
5 elitest
4 root
5 iroiro123
4 Bryan Bishop
4 Marc Liyanage
4 Valtteri Virtanen
4 Wade 524
4 Bryan Bishop
4 Youhei Sakurai
4 Marc Liyanage
4 root
3 Chris Neasbitt
3 Zack B
3 Kyle Manna
3 David Weinstein
3 Eli Shvartsman
3 Kyle Manna
3 Zack B
2 Bennett Blodinger
2 Choongwoo Han
2 Rob Wills
2 israel
2 Mark E. Haase
2 Heikki Hannikainen
2 Jaime Soriano Pastor
2 Jim Lloyd
2 Heikki Hannikainen
2 Krzysztof Bielicki
2 Bennett Blodinger
2 Mark E. Haase
2 Michael Frister
2 Rob Wills
2 alts
1 Yuangxuan Wang
1 capt8bit
1 davidpshaw
1 deployable
1 joebowbeer
1 meeee
1 michaeljau
1 peralta
1 phil plante
1 sentient07
1 vzvu3k6k
2 isra17
2 israel
1 Andy Smith
1 Dan Wilbraham
1 David Shaw
1 Doug Lethin
1 Eric Entzel
1 Felix Wolfsteller
1 Gabriel Kirkpatrick
@@ -61,6 +56,7 @@
1 James Billingham
1 Jean Regisser
1 Kit Randel
1 Kyle Morton
1 Lucas Cimon
1 Mathieu Mitchell
1 Michael Bisbjerg
@@ -82,7 +78,20 @@
1 Steven Van Acker
1 Suyash
1 Tarashish Mishra
1 TearsDontFalls
1 Terry Long
1 Ulrich Petri
1 Vyacheslav Bakhmutov
1 Wade Catron
1 Yuangxuan Wang
1 capt8bit
1 davidpshaw
1 deployable
1 jlhonora
1 joebowbeer
1 meeee
1 michaeljau
1 peralta
1 phil plante
1 sentient07
1 vzvu3k6k

4
Dockerfile Normal file
View File

@@ -0,0 +1,4 @@
FROM mitmproxy/base:latest-onbuild
EXPOSE 8080
EXPOSE 8081
VOLUME /certs

View File

@@ -1,7 +1,9 @@
[![Build Status](https://travis-ci.org/mitmproxy/mitmproxy.svg?branch=master)](https://travis-ci.org/mitmproxy/mitmproxy) [![Coverage Status](https://coveralls.io/repos/mitmproxy/mitmproxy/badge.svg?branch=master)](https://coveralls.io/r/mitmproxy/mitmproxy)
[![Latest Version](https://pypip.in/version/mitmproxy/badge.svg?style=flat)](https://pypi.python.org/pypi/mitmproxy/)
[![Supported Python versions](https://pypip.in/py_versions/mitmproxy/badge.svg?style=flat)](https://pypi.python.org/pypi/mitmproxy)
[![Supported Python implementations](https://pypip.in/implementation/mitmproxy/badge.svg?style=flat)](https://pypi.python.org/pypi/mitmproxy/)
[![Build Status](https://img.shields.io/travis/mitmproxy/mitmproxy/master.svg)](https://travis-ci.org/mitmproxy/mitmproxy)
[![Code Health](https://landscape.io/github/mitmproxy/mitmproxy/master/landscape.svg?style=flat)](https://landscape.io/github/mitmproxy/mitmproxy/master)
[![Coverage Status](https://img.shields.io/coveralls/mitmproxy/mitmproxy/master.svg)](https://coveralls.io/r/mitmproxy/mitmproxy)
[![Downloads](https://img.shields.io/pypi/dm/mitmproxy.svg?color=orange)](https://pypi.python.org/pypi/mitmproxy)
[![Latest Version](https://img.shields.io/pypi/v/mitmproxy.svg)](https://pypi.python.org/pypi/mitmproxy)
[![Supported Python versions](https://img.shields.io/pypi/pyversions/mitmproxy.svg)](https://pypi.python.org/pypi/mitmproxy)
__mitmproxy__ is an interactive, SSL-capable man-in-the-middle proxy for HTTP
@@ -12,11 +14,14 @@ __mitmdump__ is the command-line version of mitmproxy. Think tcpdump for HTTP.
__libmproxy__ is the library that mitmproxy and mitmdump are built on.
Documentation, tutorials and distribution packages can be found on the
mitmproxy.org website:
mitmproxy.org website:
[mitmproxy.org](http://mitmproxy.org).
You can find complete directions for installing mitmproxy [here](http://mitmproxy.org/doc/install.html).
Installation Instructions are available at [mitmproxy.org/doc/install.html](http://mitmproxy.org/doc/install.html).
You can join our developer chat on Slack:
[![Slack](https://mitmproxy-slack.herokuapp.com/badge.svg)](https://mitmproxy-slack.herokuapp.com/)
Features

View File

@@ -1,20 +0,0 @@
#!/bin/bash
autopep8 -i -r -a -a .
if [[ -n "$(git status -s)" ]]; then
echo "autopep8 yielded the following changes:"
git status -s
git --no-pager diff
exit 1
fi
autoflake -i -r --remove-all-unused-imports --remove-unused-variables .
if [[ -n "$(git status -s)" ]]; then
echo "autoflake yielded the following changes:"
git status -s
git --no-pager diff
exit 1
fi
echo "Coding style seems to be ok."
exit 0

2
dev
View File

@@ -2,7 +2,7 @@
set -e
VENV=../venv.mitmproxy
python -m virtualenv $VENV
python -m virtualenv $VENV || { echo 'virtualenv is not installed. Exiting.' ; exit 1; }
source $VENV/bin/activate
pip install --src .. -r requirements.txt

View File

@@ -11,4 +11,4 @@ if %errorlevel% neq 0 exit /b %errorlevel%
echo.
echo * Created virtualenv environment in %VENV%.
echo * Installed all dependencies into the virtualenv.
echo * Activated virtualenv environment.
echo * Activated virtualenv environment.

View File

@@ -23,7 +23,7 @@ configure your target device with the correct proxy settings. Now start a
browser on the device, and visit the magic domain **mitm.it**. You should see
something like this:
<img src="@!urlTo("certinstall-webapp.png")!@"></img>
<img class="img-responsive" src="@!urlTo("certinstall-webapp.png")!@" ></img>
Click on the relevant icon, and follow the setup instructions for the platform
you're on, and you are good to go.

View File

@@ -120,8 +120,8 @@ Name](http://en.wikipedia.org/wiki/SubjectAltName) field in the SSL certificate
that allows an arbitrary number of alternative domains to be specified. If the
expected domain matches any of these, the client will proceed, even though the
domain doesn't match the certificate Common Name. The answer here is simple:
when extract the CN from the upstream cert, we also extract the SANs, and add
them to the generated dummy certificate.
when we extract the CN from the upstream cert, we also extract the SANs, and
add them to the generated dummy certificate.
## Complication 3: Server Name Indication

View File

@@ -145,8 +145,9 @@ You can view the API documentation using pydoc (which is installed with Python b
## Running scripts in parallel
We have a single flow primitive, so when a script is handling something, other requests block.
While that's a very desirable behaviour under some circumstances, scripts can be run threaded by using the <code>libmproxy.script.concurrent</code> decorator.
We have a single flow primitive, so when a script is blocking, other requests are not processed.
While that's usually a very desirable behaviour, blocking scripts can be run threaded by using the <code>libmproxy.script.concurrent</code> decorator.
If your script does not block, you should avoid the overhead of the decorator.
$!example("examples/nonblocking.py")!$

View File

@@ -370,6 +370,21 @@ def common_options(parser):
default=None,
help="Forward all requests to upstream proxy server: http://host[:port]"
)
group.add_argument(
"--spoof",
action="store_true", dest="spoof_mode", default=False,
help="Use Host header to connect to HTTP servers."
)
group.add_argument(
"--ssl-spoof",
action="store_true", dest="ssl_spoof_mode", default=False,
help="Use TLS SNI to connect to HTTPS servers."
)
group.add_argument(
"--spoofed-port",
action="store", dest="spoofed_ssl_port", type=int, default=443,
help="Port number of upstream HTTPS servers in SSL spoof mode."
)
group = parser.add_argument_group(
"Advanced Proxy Options",

View File

@@ -48,6 +48,7 @@ class ConsoleState(flow.State):
self.set_focus(0)
elif self.follow_focus:
self.set_focus(len(self.view) - 1)
self.set_flow_marked(f, False)
return f
def update_flow(self, f):
@@ -100,9 +101,29 @@ class ConsoleState(flow.State):
return ret
def clear(self):
self.focus = None
marked_flows = []
for f in self.flows:
if self.flow_marked(f):
marked_flows.append(f)
super(ConsoleState, self).clear()
for f in marked_flows:
self.add_flow(f)
self.set_flow_marked(f, True)
if len(self.flows.views) == 0:
self.focus = None
else:
self.focus = 0
self.set_focus(self.focus)
def flow_marked(self, flow):
return self.get_flow_setting(flow, "marked", False)
def set_flow_marked(self, flow, marked):
self.add_flow_setting(flow, "marked", marked)
class Options(object):
attributes = [
@@ -591,6 +612,13 @@ class ConsoleMaster(flow.FlowMaster):
def save_flows(self, path):
return self._write_flows(path, self.state.view)
def save_marked_flows(self, path):
marked_flows = []
for f in self.state.view:
if self.state.flow_marked(f):
marked_flows.append(f)
return self._write_flows(path, marked_flows)
def load_flows_callback(self, path):
if not path:

View File

@@ -115,9 +115,11 @@ def fcol(s, attr):
if urwid.util.detected_encoding:
SYMBOL_REPLAY = u"\u21ba"
SYMBOL_RETURN = u"\u2190"
SYMBOL_MARK = u"\u25cf"
else:
SYMBOL_REPLAY = u"[r]"
SYMBOL_RETURN = u"<-"
SYMBOL_MARK = "[m]"
def raw_format_flow(f, focus, extended, padding):
@@ -133,6 +135,10 @@ def raw_format_flow(f, focus, extended, padding):
)
else:
req.append(fcol(">>" if focus else " ", "focus"))
if f["marked"]:
req.append(fcol(SYMBOL_MARK, "mark"))
if f["req_is_replay"]:
req.append(fcol(SYMBOL_REPLAY, "replay"))
req.append(fcol(f["req_method"], "method"))
@@ -284,8 +290,16 @@ def copy_flow(part, scope, flow, master, state):
signals.status_message.send(message="No contents to copy.")
return
# pyperclip calls encode('utf-8') on data to be copied without checking.
# if data are already encoded that way UnicodeDecodeError is thrown.
toclip = ""
try:
pyperclip.copy(data)
toclip = data.decode('utf-8')
except (UnicodeDecodeError):
toclip = data
try:
pyperclip.copy(toclip)
except (RuntimeError, UnicodeDecodeError, AttributeError):
def save(k):
if k == "y":
@@ -364,7 +378,8 @@ def ask_save_body(part, master, state, flow):
flowcache = utils.LRUCache(800)
def format_flow(f, focus, extended=False, hostheader=False, padding=2):
def format_flow(f, focus, extended=False, hostheader=False, padding=2,
marked=False):
d = dict(
intercepted = f.intercepted,
acked = f.reply.acked,
@@ -376,6 +391,8 @@ def format_flow(f, focus, extended=False, hostheader=False, padding=2):
err_msg = f.error.msg if f.error else None,
resp_code = f.response.code if f.response else None,
marked = marked,
)
if f.response:
if f.response.content:

View File

@@ -9,13 +9,14 @@ from PIL.ExifTags import TAGS
import subprocess
import traceback
import urwid
import html2text
import netlib.utils
from netlib import odict
from . import common, signals
from .. import utils, encoding
from ..contrib import jsbeautifier, html2text
from ..contrib import jsbeautifier
from ..contrib.wbxml.ASCommandResponse import ASCommandResponse
try:

View File

@@ -17,9 +17,11 @@ def _mkhelp():
("F", "toggle follow flow list"),
("l", "set limit filter pattern"),
("L", "load saved flows"),
("m", "toggle flow mark"),
("n", "create a new request"),
("P", "copy flow to clipboard"),
("r", "replay request"),
("U", "unmark all marked flows"),
("V", "revert changes to request"),
("w", "save flows "),
("W", "stream flows to file"),
@@ -48,9 +50,9 @@ class EventListBox(urwid.ListBox):
self.master.clear_events()
key = None
elif key == "G":
self.set_focus(0)
elif key == "g":
self.set_focus(len(self.master.eventlist) - 1)
elif key == "g":
self.set_focus(0)
return urwid.ListBox.keypress(self, size, key)
@@ -108,7 +110,8 @@ class ConnectionItem(urwid.WidgetWrap):
return common.format_flow(
self.flow,
self.f,
hostheader = self.master.showhost
hostheader = self.master.showhost,
marked=self.state.flow_marked(self.flow)
)
def selectable(self):
@@ -120,6 +123,11 @@ class ConnectionItem(urwid.WidgetWrap):
prompt = "Save all flows to",
callback = self.master.save_flows
)
elif k == "m":
signals.status_prompt_path.send(
prompt = "Save marked flows to",
callback = self.master.save_marked_flows
)
else:
signals.status_prompt_path.send(
prompt = "Save this flow to",
@@ -177,6 +185,12 @@ class ConnectionItem(urwid.WidgetWrap):
elif key == "D":
f = self.master.duplicate_flow(self.flow)
self.master.view_flow(f)
elif key == "m":
if self.state.flow_marked(self.flow):
self.state.set_flow_marked(self.flow, False)
else:
self.state.set_flow_marked(self.flow, True)
signals.flowlist_change.send(self)
elif key == "r":
r = self.master.replay_request(self.flow)
if r:
@@ -202,6 +216,10 @@ class ConnectionItem(urwid.WidgetWrap):
),
callback = self.stop_server_playback_prompt,
)
elif key == "U":
for f in self.state.flows:
self.state.set_flow_marked(f, False)
signals.flowlist_change.send(self)
elif key == "V":
if not self.flow.modified():
signals.status_message.send(message="Flow not modified.")
@@ -216,6 +234,7 @@ class ConnectionItem(urwid.WidgetWrap):
keys = (
("all flows", "a"),
("this flow", "t"),
("marked flows", "m"),
),
callback = self.save_flows_prompt,
)
@@ -319,10 +338,10 @@ class FlowListBox(urwid.ListBox):
self.master.clear_flows()
elif key == "e":
self.master.toggle_eventlog()
elif key == "G":
elif key == "g":
self.master.state.set_focus(0)
signals.flowlist_change.send(self)
elif key == "g":
elif key == "G":
self.master.state.set_focus(self.master.state.flow_count())
signals.flowlist_change.send(self)
elif key == "l":

View File

@@ -416,9 +416,9 @@ class GridEditor(urwid.WidgetWrap):
res.append(i[0])
self.callback(self.data_out(res), *self.cb_args, **self.cb_kwargs)
signals.pop_view_state.send(self)
elif key == "G":
self.walker.set_focus(0)
elif key == "g":
self.walker.set_focus(0)
elif key == "G":
self.walker.set_focus(len(self.walker.lst) - 1)
elif key in ["h", "left"]:
self.walker.left()

View File

@@ -28,7 +28,7 @@ class HelpView(urwid.ListBox):
keys = [
("j, k", "down, up"),
("h, l", "left, right (in some contexts)"),
("g, G", "go to end, beginning"),
("g, G", "go to beginning, end"),
("space", "page down"),
("pg up/down", "page up/down"),
("arrows", "up, down, left, right"),
@@ -42,12 +42,12 @@ class HelpView(urwid.ListBox):
text.append(urwid.Text([("head", "\n\nGlobal keys:\n")]))
keys = [
("c", "client replay"),
("c", "client replay of HTTP requests"),
("i", "set interception pattern"),
("o", "options"),
("q", "quit / return to previous page"),
("Q", "quit without confirm prompt"),
("S", "server replay"),
("S", "server replay of HTTP responses"),
]
text.extend(
common.format_keyvals(keys, key="key", val="text", indent=4)
@@ -108,8 +108,8 @@ class HelpView(urwid.ListBox):
return None
elif key == "?":
key = None
elif key == "G":
self.set_focus(0)
elif key == "g":
self.set_focus(0)
elif key == "G":
self.set_focus(len(self.body.contents))
return urwid.ListBox.keypress(self, size, key)

View File

@@ -24,7 +24,7 @@ class Palette:
'method', 'focus',
'code_200', 'code_300', 'code_400', 'code_500', 'code_other',
'error',
'header', 'highlight', 'intercept', 'replay',
'header', 'highlight', 'intercept', 'replay', 'mark',
# Hex view
'offset',
@@ -104,6 +104,7 @@ class LowDark(Palette):
highlight = ('white,bold', 'default'),
intercept = ('brown', 'default'),
replay = ('light green', 'default'),
mark = ('light red', 'default'),
# Hex view
offset = ('dark cyan', 'default'),
@@ -167,6 +168,7 @@ class LowLight(Palette):
highlight = ('black,bold', 'default'),
intercept = ('brown', 'default'),
replay = ('dark green', 'default'),
mark = ('dark red', 'default'),
# Hex view
offset = ('dark blue', 'default'),

View File

@@ -33,10 +33,10 @@ class Searchable(urwid.ListBox):
self.find_next(False)
elif key == "N":
self.find_next(True)
elif key == "G":
elif key == "g":
self.set_focus(0)
self.walker._modified()
elif key == "g":
elif key == "G":
self.set_focus(len(self.walker) - 1)
self.walker._modified()
else:

View File

@@ -23,7 +23,7 @@ class Window(urwid.Frame):
if not k:
if args[1] == "mouse drag":
signals.status_message.send(
message = "Hold down alt or ctrl to select text.",
message = "Hold down shift, alt or ctrl to select text.",
expire = 1
)
elif args[1] == "mouse press" and args[2] == 4:

View File

@@ -1,14 +1,10 @@
Contribs:
pyparsing 1.5.2, MIT license
jsbeautifier, git checkout 25/03/12, MIT license
- Removed test directories
- Disabled packers through a single-line modification (see "# CORTESI"
comment)
html2text, git checkout 18/08/12, GPLv3
WinDivert 1.1.4, LGPL license, http://reqrypt.org/windivert.html
wbxml
- https://github.com/davidpshaw/PyWBXMLDecoder

View File

@@ -1,834 +0,0 @@
#!/usr/bin/env python
"""html2text: Turn HTML into equivalent Markdown-structured text."""
__version__ = "3.200.3"
__author__ = "Aaron Swartz (me@aaronsw.com)"
__copyright__ = "(C) 2004-2008 Aaron Swartz. GNU GPL 3."
__contributors__ = ["Martin 'Joey' Schulze", "Ricardo Reyes", "Kevin Jay North"]
# TODO:
# Support decoded entities with unifiable.
try:
True
except NameError:
setattr(__builtins__, 'True', 1)
setattr(__builtins__, 'False', 0)
def has_key(x, y):
if hasattr(x, 'has_key'): return x.has_key(y)
else: return y in x
try:
import htmlentitydefs
import urlparse
import HTMLParser
except ImportError: #Python3
import html.entities as htmlentitydefs
import urllib.parse as urlparse
import html.parser as HTMLParser
try: #Python3
import urllib.request as urllib
except:
import urllib
import optparse, re, sys, codecs, types
try: from textwrap import wrap
except: pass
# Use Unicode characters instead of their ascii psuedo-replacements
UNICODE_SNOB = 0
# Put the links after each paragraph instead of at the end.
LINKS_EACH_PARAGRAPH = 0
# Wrap long lines at position. 0 for no wrapping. (Requires Python 2.3.)
BODY_WIDTH = 78
# Don't show internal links (href="#local-anchor") -- corresponding link targets
# won't be visible in the plain text file anyway.
SKIP_INTERNAL_LINKS = True
# Use inline, rather than reference, formatting for images and links
INLINE_LINKS = True
# Number of pixels Google indents nested lists
GOOGLE_LIST_INDENT = 36
IGNORE_ANCHORS = False
IGNORE_IMAGES = False
IGNORE_EMPHASIS = False
### Entity Nonsense ###
def name2cp(k):
if k == 'apos': return ord("'")
if hasattr(htmlentitydefs, "name2codepoint"): # requires Python 2.3
return htmlentitydefs.name2codepoint[k]
else:
k = htmlentitydefs.entitydefs[k]
if k.startswith("&#") and k.endswith(";"): return int(k[2:-1]) # not in latin-1
return ord(codecs.latin_1_decode(k)[0])
unifiable = {'rsquo':"'", 'lsquo':"'", 'rdquo':'"', 'ldquo':'"',
'copy':'(C)', 'mdash':'--', 'nbsp':' ', 'rarr':'->', 'larr':'<-', 'middot':'*',
'ndash':'-', 'oelig':'oe', 'aelig':'ae',
'agrave':'a', 'aacute':'a', 'acirc':'a', 'atilde':'a', 'auml':'a', 'aring':'a',
'egrave':'e', 'eacute':'e', 'ecirc':'e', 'euml':'e',
'igrave':'i', 'iacute':'i', 'icirc':'i', 'iuml':'i',
'ograve':'o', 'oacute':'o', 'ocirc':'o', 'otilde':'o', 'ouml':'o',
'ugrave':'u', 'uacute':'u', 'ucirc':'u', 'uuml':'u',
'lrm':'', 'rlm':''}
unifiable_n = {}
for k in unifiable.keys():
unifiable_n[name2cp(k)] = unifiable[k]
### End Entity Nonsense ###
def onlywhite(line):
"""Return true if the line does only consist of whitespace characters."""
for c in line:
if c is not ' ' and c is not ' ':
return c is ' '
return line
def hn(tag):
if tag[0] == 'h' and len(tag) == 2:
try:
n = int(tag[1])
if n in range(1, 10): return n
except ValueError: return 0
def dumb_property_dict(style):
"""returns a hash of css attributes"""
return dict([(x.strip(), y.strip()) for x, y in [z.split(':', 1) for z in style.split(';') if ':' in z]]);
def dumb_css_parser(data):
"""returns a hash of css selectors, each of which contains a hash of css attributes"""
# remove @import sentences
importIndex = data.find('@import')
while importIndex != -1:
data = data[0:importIndex] + data[data.find(';', importIndex) + 1:]
importIndex = data.find('@import')
# parse the css. reverted from dictionary compehension in order to support older pythons
elements = [x.split('{') for x in data.split('}') if '{' in x.strip()]
try:
elements = dict([(a.strip(), dumb_property_dict(b)) for a, b in elements])
except ValueError:
elements = {} # not that important
return elements
def element_style(attrs, style_def, parent_style):
"""returns a hash of the 'final' style attributes of the element"""
style = parent_style.copy()
if 'class' in attrs:
for css_class in attrs['class'].split():
css_style = style_def['.' + css_class]
style.update(css_style)
if 'style' in attrs:
immediate_style = dumb_property_dict(attrs['style'])
style.update(immediate_style)
return style
def google_list_style(style):
"""finds out whether this is an ordered or unordered list"""
if 'list-style-type' in style:
list_style = style['list-style-type']
if list_style in ['disc', 'circle', 'square', 'none']:
return 'ul'
return 'ol'
def google_has_height(style):
"""check if the style of the element has the 'height' attribute explicitly defined"""
if 'height' in style:
return True
return False
def google_text_emphasis(style):
"""return a list of all emphasis modifiers of the element"""
emphasis = []
if 'text-decoration' in style:
emphasis.append(style['text-decoration'])
if 'font-style' in style:
emphasis.append(style['font-style'])
if 'font-weight' in style:
emphasis.append(style['font-weight'])
return emphasis
def google_fixed_width_font(style):
    """check if the css of the current element defines a fixed width font

    Only the two font families emitted by Google Docs exports are
    recognised as fixed width.
    """
    return style.get('font-family', '') in ('Courier New', 'Consolas')
def list_numbering_start(attrs):
    """extract numbering from list element attributes

    Returns the zero-based offset given by an ``<ol start="N">`` attribute,
    or 0 when the attribute is missing or not a valid integer.
    """
    if 'start' in attrs:
        try:
            return int(attrs['start']) - 1
        except ValueError:
            # A malformed start="..." (e.g. start="abc") used to crash the
            # whole conversion; treat it as the default numbering instead.
            return 0
    else:
        return 0
class HTML2Text(HTMLParser.HTMLParser):
    """Streaming HTML-to-Markdown converter built on HTMLParser.

    Feed HTML via handle() (or feed()/close()) and receive Markdown text.
    Output characters are buffered in self.outtextlist and joined in
    close(); paragraph breaks are deferred through self.p_p so that
    consecutive block elements do not emit redundant blank lines.
    Google-Docs-specific handling (CSS-driven emphasis, nested lists) is
    activated by setting self.google_doc = True before feeding.
    """

    def __init__(self, out=None, baseurl=''):
        """Create a converter.

        out     -- optional callable receiving output fragments; defaults
                   to the internal buffer-appending outtextf.
        baseurl -- base URL used to resolve relative link targets when
                   reference-style links are emitted.
        """
        HTMLParser.HTMLParser.__init__(self)

        # Config options (module-level defaults, overridable per instance)
        self.unicode_snob = UNICODE_SNOB
        self.links_each_paragraph = LINKS_EACH_PARAGRAPH
        self.body_width = BODY_WIDTH
        self.skip_internal_links = SKIP_INTERNAL_LINKS
        self.inline_links = INLINE_LINKS
        self.google_list_indent = GOOGLE_LIST_INDENT
        self.ignore_links = IGNORE_ANCHORS
        self.ignore_images = IGNORE_IMAGES
        self.ignore_emphasis = IGNORE_EMPHASIS
        self.google_doc = False
        self.ul_item_mark = '*'

        if out is None:
            self.out = self.outtextf
        else:
            self.out = out

        self.outtextlist = [] # empty list to store output characters before they are "joined"
        try:
            self.outtext = unicode()
        except NameError: # Python3
            self.outtext = str()

        self.quiet = 0            # >0 suppresses output (inside <head>/<style>/<script>)
        self.p_p = 0 # number of newline character to print before next output
        self.outcount = 0         # count of o() calls; used for link reference ordering
        self.start = 1            # at start of output line (suppresses leading space)
        self.space = 0            # pending single space to emit before next data
        self.a = []               # reference-style links awaiting emission
        self.astack = []          # open <a> tags (None for skipped anchors)
        self.acount = 0           # running link reference number
        self.list = []            # stack of open lists: {'name': 'ul'|'ol', 'num': int}
        self.blockquote = 0       # current blockquote nesting depth
        self.pre = 0              # inside <pre> (verbatim output)
        self.startpre = 0         # just entered <pre>
        self.code = False         # inside fixed-width (backtick) emphasis
        self.br_toggle = ''       # set to '  ' by soft_br to force a markdown line break
        self.lastWasNL = 0        # last emitted character was a newline
        self.lastWasList = False  # previous tag closed/opened a list (Google nesting quirk)
        self.style = 0            # inside <style> (collect CSS)
        self.style_def = {}       # parsed CSS rules (selector -> property dict)
        self.tag_stack = []       # (tag, attrs, computed_style) stack for google_doc mode
        self.emphasis = 0         # open emphasis marks (bold/italic/fixed) nesting
        self.drop_white_space = 0 # suppress whitespace right after an opening emphasis mark
        self.inheader = False     # inside <h1>..<h9>
        self.abbr_title = None # current abbreviation definition
        self.abbr_data = None # last inner HTML (for abbr being defined)
        self.abbr_list = {} # stack of abbreviations to write later
        self.baseurl = baseurl

        # Route &nbsp; through a private placeholder so it survives the
        # whitespace collapsing in o() and is restored in close().
        try: del unifiable_n[name2cp('nbsp')]
        except KeyError: pass
        unifiable['nbsp'] = '&nbsp_place_holder;'

    def feed(self, data):
        """Feed a chunk of HTML to the parser."""
        # NOTE(review): this literal looks garbled but matches upstream
        # html2text; it pairs with the r'\/script>' check in handle_data.
        # Confirm against upstream before changing.
        data = data.replace("</' + 'script>", "</ignore>")
        HTMLParser.HTMLParser.feed(self, data)

    def handle(self, data):
        """Convert a complete HTML document and return wrapped Markdown."""
        self.feed(data)
        self.feed("")
        return self.optwrap(self.close())

    def outtextf(self, s):
        """Default output sink: append fragment to the internal buffer."""
        self.outtextlist.append(s)
        if s: self.lastWasNL = s[-1] == '\n'

    def close(self):
        """Finish parsing and return the accumulated (unwrapped) text."""
        HTMLParser.HTMLParser.close(self)

        self.pbr()
        self.o('', 0, 'end')

        self.outtext = self.outtext.join(self.outtextlist)
        if self.unicode_snob:
            nbsp = unichr(name2cp('nbsp'))
        else:
            nbsp = u' '
        self.outtext = self.outtext.replace(u'&nbsp_place_holder;', nbsp)

        return self.outtext

    def handle_charref(self, c):
        # numeric character reference, e.g. &#65; or &#x41;
        self.o(self.charref(c), 1)

    def handle_entityref(self, c):
        # named entity reference, e.g. &amp;
        self.o(self.entityref(c), 1)

    def handle_starttag(self, tag, attrs):
        self.handle_tag(tag, attrs, 1)

    def handle_endtag(self, tag):
        self.handle_tag(tag, None, 0)

    def previousIndex(self, attrs):
        """ returns the index of certain set of attributes (of a link) in the
        self.a list

        If the set of attributes is not found, returns None
        """
        if not has_key(attrs, 'href'): return None

        i = -1
        for a in self.a:
            i += 1
            match = 0

            if has_key(a, 'href') and a['href'] == attrs['href']:
                if has_key(a, 'title') or has_key(attrs, 'title'):
                    if (has_key(a, 'title') and has_key(attrs, 'title') and
                        a['title'] == attrs['title']):
                        match = True
                else:
                    match = True

            if match: return i

    def drop_last(self, nLetters):
        """Remove the last nLetters characters from the output buffer."""
        if not self.quiet:
            self.outtext = self.outtext[:-nLetters]

    def handle_emphasis(self, start, tag_style, parent_style):
        """handles various text emphases"""
        tag_emphasis = google_text_emphasis(tag_style)
        parent_emphasis = google_text_emphasis(parent_style)

        # handle Google's text emphasis
        strikethrough = 'line-through' in tag_emphasis and self.hide_strikethrough
        bold = 'bold' in tag_emphasis and not 'bold' in parent_emphasis
        italic = 'italic' in tag_emphasis and not 'italic' in parent_emphasis
        fixed = google_fixed_width_font(tag_style) and not \
                google_fixed_width_font(parent_style) and not self.pre

        if start:
            # crossed-out text must be handled before other attributes
            # in order not to output qualifiers unnecessarily
            if bold or italic or fixed:
                self.emphasis += 1
            if strikethrough:
                self.quiet += 1
            if italic:
                self.o("_")
                self.drop_white_space += 1
            if bold:
                self.o("**")
                self.drop_white_space += 1
            if fixed:
                self.o('`')
                self.drop_white_space += 1
                self.code = True
        else:
            if bold or italic or fixed:
                # there must not be whitespace before closing emphasis mark
                self.emphasis -= 1
                self.space = 0
                self.outtext = self.outtext.rstrip()
            if fixed:
                if self.drop_white_space:
                    # empty emphasis, drop it
                    self.drop_last(1)
                    self.drop_white_space -= 1
                else:
                    self.o('`')
                self.code = False
            if bold:
                if self.drop_white_space:
                    # empty emphasis, drop it
                    self.drop_last(2)
                    self.drop_white_space -= 1
                else:
                    self.o("**")
            if italic:
                if self.drop_white_space:
                    # empty emphasis, drop it
                    self.drop_last(1)
                    self.drop_white_space -= 1
                else:
                    self.o("_")
            # space is only allowed after *all* emphasis marks
            if (bold or italic) and not self.emphasis:
                self.o(" ")
            if strikethrough:
                self.quiet -= 1

    def handle_tag(self, tag, attrs, start):
        """Central dispatcher: emit Markdown for an opening (start=1) or
        closing (start=0) tag. attrs is None for closing tags."""
        #attrs = fixattrs(attrs)
        if attrs is None:
            attrs = {}
        else:
            attrs = dict(attrs)

        if self.google_doc:
            # the attrs parameter is empty for a closing tag. in addition, we
            # need the attributes of the parent nodes in order to get a
            # complete style description for the current element. we assume
            # that google docs export well formed html.
            parent_style = {}
            if start:
                if self.tag_stack:
                  parent_style = self.tag_stack[-1][2]
                tag_style = element_style(attrs, self.style_def, parent_style)
                self.tag_stack.append((tag, attrs, tag_style))
            else:
                dummy, attrs, tag_style = self.tag_stack.pop()
                if self.tag_stack:
                    parent_style = self.tag_stack[-1][2]

        if hn(tag):
            self.p()
            if start:
                self.inheader = True
                self.o(hn(tag)*"#" + ' ')
            else:
                self.inheader = False
                return # prevent redundant emphasis marks on headers

        if tag in ['p', 'div']:
            if self.google_doc:
                if start and google_has_height(tag_style):
                    self.p()
                else:
                    self.soft_br()
            else:
                self.p()

        if tag == "br" and start: self.o("  \n")

        if tag == "hr" and start:
            self.p()
            self.o("* * *")
            self.p()

        if tag in ["head", "style", 'script']:
            if start: self.quiet += 1
            else: self.quiet -= 1

        if tag == "style":
            if start: self.style += 1
            else: self.style -= 1

        if tag in ["body"]:
            self.quiet = 0 # sites like 9rules.com never close <head>

        if tag == "blockquote":
            if start:
                self.p(); self.o('> ', 0, 1); self.start = 1
                self.blockquote += 1
            else:
                self.blockquote -= 1
                self.p()

        if tag in ['em', 'i', 'u'] and not self.ignore_emphasis: self.o("_")
        if tag in ['strong', 'b'] and not self.ignore_emphasis: self.o("**")
        if tag in ['del', 'strike', 's']:
            # Markdown has no strikethrough; pass the HTML tag through as-is
            if start:
                self.o("<"+tag+">")
            else:
                self.o("</"+tag+">")

        if self.google_doc:
            if not self.inheader:
                # handle some font attributes, but leave headers clean
                self.handle_emphasis(start, tag_style, parent_style)

        if tag in ["code", "tt"] and not self.pre: self.o('`') #TODO: `` `this` ``
        if tag == "abbr":
            if start:
                self.abbr_title = None
                self.abbr_data = ''
                if has_key(attrs, 'title'):
                    self.abbr_title = attrs['title']
            else:
                if self.abbr_title != None:
                    self.abbr_list[self.abbr_data] = self.abbr_title
                    self.abbr_title = None
                self.abbr_data = ''

        if tag == "a" and not self.ignore_links:
            if start:
                if has_key(attrs, 'href') and not (self.skip_internal_links and attrs['href'].startswith('#')):
                    self.astack.append(attrs)
                    self.o("[")
                else:
                    # push None so the matching end tag knows to emit nothing
                    self.astack.append(None)
            else:
                if self.astack:
                    a = self.astack.pop()
                    if a:
                        if self.inline_links:
                            self.o("](" + escape_md(a['href']) + ")")
                        else:
                            # reference-style link: reuse an earlier number
                            # for the same href/title, else allocate a new one
                            i = self.previousIndex(a)
                            if i is not None:
                                a = self.a[i]
                            else:
                                self.acount += 1
                                a['count'] = self.acount
                                a['outcount'] = self.outcount
                                self.a.append(a)
                            self.o("][" + str(a['count']) + "]")

        if tag == "img" and start and not self.ignore_images:
            if has_key(attrs, 'src'):
                attrs['href'] = attrs['src']
                alt = attrs.get('alt', '')
                self.o("![" + escape_md(alt) + "]")

                if self.inline_links:
                    self.o("(" + escape_md(attrs['href']) + ")")
                else:
                    i = self.previousIndex(attrs)
                    if i is not None:
                        attrs = self.a[i]
                    else:
                        self.acount += 1
                        attrs['count'] = self.acount
                        attrs['outcount'] = self.outcount
                        self.a.append(attrs)
                    self.o("[" + str(attrs['count']) + "]")

        if tag == 'dl' and start: self.p()
        if tag == 'dt' and not start: self.pbr()
        if tag == 'dd' and start: self.o('    ')
        if tag == 'dd' and not start: self.pbr()

        if tag in ["ol", "ul"]:
            # Google Docs create sub lists as top level lists
            if (not self.list) and (not self.lastWasList):
                self.p()
            if start:
                if self.google_doc:
                    list_style = google_list_style(tag_style)
                else:
                    list_style = tag
                numbering_start = list_numbering_start(attrs)
                self.list.append({'name':list_style, 'num':numbering_start})
            else:
                if self.list: self.list.pop()
            self.lastWasList = True
        else:
            self.lastWasList = False

        if tag == 'li':
            self.pbr()
            if start:
                if self.list: li = self.list[-1]
                else: li = {'name':'ul', 'num':0}
                if self.google_doc:
                    nest_count = self.google_nest_count(tag_style)
                else:
                    nest_count = len(self.list)
                self.o("  " * nest_count) #TODO: line up <ol><li>s > 9 correctly.
                if li['name'] == "ul": self.o(self.ul_item_mark + " ")
                elif li['name'] == "ol":
                    li['num'] += 1
                    self.o(str(li['num'])+". ")
                self.start = 1

        if tag in ["table", "tr"] and start: self.p()
        if tag == 'td': self.pbr()

        if tag == "pre":
            if start:
                self.startpre = 1
                self.pre = 1
            else:
                self.pre = 0
            self.p()

    def pbr(self):
        """Request at most one pending newline (soft paragraph break)."""
        if self.p_p == 0:
            self.p_p = 1

    def p(self):
        """Request a full paragraph break (blank line) before next output."""
        self.p_p = 2

    def soft_br(self):
        """Request a Markdown hard line break ('  ' + newline)."""
        self.pbr()
        self.br_toggle = '  '

    def o(self, data, puredata=0, force=0):
        """Emit data, applying pending breaks, blockquote prefixes and
        whitespace collapsing.

        puredata -- collapse runs of whitespace (document text, not markup)
        force    -- 1 to emit even when data is empty; 'end' flushes final
                    link references and abbreviation definitions
        """
        if self.abbr_data is not None:
            self.abbr_data += data

        if not self.quiet:
            if self.google_doc:
                # prevent white space immediately after 'begin emphasis' marks ('**' and '_')
                lstripped_data = data.lstrip()
                if self.drop_white_space and not (self.pre or self.code):
                    data = lstripped_data
                if lstripped_data != '':
                    self.drop_white_space = 0

            if puredata and not self.pre:
                data = re.sub('\s+', ' ', data)
                if data and data[0] == ' ':
                    # defer the leading space: it is only emitted if more
                    # non-space output follows on the same line
                    self.space = 1
                    data = data[1:]
            if not data and not force: return

            if self.startpre:
                #self.out(" :") #TODO: not output when already one there
                self.startpre = 0

            bq = (">" * self.blockquote)
            if not (force and data and data[0] == ">") and self.blockquote: bq += " "

            if self.pre:
                # code blocks are indented four spaces (inside blockquote prefix)
                bq += "    "
                data = data.replace("\n", "\n"+bq)

            if self.start:
                self.space = 0
                self.p_p = 0
                self.start = 0

            if force == 'end':
                # It's the end.
                self.p_p = 0
                self.out("\n")
                self.space = 0

            if self.p_p:
                self.out((self.br_toggle+'\n'+bq)*self.p_p)
                self.space = 0
                self.br_toggle = ''

            if self.space:
                if not self.lastWasNL: self.out(' ')
                self.space = 0

            if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"):
                if force == "end": self.out("\n")

                newa = []
                for link in self.a:
                    # only flush references whose link text has already been output
                    if self.outcount > link['outcount']:
                        self.out("   ["+ str(link['count']) +"]: " + urlparse.urljoin(self.baseurl, link['href']))
                        if has_key(link, 'title'): self.out(" ("+link['title']+")")
                        self.out("\n")
                    else:
                        newa.append(link)

                if self.a != newa: self.out("\n") # Don't need an extra line when nothing was done.

                self.a = newa

            if self.abbr_list and force == "end":
                for abbr, definition in self.abbr_list.items():
                    self.out("  *[" + abbr + "]: " + definition + "\n")

            self.p_p = 0
            self.out(data)
            self.outcount += 1

    def handle_data(self, data):
        """Receive character data between tags."""
        # matching hack for feed()'s "</ignore>" substitution of script ends
        if r'\/script>' in data: self.quiet -= 1

        if self.style:
            self.style_def.update(dumb_css_parser(data))

        self.o(data, 1)

    def unknown_decl(self, data): pass

    def charref(self, name):
        """Resolve a numeric character reference (decimal or hex) to text."""
        if name[0] in ['x','X']:
            c = int(name[1:], 16)
        else:
            c = int(name)

        if not self.unicode_snob and c in unifiable_n.keys():
            # ASCII approximation (e.g. curly quote -> straight quote)
            return unifiable_n[c]
        else:
            try:
                return unichr(c)
            except NameError: #Python3
                return chr(c)

    def entityref(self, c):
        """Resolve a named entity reference to text; unknown names are
        passed through as '&name;'."""
        if not self.unicode_snob and c in unifiable.keys():
            return unifiable[c]
        else:
            try: name2cp(c)
            except KeyError: return "&" + c + ';'
            else:
                try:
                    return unichr(name2cp(c))
                except NameError: #Python3
                    return chr(name2cp(c))

    def replaceEntities(self, s):
        # regex-substitution callback for unescape()
        s = s.group(1)
        if s[0] == "#":
            return self.charref(s[1:])
        else: return self.entityref(s)

    # matches '&...;' character/entity references for unescape()
    r_unescape = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));")
    def unescape(self, s):
        """Replace all character/entity references in s with their text."""
        return self.r_unescape.sub(self.replaceEntities, s)

    def google_nest_count(self, style):
        """calculate the nesting count of google doc lists"""
        nest_count = 0
        if 'margin-left' in style:
            # margin-left is e.g. '72px'; strip the unit suffix.
            # NOTE(review): assumes a two-character unit — confirm for 'em' etc.
            nest_count = int(style['margin-left'][:-2]) / self.google_list_indent
        return nest_count

    def optwrap(self, text):
        """Wrap all paragraphs in the provided text."""
        if not self.body_width:
            return text

        assert wrap, "Requires Python 2.3."
        result = ''
        newlines = 0
        for para in text.split("\n"):
            if len(para) > 0:
                if not skipwrap(para):
                    for line in wrap(para, self.body_width):
                        result += line + "\n"
                    result += "\n"
                    newlines = 2
                else:
                    # code blocks, list items etc. are kept verbatim
                    if not onlywhite(para):
                        result += para + "\n"
                        newlines = 1
            else:
                # collapse runs of blank lines to at most one blank line
                if newlines < 2:
                    result += "\n"
                    newlines += 1
        return result
# Matches an ordered-list marker such as "1. " at the start of a string.
ordered_list_matcher = re.compile(r'\d+\.\s')
# Matches an unordered-list marker: '-', '*' or '+' followed by whitespace.
unordered_list_matcher = re.compile(r'[-\*\+]\s')
# Characters that carry meaning in Markdown and must be backslash-escaped.
md_chars_matcher = re.compile(r"([\\\[\]\(\)])")
def skipwrap(para):
    """Return True when *para* must be kept verbatim (not line-wrapped).

    Code blocks, list items and similar constructs would be corrupted by
    re-wrapping; emdash-introduced paragraphs are explicitly wrappable.
    """
    # If the text begins with four spaces or one tab, it's a code block; don't wrap.
    # (slices instead of para[0] so an empty paragraph cannot raise IndexError)
    if para[0:4] == '    ' or para[0:1] == '\t':
        return True
    # If the text begins with only two "--", possibly preceded by whitespace, that's
    # an emdash; so wrap.
    stripped = para.lstrip()
    # len check: a paragraph that is exactly "--" used to raise IndexError here
    if stripped[0:2] == "--" and len(stripped) > 2 and stripped[2] != "-":
        return False
    # I'm not sure what this is for; I thought it was to detect lists, but there's
    # a <br>-inside-<span> case in one of the tests that also depends upon it.
    if stripped[0:1] == '-' or stripped[0:1] == '*':
        return True
    # If the text begins with a single -, *, or +, followed by a space, or an integer,
    # followed by a ., followed by a space (in either case optionally preceeded by
    # whitespace), it's a list; don't wrap.
    if ordered_list_matcher.match(stripped) or unordered_list_matcher.match(stripped):
        return True
    return False
def wrapwrite(text):
    """Write *text* to standard output encoded as UTF-8.

    Uses the binary buffer on Python 3; falls back to the plain stdout
    object on Python 2, which accepts byte strings directly.
    """
    encoded = text.encode('utf-8')
    try:
        sys.stdout.buffer.write(encoded)  # Python 3
    except AttributeError:
        sys.stdout.write(encoded)  # Python 2
def html2text(html, baseurl=''):
    """Convert an HTML document to Markdown-style text with default options."""
    converter = HTML2Text(baseurl=baseurl)
    return converter.handle(html)
def unescape(s, unicode_snob=False):
    """Replace HTML character/entity references in *s* with their text.

    unicode_snob -- when True, keep true Unicode characters instead of
    ASCII approximations.
    """
    converter = HTML2Text()
    converter.unicode_snob = unicode_snob
    return converter.unescape(s)
def escape_md(text):
    """Escapes markdown-sensitive characters."""
    escaped = md_chars_matcher.sub(r"\\\1", text)
    return escaped
def main():
    """Command-line entry point.

    Parses options, reads HTML from a file, a URL or stdin, converts it
    with HTML2Text and writes the Markdown result to stdout.
    """
    baseurl = ''

    p = optparse.OptionParser('%prog [(filename|url) [encoding]]',
                              version='%prog ' + __version__)
    p.add_option("--ignore-emphasis", dest="ignore_emphasis", action="store_true",
        default=IGNORE_EMPHASIS, help="don't include any formatting for emphasis")
    p.add_option("--ignore-links", dest="ignore_links", action="store_true",
        default=IGNORE_ANCHORS, help="don't include any formatting for links")
    p.add_option("--ignore-images", dest="ignore_images", action="store_true",
        default=IGNORE_IMAGES, help="don't include any formatting for images")
    p.add_option("-g", "--google-doc", action="store_true", dest="google_doc",
        default=False, help="convert an html-exported Google Document")
    p.add_option("-d", "--dash-unordered-list", action="store_true", dest="ul_style_dash",
        default=False, help="use a dash rather than a star for unordered list items")
    p.add_option("-b", "--body-width", dest="body_width", action="store", type="int",
        default=BODY_WIDTH, help="number of characters per output line, 0 for no wrap")
    p.add_option("-i", "--google-list-indent", dest="list_indent", action="store", type="int",
        default=GOOGLE_LIST_INDENT, help="number of pixels Google indents nested lists")
    p.add_option("-s", "--hide-strikethrough", action="store_true", dest="hide_strikethrough",
        default=False, help="hide strike-through text. only relevent when -g is specified as well")
    (options, args) = p.parse_args()

    # process input
    encoding = "utf-8"
    if len(args) > 0:
        file_ = args[0]
        if len(args) == 2:
            encoding = args[1]
        if len(args) > 2:
            p.error('Too many arguments')

        if file_.startswith('http://') or file_.startswith('https://'):
            baseurl = file_
            j = urllib.urlopen(baseurl)
            data = j.read()
            # NOTE(review): encoding is initialised to "utf-8" above, so this
            # `is None` branch appears unreachable — confirm before relying on
            # the feedparser-based detection below.
            if encoding is None:
                try:
                    from feedparser import _getCharacterEncoding as enc
                except ImportError:
                    enc = lambda x, y: ('utf-8', 1)
                encoding = enc(j.headers, data)[0]
                if encoding == 'us-ascii':
                    encoding = 'utf-8'
        else:
            data = open(file_, 'rb').read()
            # NOTE(review): same apparent-dead-branch remark as above; chardet
            # detection only runs if encoding was explicitly cleared elsewhere.
            if encoding is None:
                try:
                    from chardet import detect
                except ImportError:
                    detect = lambda x: {'encoding': 'utf-8'}
                encoding = detect(data)['encoding']
    else:
        data = sys.stdin.read()

    data = data.decode(encoding)
    h = HTML2Text(baseurl=baseurl)
    # handle options
    if options.ul_style_dash: h.ul_item_mark = '-'

    h.body_width = options.body_width
    h.list_indent = options.list_indent
    h.ignore_emphasis = options.ignore_emphasis
    h.ignore_links = options.ignore_links
    h.ignore_images = options.ignore_images
    h.google_doc = options.google_doc
    h.hide_strikethrough = options.hide_strikethrough

    wrapwrite(h.handle(data))
# Run as a command-line HTML-to-Markdown converter when executed directly.
if __name__ == "__main__":
    main()

File diff suppressed because it is too large Load Diff

View File

@@ -34,7 +34,7 @@
from __future__ import absolute_import
import re
import sys
from .contrib import pyparsing as pp
import pyparsing as pp
from .protocol.http import decoded
@@ -241,6 +241,19 @@ class FUrl(_Rex):
def __call__(self, f):
return re.search(self.expr, f.request.url)
class FSrc(_Rex):
code = "src"
help = "Match source address"
def __call__(self, f):
return f.client_conn and re.search(self.expr, repr(f.client_conn.address))
class FDst(_Rex):
code = "dst"
help = "Match destination address"
def __call__(self, f):
return f.server_conn and re.search(self.expr, repr(f.server_conn.address))
class _Int(_Action):
def __init__(self, num):
@@ -313,6 +326,8 @@ filt_rex = [
FRequestContentType,
FResponseContentType,
FContentType,
FSrc,
FDst,
]
filt_int = [
FCode
@@ -324,7 +339,7 @@ def _make():
# ones.
parts = []
for klass in filt_unary:
f = pp.Literal("~%s" % klass.code)
f = pp.Literal("~%s" % klass.code) + pp.WordEnd()
f.setParseAction(klass.make)
parts.append(f)
@@ -333,12 +348,12 @@ def _make():
pp.QuotedString("\"", escChar='\\') |\
pp.QuotedString("'", escChar='\\')
for klass in filt_rex:
f = pp.Literal("~%s" % klass.code) + rex.copy()
f = pp.Literal("~%s" % klass.code) + pp.WordEnd() + rex.copy()
f.setParseAction(klass.make)
parts.append(f)
for klass in filt_int:
f = pp.Literal("~%s" % klass.code) + pp.Word(pp.nums)
f = pp.Literal("~%s" % klass.code) + pp.WordEnd() + pp.Word(pp.nums)
f.setParseAction(klass.make)
parts.append(f)

View File

@@ -825,7 +825,7 @@ class FlowMaster(controller.Master):
ssl_established=True
))
f = http.HTTPFlow(c, s)
headers = ODictCaseless()
headers = odict.ODictCaseless()
req = http.HTTPRequest(
"absolute",

View File

@@ -584,11 +584,10 @@ class HTTPRequest(HTTPMessage):
of the request, e.g. if an upstream proxy is in place
If hostheader is set to True, the Host: header will be used as
additional (and preferred) data source. This is handy in transparent
mode, where only the ip of the destination is known, but not the
resolved name. This is disabled by default, as an attacker may spoof
the host header to confuse an analyst.
additional (and preferred) data source. This is handy in
transparent mode, where only the IO of the destination is known,
but not the resolved name. This is disabled by default, as an
attacker may spoof the host header to confuse an analyst.
"""
host = None
if hostheader:
@@ -596,7 +595,10 @@ class HTTPRequest(HTTPMessage):
if not host:
host = self.host
if host:
return host.encode("idna")
try:
return host.encode("idna")
except ValueError:
return host
else:
return None
@@ -1312,9 +1314,7 @@ class HTTPHandler(ProtocolHandler):
pass
elif request.form_in == self.expected_form_in:
request.form_out = self.expected_form_out
if request.form_in == "absolute":
if request.scheme != "http":
raise http.HttpError(
@@ -1327,7 +1327,32 @@ class HTTPHandler(ProtocolHandler):
self.c.set_server_address((request.host, request.port))
flow.server_conn = self.c.server_conn
elif request.form_in == "relative":
if self.c.config.mode == "spoof":
# Host header
h = request.pretty_host(hostheader=True)
if h is None:
raise http.HttpError(
400,
"Invalid request: No host information"
)
p = http.parse_url("http://" + h)
request.scheme = p[0]
request.host = p[1]
request.port = p[2]
self.c.set_server_address((request.host, request.port))
flow.server_conn = self.c.server_conn
if self.c.config.mode == "sslspoof":
# SNI is processed in server.py
if not (flow.server_conn and flow.server_conn.ssl_established):
raise http.HttpError(
400,
"Invalid request: No host information"
)
return None
raise http.HttpError(
400, "Invalid HTTP request form (expected: %s, got: %s)" % (
self.expected_form_in, request.form_in

View File

@@ -4,7 +4,7 @@ import re
from OpenSSL import SSL
from netlib import http_auth, certutils, tcp
from .. import utils, platform, version
from .primitives import RegularProxyMode, TransparentProxyMode, UpstreamProxyMode, ReverseProxyMode, Socks5ProxyMode
from .primitives import RegularProxyMode, SpoofMode, SSLSpoofMode, TransparentProxyMode, UpstreamProxyMode, ReverseProxyMode, Socks5ProxyMode
TRANSPARENT_SSL_PORTS = [443, 8443]
CONF_BASENAME = "mitmproxy"
@@ -48,10 +48,13 @@ class ProxyConfig:
ciphers_client=None,
ciphers_server=None,
certs=[],
certforward=False,
ssl_version_client="secure",
ssl_version_server="secure",
ssl_ports=TRANSPARENT_SSL_PORTS
ssl_version_client=tcp.SSL_DEFAULT_METHOD,
ssl_version_server=tcp.SSL_DEFAULT_METHOD,
ssl_ports=TRANSPARENT_SSL_PORTS,
spoofed_ssl_port=None,
ssl_verify_upstream_cert=False,
ssl_upstream_trusted_cadir=None,
ssl_upstream_trusted_ca=None
):
self.host = host
self.port = port
@@ -70,6 +73,10 @@ class ProxyConfig:
self.mode = ReverseProxyMode(upstream_server)
elif mode == "upstream":
self.mode = UpstreamProxyMode(upstream_server)
elif mode == "spoof":
self.mode = SpoofMode()
elif mode == "sslspoof":
self.mode = SSLSpoofMode(spoofed_ssl_port)
else:
self.mode = RegularProxyMode()
@@ -86,47 +93,33 @@ class ProxyConfig:
CONF_BASENAME)
for spec, cert in certs:
self.certstore.add_cert_file(spec, cert)
self.certforward = certforward
self.openssl_method_client, self.openssl_options_client = version_to_openssl(
ssl_version_client)
self.openssl_method_server, self.openssl_options_server = version_to_openssl(
ssl_version_server)
self.ssl_ports = ssl_ports
if isinstance(ssl_version_client, int):
self.openssl_method_client = ssl_version_client
else:
self.openssl_method_client = tcp.SSL_VERSIONS[ssl_version_client]
if isinstance(ssl_version_server, int):
self.openssl_method_server = ssl_version_server
else:
self.openssl_method_server = tcp.SSL_VERSIONS[ssl_version_server]
if ssl_verify_upstream_cert:
self.openssl_verification_mode_server = SSL.VERIFY_PEER
else:
self.openssl_verification_mode_server = SSL.VERIFY_NONE
self.openssl_trusted_cadir_server = ssl_upstream_trusted_cadir
self.openssl_trusted_ca_server = ssl_upstream_trusted_ca
sslversion_choices = (
"all",
"secure",
"SSLv2",
"SSLv3",
"TLSv1",
"TLSv1_1",
"TLSv1_2")
def version_to_openssl(version):
"""
Convert a reasonable SSL version specification into the format OpenSSL expects.
Don't ask...
https://bugs.launchpad.net/pyopenssl/+bug/1020632/comments/3
"""
if version == "all":
return SSL.SSLv23_METHOD, None
elif version == "secure":
# SSLv23_METHOD + NO_SSLv2 + NO_SSLv3 == TLS 1.0+
# TLSv1_METHOD would be TLS 1.0 only
return SSL.SSLv23_METHOD, (SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
elif version in sslversion_choices:
return getattr(SSL, "%s_METHOD" % version), None
else:
raise ValueError("Invalid SSL version: %s" % version)
self.openssl_options_client = tcp.SSL_DEFAULT_OPTIONS
self.openssl_options_server = tcp.SSL_DEFAULT_OPTIONS
def process_proxy_options(parser, options):
body_size_limit = utils.parse_size(options.body_size_limit)
c = 0
mode, upstream_server = None, None
mode, upstream_server, spoofed_ssl_port = None, None, None
if options.transparent_proxy:
c += 1
if not platform.resolver:
@@ -144,6 +137,13 @@ def process_proxy_options(parser, options):
c += 1
mode = "upstream"
upstream_server = options.upstream_proxy
if options.spoof_mode:
c += 1
mode = "spoof"
if options.ssl_spoof_mode:
c += 1
mode = "sslspoof"
spoofed_ssl_port = options.spoofed_ssl_port
if c > 1:
return parser.error(
"Transparent, SOCKS5, reverse and upstream proxy mode "
@@ -210,10 +210,13 @@ def process_proxy_options(parser, options):
ciphers_client=options.ciphers_client,
ciphers_server=options.ciphers_server,
certs=certs,
certforward=options.certforward,
ssl_version_client=options.ssl_version_client,
ssl_version_server=options.ssl_version_server,
ssl_ports=ssl_ports
ssl_ports=ssl_ports,
spoofed_ssl_port=spoofed_ssl_port,
ssl_verify_upstream_cert=options.ssl_verify_upstream_cert,
ssl_upstream_trusted_cadir=options.ssl_upstream_trusted_cadir,
ssl_upstream_trusted_ca=options.ssl_upstream_trusted_ca
)
@@ -232,11 +235,6 @@ def ssl_option_group(parser):
'it is used, else the default key in the conf dir is used. '
'The PEM file should contain the full certificate chain, with the leaf certificate as the first entry. '
'Can be passed multiple times.')
group.add_argument(
"--cert-forward", action="store_true",
dest="certforward", default=False,
help="Simply forward SSL certificates from upstream."
)
group.add_argument(
"--ciphers-client", action="store",
type=str, dest="ciphers_client", default=None,
@@ -257,6 +255,23 @@ def ssl_option_group(parser):
action="store_true", dest="no_upstream_cert",
help="Don't connect to upstream server to look up certificate details."
)
group.add_argument(
"--verify-upstream-cert", default=False,
action="store_true", dest="ssl_verify_upstream_cert",
help="Verify upstream server SSL/TLS certificates and fail if invalid "
"or not present."
)
group.add_argument(
"--upstream-trusted-cadir", default=None, action="store",
dest="ssl_upstream_trusted_cadir",
help="Path to a directory of trusted CA certificates for upstream "
"server verification prepared using the c_rehash tool."
)
group.add_argument(
"--upstream-trusted-ca", default=None, action="store",
dest="ssl_upstream_trusted_ca",
help="Path to a PEM formatted trusted CA certificate."
)
group.add_argument(
"--ssl-port",
action="append",
@@ -268,16 +283,18 @@ def ssl_option_group(parser):
"Defaults to %s." %
str(TRANSPARENT_SSL_PORTS))
group.add_argument(
"--ssl-version-client", dest="ssl_version_client",
default="secure", action="store",
choices=sslversion_choices,
help="Set supported SSL/TLS version for client connections. "
"SSLv2, SSLv3 and 'all' are INSECURE. Defaults to secure."
"--ssl-version-client", dest="ssl_version_client", type=str, default=tcp.SSL_DEFAULT_VERSION,
choices=tcp.SSL_VERSIONS.keys(),
help=""""
Use a specified protocol for client connections:
TLSv1.2, TLSv1.1, TLSv1, SSLv3, SSLv2, SSLv23.
Default to SSLv23."""
)
group.add_argument(
"--ssl-version-server", dest="ssl_version_server",
default="secure", action="store",
choices=sslversion_choices,
help="Set supported SSL/TLS version for server connections. "
"SSLv2, SSLv3 and 'all' are INSECURE. Defaults to secure."
"--ssl-version-server", dest="ssl_version_server", type=str, default=tcp.SSL_DEFAULT_VERSION,
choices=tcp.SSL_VERSIONS.keys(),
help=""""
Use a specified protocol for server connections:
TLSv1.2, TLSv1.1, TLSv1, SSLv3, SSLv2, SSLv23.
Default to SSLv23."""
)

View File

@@ -1,5 +1,5 @@
from __future__ import absolute_import
from netlib import socks
from netlib import socks, tcp
class ProxyError(Exception):
@@ -51,6 +51,33 @@ class RegularProxyMode(ProxyMode):
return None
class SpoofMode(ProxyMode):
http_form_in = "relative"
http_form_out = "relative"
def get_upstream_server(self, client_conn):
return None
@property
def name(self):
return "spoof"
class SSLSpoofMode(ProxyMode):
http_form_in = "relative"
http_form_out = "relative"
def __init__(self, sslport):
self.sslport = sslport
def get_upstream_server(self, client_conn):
return None
@property
def name(self):
return "sslspoof"
class TransparentProxyMode(ProxyMode):
http_form_in = "relative"
http_form_out = "relative"
@@ -79,24 +106,11 @@ class Socks5ProxyMode(ProxyMode):
def __init__(self, sslports):
self.sslports = sslports
@staticmethod
def _assert_socks5(msg):
if msg.ver != socks.VERSION.SOCKS5:
if msg.ver == ord("G") and len(msg.methods) == ord("E"):
guess = "Probably not a SOCKS request but a regular HTTP request. "
else:
guess = ""
raise socks.SocksError(
socks.REP.GENERAL_SOCKS_SERVER_FAILURE,
guess +
"Invalid SOCKS version. Expected 0x05, got 0x%x" %
msg.ver)
def get_upstream_server(self, client_conn):
try:
# Parse Client Greeting
client_greet = socks.ClientGreeting.from_file(client_conn.rfile)
self._assert_socks5(client_greet)
client_greet = socks.ClientGreeting.from_file(client_conn.rfile, fail_early=True)
client_greet.assert_socks5()
if socks.METHOD.NO_AUTHENTICATION_REQUIRED not in client_greet.methods:
raise socks.SocksError(
socks.METHOD.NO_ACCEPTABLE_METHODS,
@@ -113,7 +127,7 @@ class Socks5ProxyMode(ProxyMode):
# Parse Connect Request
connect_request = socks.Message.from_file(client_conn.rfile)
self._assert_socks5(connect_request)
connect_request.assert_socks5()
if connect_request.msg != socks.CMD.CONNECT:
raise socks.SocksError(
socks.REP.COMMAND_NOT_SUPPORTED,
@@ -126,9 +140,9 @@ class Socks5ProxyMode(ProxyMode):
connect_reply = socks.Message(
socks.VERSION.SOCKS5,
socks.REP.SUCCEEDED,
socks.ATYP.DOMAINNAME,
connect_request.atyp,
# dummy value, we don't have an upstream connection yet.
client_conn.address
connect_request.addr
)
connect_reply.to_file(client_conn.wfile)
client_conn.wfile.flush()
@@ -136,12 +150,7 @@ class Socks5ProxyMode(ProxyMode):
ssl = bool(connect_request.addr.port in self.sslports)
return ssl, ssl, connect_request.addr.host, connect_request.addr.port
except socks.SocksError as e:
msg = socks.Message(5, e.code, socks.ATYP.DOMAINNAME, repr(e))
try:
msg.to_file(client_conn.wfile)
except:
pass
except (socks.SocksError, tcp.NetLibError) as e:
raise ProxyError(502, "SOCKS5 mode failure: %s" % str(e))

View File

@@ -117,6 +117,16 @@ class ConnectionHandler:
self.server_conn.address(),
"info")
self.conntype = "tcp"
elif not self.server_conn and self.config.mode == "sslspoof":
port = self.config.mode.sslport
self.set_server_address(("-", port))
self.establish_ssl(client=True)
host = self.client_conn.connection.get_servername()
if host:
self.set_server_address((host, port))
self.establish_server_connection()
self.establish_ssl(server=True, sni=host)
# Delegate handling to the protocol handler
protocol_handler(
@@ -225,8 +235,18 @@ class ConnectionHandler:
sni,
method=self.config.openssl_method_server,
options=self.config.openssl_options_server,
verify_options=self.config.openssl_verification_mode_server,
ca_path=self.config.openssl_trusted_cadir_server,
ca_pemfile=self.config.openssl_trusted_ca_server,
cipher_list=self.config.ciphers_server,
)
ssl_cert_err = self.server_conn.ssl_verification_error
if ssl_cert_err is not None:
self.log(
"SSL verification failed for upstream server at depth %s with error: %s" %
(ssl_cert_err['depth'], ssl_cert_err['errno']),
"error")
self.log("Ignoring server verification error, continuing with connection", "error")
except tcp.NetLibError as v:
e = ProxyError(502, repr(v))
# Workaround for https://github.com/mitmproxy/mitmproxy/issues/427
@@ -236,6 +256,13 @@ class ConnectionHandler:
if client and "handshake failure" in e.message:
self.server_conn.may_require_sni = e
else:
ssl_cert_err = self.server_conn.ssl_verification_error
if ssl_cert_err is not None:
self.log(
"SSL verification failed for upstream server at depth %s with error: %s" %
(ssl_cert_err['depth'], ssl_cert_err['errno']),
"error")
self.log("Aborting connection attempt", "error")
raise e
if client:
if self.client_conn.ssl_established:
@@ -293,26 +320,25 @@ class ConnectionHandler:
self.channel.tell("log", Log(msg, level))
def find_cert(self):
if self.config.certforward and self.server_conn.ssl_established:
return self.server_conn.cert, self.config.certstore.gen_pkey(
self.server_conn.cert), None
else:
host = self.server_conn.address.host
sans = []
if self.server_conn.ssl_established and (
not self.config.no_upstream_cert):
upstream_cert = self.server_conn.cert
sans.extend(upstream_cert.altnames)
if upstream_cert.cn:
sans.append(host)
host = upstream_cert.cn.decode("utf8").encode("idna")
if self.server_conn.sni:
sans.append(self.server_conn.sni)
host = self.server_conn.address.host
sans = []
if self.server_conn.ssl_established and (
not self.config.no_upstream_cert):
upstream_cert = self.server_conn.cert
sans.extend(upstream_cert.altnames)
if upstream_cert.cn:
sans.append(host)
host = upstream_cert.cn.decode("utf8").encode("idna")
if self.server_conn.sni:
sans.append(self.server_conn.sni)
# for ssl spoof mode
if hasattr(self.client_conn, "sni"):
sans.append(self.client_conn.sni)
ret = self.config.certstore.get_cert(host, sans)
if not ret:
raise ProxyError(502, "Unable to generate dummy cert.")
return ret
ret = self.config.certstore.get_cert(host, sans)
if not ret:
raise ProxyError(502, "Unable to generate dummy cert.")
return ret
def handle_sni(self, connection):
"""
@@ -325,6 +351,8 @@ class ConnectionHandler:
if not sn:
return
sni = sn.decode("utf8").encode("idna")
# for ssl spoof mode
self.client_conn.sni = sni
if sni != self.server_conn.sni:
self.log("SNI received: %s" % sni, "debug")

View File

@@ -128,15 +128,17 @@ class Script:
class ReplyProxy(object):
def __init__(self, original_reply):
self._ignore_calls = 1
self.lock = threading.Lock()
def __init__(self, original_reply, script_thread):
self.original_reply = original_reply
self.script_thread = script_thread
self._ignore_call = True
self.lock = threading.Lock()
def __call__(self, *args, **kwargs):
with self.lock:
if self._ignore_calls > 0:
self._ignore_calls -= 1
if self._ignore_call:
self.script_thread.start()
self._ignore_call = False
return
self.original_reply(*args, **kwargs)
@@ -145,16 +147,19 @@ class ReplyProxy(object):
def _handle_concurrent_reply(fn, o, *args, **kwargs):
# Make first call to o.reply a no op
reply_proxy = ReplyProxy(o.reply)
o.reply = reply_proxy
# Make first call to o.reply a no op and start the script thread.
# We must not start the script thread before, as this may lead to a nasty race condition
# where the script thread replies a different response before the normal reply, which then gets swallowed.
def run():
fn(*args, **kwargs)
# If the script did not call .reply(), we have to do it now.
reply_proxy()
ScriptThread(target=run).start()
script_thread = ScriptThread(target=run)
reply_proxy = ReplyProxy(o.reply, script_thread)
o.reply = reply_proxy
class ScriptThread(threading.Thread):

View File

@@ -1,4 +1,6 @@
IVERSION = (0, 12, 1)
from __future__ import (absolute_import, print_function, division)
IVERSION = (0, 13)
VERSION = ".".join(str(i) for i in IVERSION)
MINORVERSION = ".".join(str(i) for i in IVERSION[:2])
NAME = "mitmproxy"

View File

@@ -81,7 +81,8 @@ class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler):
@classmethod
def broadcast(cls, **kwargs):
message = json.dumps(kwargs)
message = json.dumps(kwargs, ensure_ascii=False)
for conn in cls.connections:
try:
conn.write_message(message)

View File

@@ -59,6 +59,7 @@ body,
}
.main-view {
flex: 1 1 auto;
height: 0;
display: flex;
flex-direction: row;
}

155
release/build.py Normal file
View File

@@ -0,0 +1,155 @@
#!/usr/bin/env python
from os.path import dirname, realpath, join, exists, normpath
from os import chdir, mkdir, getcwd
import os
import shutil
import subprocess
import tempfile
import glob
import re
from shlex import split
from contextlib import contextmanager
import click
# https://virtualenv.pypa.io/en/latest/userguide.html#windows-notes
# scripts and executables on Windows go in ENV\Scripts\ instead of ENV/bin/
if os.name == "nt":
venv_bin = "Scripts"
else:
venv_bin = "bin"
root_dir = join(dirname(realpath(__file__)), "..", "..")
mitmproxy_dir = join(root_dir, "mitmproxy")
dist_dir = join(mitmproxy_dir, "dist")
test_venv_dir = join(root_dir, "venv.mitmproxy-release")
projects = ("netlib", "pathod", "mitmproxy")
tools = ["mitmweb", "mitmdump", "pathod", "pathoc"]
if os.name != "nt":
tools.append("mitmproxy")
version_files = (join(root_dir, x) for x in (
"mitmproxy/libmproxy/version.py",
"pathod/libpathod/version.py",
"netlib/netlib/version.py"
))
@click.group(chain=True)
def cli():
"""
mitmproxy build tool
"""
pass
@cli.command("contributors")
def update_contributors():
print("Updating CONTRIBUTORS.md...")
contributors = subprocess.check_output(split("git shortlog -n -s"))
with open(join(mitmproxy_dir, "CONTRIBUTORS"), "w") as f:
f.write(contributors)
@cli.command("docs")
def render_docs():
print("Rendering the docs...")
subprocess.check_call([
"cshape",
join(mitmproxy_dir, "doc-src"),
join(mitmproxy_dir, "doc")
])
@cli.command("test")
@click.pass_context
def test(ctx):
if not exists(dist_dir):
ctx.invoke(release)
# Make sure that the regular python installation is not on the python path!
os.environ["PYTHONPATH"] = ""
print("Creating virtualenv for test install...")
if exists(test_venv_dir):
shutil.rmtree(test_venv_dir)
subprocess.check_call(["virtualenv", "-q", test_venv_dir])
pip = join(test_venv_dir, venv_bin, "pip")
chdir(dist_dir)
for project in projects:
print("Installing %s..." % project)
dist = glob.glob("./%s*" % project)[0]
subprocess.check_call([pip, "install", "-q", dist])
print("Running binaries...")
for tool in tools:
tool = join(test_venv_dir, venv_bin, tool)
print(tool)
print(subprocess.check_output([tool, "--version"]))
print("Virtualenv available for further testing:")
print(normpath(join(test_venv_dir, venv_bin, "activate")))
@cli.command("release")
def release():
os.environ["PYTHONPATH"] = ""
print("Building release...")
if exists(dist_dir):
shutil.rmtree(dist_dir)
for project in projects:
print("Creating %s source distribution..." % project)
subprocess.check_call(
["python", "./setup.py", "-q", "sdist", "--dist-dir", dist_dir, "--formats=gztar"],
cwd=join(root_dir, project)
)
@cli.command("set-version")
@click.argument('version')
def set_version(version):
version = ", ".join(version.split("."))
for version_file in version_files:
with open(version_file, "rb") as f:
content = f.read()
new_content = re.sub(r"IVERSION\s*=\s*\([\d,\s]+\)", "IVERSION = (%s)" % version, content)
with open(version_file, "wb") as f:
f.write(new_content)
@cli.command("add-tag")
@click.argument('version')
def git_tag(version):
for project in projects:
print("Tagging %s..." % project)
subprocess.check_call(
["git", "tag", version],
cwd=join(root_dir, project)
)
subprocess.check_call(
["git", "push", "--tags"],
cwd=join(root_dir, project)
)
@cli.command("upload")
@click.option('--username', prompt=True)
@click.password_option(confirmation_prompt=False)
@click.option('--repository', default="pypi")
def upload_release(username, password, repository):
print("Uploading distributions...")
subprocess.check_call([
"twine",
"upload",
"-u", username,
"-p", password,
"-r", repository,
"%s/*" % dist_dir
])
if __name__ == "__main__":
cli()

View File

@@ -1,2 +0,0 @@
#!/bin/sh
git shortlog -n -s

View File

@@ -8,30 +8,35 @@
- Ensure that the website style assets have been compiled for production, and
synced to the docs.
- Render the docs:
cshape doc-src doc
- Render the docs, update CONTRIBUTORS file:
./release/build.py docs contributors
- Run the test release, make sure the output is sensible
./release/test-release
./release/build.py release
- Build the OSX binaries
- Follow instructions in osxbinaries
- Follow instructions in osx-binaries
- Move to download dir:
mv ./tmp/osx-mitmproxy-VERSION.tar.gz ~/mitmproxy/www.mitmproxy.org/src/download
- Build the sources for each project:
python ./setup.py sdist
mv ./dist/FILE ~/mitmproxy/www.mitmproxy.org/src/download
- Move all source distributions from mitmproxy's dist folder to the server:
mv ./dist/* ~/mitmproxy/www.mitmproxy.org/src/download
- Tag with the version number, and do:
git push --tags
Tag and push v0.12 for all projects:
./release/build.py tag v0.12
- Upload to pypi for each project:
- Upload to pypi:
python ./setup.py sdist upload
./release/build.py upload
Be careful: Pypi requires you to bump the version number if you want to do any further changes.
- Now bump the version number to be ready for the next cycle:
TODO: We just shipped 0.12 - do we bump to 0.12.1 or 0.13 now?
mitmproxy/libmproxy/version.py
netlib/netlib/version.py
pathod/libpathod/version.py
pathod/libpathod/version.py

View File

@@ -4,6 +4,5 @@ max-complexity = 15
[pep8]
max-line-length = 80
max-complexity = 15
exclude = */contrib/*
ignore = E251,E309

View File

@@ -11,19 +11,19 @@ here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.txt'), encoding='utf-8') as f:
long_description = f.read()
scripts = ["mitmdump", "mitmweb"]
if os.name != "nt":
scripts.append("mitmproxy")
# Core dependencies
deps = {
"netlib>=%s, <%s" % (version.MINORVERSION, version.NEXT_MINORVERSION),
"pyasn1>0.1.2",
"tornado>=4.0.2",
"configargparse>=0.9.3",
"pyperclip>=1.5.8",
"blinker>=1.3"
"blinker>=1.3",
"pyparsing>=1.5.2",
"html2text>=2015.4.14"
}
script_deps = {
# A script -> additional dependencies dict.
scripts = {
"mitmproxy": {
"urwid>=1.3",
"lxml>=3.3.6",
@@ -32,14 +32,30 @@ script_deps = {
"mitmdump": set(),
"mitmweb": set()
}
for script in scripts:
deps.update(script_deps[script])
# Developer dependencies
dev_deps = {
"mock>=1.0.1",
"nose>=1.3.0",
"nose-cov>=1.6",
"coveralls>=0.4.1",
"click>=4.1",
"pathod>=%s, <%s" % (version.MINORVERSION, version.NEXT_MINORVERSION),
"countershape"
}
# Add *all* script dependencies to developer dependencies.
for script_deps in scripts.values():
dev_deps.update(script_deps)
# Remove mitmproxy for Windows support.
if os.name == "nt":
del scripts["mitmproxy"]
deps.add("pydivert>=0.0.7") # Transparent proxying on Windows
console_scripts = [
"%s = libmproxy.main:%s" % (s, s) for s in scripts
]
# Add dependencies for available scripts as core dependencies.
for script_deps in scripts.values():
deps.update(script_deps)
console_scripts = ["%s = libmproxy.main:%s" % (s, s) for s in scripts.keys()]
setup(
name="mitmproxy",
@@ -73,15 +89,7 @@ setup(
'console_scripts': console_scripts},
install_requires=list(deps),
extras_require={
'dev': [
"mock>=1.0.1",
"nose>=1.3.0",
"nose-cov>=1.6",
"coveralls>=0.4.1",
"pathod>=%s, <%s" %
(version.MINORVERSION,
version.NEXT_MINORVERSION),
"countershape"],
'dev': list(dev_deps),
'contentviews': [
"pyamf>=0.6.1",
"protobuf>=2.5.0",

View File

@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICJzCCAZACCQCo1BdopddN/TANBgkqhkiG9w0BAQUFADBXMQswCQYDVQQGEwJB
VTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMRAwDgYDVQQDEwdUUlVTVEVEMCAXDTE1MDYxOTE4MDEzMVoYDzIx
MTUwNTI2MTgwMTMxWjBXMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0
ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRAwDgYDVQQDEwdU
UlVTVEVEMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC00Jf3KrBAmLQWl+Dz
8Qrig8ActB94kv0/Lu03P/2DwOR8kH2h3w4OC3b3CFKX31h7hm/H1PPHq7cIX6IR
fwrYCtBE77UbxklSlrwn06j6YSotz0/dwLEQEFDXWITJq7AyntaiafDHazbbXESN
m/+I/YEl2wKemEHE//qWbeM9kwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAF0NREP3
X+fTebzJGttzrFkDhGVFKRNyLXblXRVanlGOYF+q8grgZY2ufC/55gqf+ub6FRT5
gKPhL4V2rqL8UAvCE7jq8ujpVfTB8kRAKC675W2DBZk2EJX9mjlr89t7qXGsI5nF
onpfJ1UtiJshNoV7h/NFHeoag91kx628807n
-----END CERTIFICATE-----

View File

@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICJzCCAZACCQCo1BdopddN/TANBgkqhkiG9w0BAQUFADBXMQswCQYDVQQGEwJB
VTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMRAwDgYDVQQDEwdUUlVTVEVEMCAXDTE1MDYxOTE4MDEzMVoYDzIx
MTUwNTI2MTgwMTMxWjBXMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0
ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRAwDgYDVQQDEwdU
UlVTVEVEMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC00Jf3KrBAmLQWl+Dz
8Qrig8ActB94kv0/Lu03P/2DwOR8kH2h3w4OC3b3CFKX31h7hm/H1PPHq7cIX6IR
fwrYCtBE77UbxklSlrwn06j6YSotz0/dwLEQEFDXWITJq7AyntaiafDHazbbXESN
m/+I/YEl2wKemEHE//qWbeM9kwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAF0NREP3
X+fTebzJGttzrFkDhGVFKRNyLXblXRVanlGOYF+q8grgZY2ufC/55gqf+ub6FRT5
gKPhL4V2rqL8UAvCE7jq8ujpVfTB8kRAKC675W2DBZk2EJX9mjlr89t7qXGsI5nF
onpfJ1UtiJshNoV7h/NFHeoag91kx628807n
-----END CERTIFICATE-----

View File

@@ -0,0 +1,14 @@
-----BEGIN CERTIFICATE-----
MIICJzCCAZACCQCo1BdopddN/TANBgkqhkiG9w0BAQUFADBXMQswCQYDVQQGEwJB
VTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMRAwDgYDVQQDEwdUUlVTVEVEMCAXDTE1MDYxOTE4MDEzMVoYDzIx
MTUwNTI2MTgwMTMxWjBXMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0
ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRAwDgYDVQQDEwdU
UlVTVEVEMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC00Jf3KrBAmLQWl+Dz
8Qrig8ActB94kv0/Lu03P/2DwOR8kH2h3w4OC3b3CFKX31h7hm/H1PPHq7cIX6IR
fwrYCtBE77UbxklSlrwn06j6YSotz0/dwLEQEFDXWITJq7AyntaiafDHazbbXESN
m/+I/YEl2wKemEHE//qWbeM9kwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAF0NREP3
X+fTebzJGttzrFkDhGVFKRNyLXblXRVanlGOYF+q8grgZY2ufC/55gqf+ub6FRT5
gKPhL4V2rqL8UAvCE7jq8ujpVfTB8kRAKC675W2DBZk2EJX9mjlr89t7qXGsI5nF
onpfJ1UtiJshNoV7h/NFHeoag91kx628807n
-----END CERTIFICATE-----

View File

@@ -0,0 +1,33 @@
-----BEGIN CERTIFICATE-----
MIIC8jCCAlugAwIBAgICEAcwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCQVUx
EzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMg
UHR5IEx0ZDEQMA4GA1UEAxMHVFJVU1RFRDAgFw0xNTA2MjAwMTE4MjdaGA8yMTE1
MDUyNzAxMTgyN1owfjELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUx
ITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEUMBIGA1UECxMLSU5U
RVJNIFVOSVQxITAfBgNVBAMTGE9SRyBXSVRIIElOVEVSTUVESUFURSBDQTCBnzAN
BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAtRPNKgh4WdYGmU2Ae6Tf2Mbd3oaRI/uY
Qm6aKeYk1i7g41C0vVowNcD/qdNpGUNnai/Kak9anHOYyppNo7zHgf3EO8zQ4NTQ
pkDKsdCqbUQcjGfhjWXKnOw+I5er4Rj+MwM1f5cbwb8bYHiSPmXaxzdL0/SNXGAA
ys/UswgwkU8CAwEAAaOBozCBoDAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBTPkPQW
DAPOIy8mipuEsZcP1694EDBxBgNVHSMEajBooVukWTBXMQswCQYDVQQGEwJBVTET
MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ
dHkgTHRkMRAwDgYDVQQDEwdUUlVTVEVEggkAqNQXaKXXTf0wDQYJKoZIhvcNAQEF
BQADgYEApaPbwonY8l+zSxlY2Fw4WNKfl5nwcTW4fuv/0tZLzvsS6P4hTXxbYJNa
k3hQ1qlrr8DiWJewF85hYvEI2F/7eqS5dhhPTEUFPpsjhbgiqnASvW+WKQIgoY2r
aHgOXi7RNFtTcCgk0UZISWOY7ORLy8Xu6vKrLRjDhyfIbGlqnAs=
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQC1E80qCHhZ1gaZTYB7pN/Yxt3ehpEj+5hCbpop5iTWLuDjULS9
WjA1wP+p02kZQ2dqL8pqT1qcc5jKmk2jvMeB/cQ7zNDg1NCmQMqx0KptRByMZ+GN
Zcqc7D4jl6vhGP4zAzV/lxvBvxtgeJI+ZdrHN0vT9I1cYADKz9SzCDCRTwIDAQAB
AoGAfKHocKnrzEmXuSSy7meI+vfF9kfA1ndxUSg3S+dwK0uQ1mTSQhI1ZIo2bnlo
uU6/e0Lxm0KLJ2wZGjoifjSNTC8pcxIfAQY4kM9fqoUcXVSBVSS2kByTunhNSVZQ
yQyc+UTq9g1zBnJsZAltn7/PaihU4heWgP/++lposuShqmECQQDaG+7l0qul1xak
9kuZgc88BSTfn9iMK2zIQRcVKuidK4dT3QEp0wmWR5Ue8jq8lvTmVTGNGZbHcheh
KhoZfLgLAkEA1IjwAw/8z02yV3lbc2QUjIl9m9lvjHBoE2sGuSfq/cZskLKrGat+
CVj3spqVAg22tpQwVBuHiipBziWVnEtiTQJAB9FKfchQSLBt6lm9mfHyKJeSm8VR
8Kw5yO+0URjpn4CI6DOasBIVXOKR8LsD6fCLNJpHHWSWZ+2p9SfaKaGzwwJBAM31
Scld89qca4fzNZkT0goCrvOZeUy6HVE79Q72zPVSFSD/02kT1BaQ3bB5to5/5aD2
6AKJjwZoPs7bgykrsD0CQBzU8U/8x2dNQnG0QeqaKQu5kKhZSZ9bsawvrCkxSl6b
WAjl/Jehi5bbQ07zQo3cge6qeR38FCWVCHQ/5wNbc54=
-----END RSA PRIVATE KEY-----

View File

@@ -0,0 +1,32 @@
# untrusted-interm.crt, self-signed
-----BEGIN CERTIFICATE-----
MIICdTCCAd4CCQDRSKOnIMbTgDANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJB
VTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMRQwEgYDVQQLEwtJTlRFUk0gVU5JVDEhMB8GA1UEAxMYT1JHIFdJ
VEggSU5URVJNRURJQVRFIENBMCAXDTE1MDYyMDAxMzY0M1oYDzIxMTUwNTI3MDEz
NjQzWjB+MQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UE
ChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMRQwEgYDVQQLEwtJTlRFUk0gVU5J
VDEhMB8GA1UEAxMYT1JHIFdJVEggSU5URVJNRURJQVRFIENBMIGfMA0GCSqGSIb3
DQEBAQUAA4GNADCBiQKBgQC1E80qCHhZ1gaZTYB7pN/Yxt3ehpEj+5hCbpop5iTW
LuDjULS9WjA1wP+p02kZQ2dqL8pqT1qcc5jKmk2jvMeB/cQ7zNDg1NCmQMqx0Kpt
RByMZ+GNZcqc7D4jl6vhGP4zAzV/lxvBvxtgeJI+ZdrHN0vT9I1cYADKz9SzCDCR
TwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAGbObAMEajCz4kj7OP2/DB5SRy2+H/G3
8Qvc43xlMMNQyYxsDuLOFL0UMRzoKgntrrm2nni8jND+tuMt+hv3ZlBcJlYJ6ynR
sC1ITTC/1SwwwO0AFIyduUEIJYr/B3sgcVYPLcEfeDZgmEQc9Tnc01aEu3lx2+l9
0JTSPL2L9LdA
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIICXAIBAAKBgQC1E80qCHhZ1gaZTYB7pN/Yxt3ehpEj+5hCbpop5iTWLuDjULS9
WjA1wP+p02kZQ2dqL8pqT1qcc5jKmk2jvMeB/cQ7zNDg1NCmQMqx0KptRByMZ+GN
Zcqc7D4jl6vhGP4zAzV/lxvBvxtgeJI+ZdrHN0vT9I1cYADKz9SzCDCRTwIDAQAB
AoGAfKHocKnrzEmXuSSy7meI+vfF9kfA1ndxUSg3S+dwK0uQ1mTSQhI1ZIo2bnlo
uU6/e0Lxm0KLJ2wZGjoifjSNTC8pcxIfAQY4kM9fqoUcXVSBVSS2kByTunhNSVZQ
yQyc+UTq9g1zBnJsZAltn7/PaihU4heWgP/++lposuShqmECQQDaG+7l0qul1xak
9kuZgc88BSTfn9iMK2zIQRcVKuidK4dT3QEp0wmWR5Ue8jq8lvTmVTGNGZbHcheh
KhoZfLgLAkEA1IjwAw/8z02yV3lbc2QUjIl9m9lvjHBoE2sGuSfq/cZskLKrGat+
CVj3spqVAg22tpQwVBuHiipBziWVnEtiTQJAB9FKfchQSLBt6lm9mfHyKJeSm8VR
8Kw5yO+0URjpn4CI6DOasBIVXOKR8LsD6fCLNJpHHWSWZ+2p9SfaKaGzwwJBAM31
Scld89qca4fzNZkT0goCrvOZeUy6HVE79Q72zPVSFSD/02kT1BaQ3bB5to5/5aD2
6AKJjwZoPs7bgykrsD0CQBzU8U/8x2dNQnG0QeqaKQu5kKhZSZ9bsawvrCkxSl6b
WAjl/Jehi5bbQ07zQo3cge6qeR38FCWVCHQ/5wNbc54=
-----END RSA PRIVATE KEY-----

View File

@@ -241,6 +241,23 @@ class TestMatching:
assert self.q("~c 200", s)
assert not self.q("~c 201", s)
def test_src(self):
q = self.req()
assert self.q("~src address", q)
assert not self.q("~src foobar", q)
assert self.q("~src :22", q)
assert not self.q("~src :99", q)
assert self.q("~src address:22", q)
def test_dst(self):
q = self.req()
q.server_conn = tutils.tserver_conn()
assert self.q("~dst address", q)
assert not self.q("~dst foobar", q)
assert self.q("~dst :22", q)
assert not self.q("~dst :99", q)
assert self.q("~dst address:22", q)
def test_and(self):
s = self.resp()
assert self.q("~c 200 & ~h head", s)

View File

@@ -142,6 +142,10 @@ class TestHTTPRequest:
assert r.pretty_host(True) is None
assert r.pretty_host(False) is None
# Invalid IDNA
r.headers["host"] = [".disqus.com"]
assert r.pretty_host(True) == ".disqus.com"
def test_get_form_for_urlencoded(self):
r = tutils.treq()
r.headers.add("content-type", "application/x-www-form-urlencoded")

View File

@@ -9,6 +9,8 @@ from libpathod import test
from netlib import http, tcp
import mock
from OpenSSL import SSL
def test_proxy_error():
p = ProxyError(111, "msg")
@@ -90,6 +92,12 @@ class TestProcessProxyOptions:
self.assert_err("expected one argument", "-U")
self.assert_err("Invalid server specification", "-U", "upstream")
self.assert_noerr("--spoof")
self.assert_noerr("--ssl-spoof")
self.assert_noerr("--spoofed-port", "443")
self.assert_err("expected one argument", "--spoofed-port")
self.assert_err("mutually exclusive", "-R", "http://localhost", "-T")
def test_client_certs(self):
@@ -127,6 +135,20 @@ class TestProcessProxyOptions:
"--singleuser",
"test")
def test_verify_upstream_cert(self):
p = self.assert_noerr("--verify-upstream-cert")
assert p.openssl_verification_mode_server == SSL.VERIFY_PEER
def test_upstream_trusted_cadir(self):
expected_dir = "/path/to/a/ca/dir"
p = self.assert_noerr("--upstream-trusted-cadir", expected_dir)
assert p.openssl_trusted_cadir_server == expected_dir
def test_upstream_trusted_ca(self):
expected_file = "/path/to/a/cert/file"
p = self.assert_noerr("--upstream-trusted-ca", expected_file)
assert p.openssl_trusted_ca_server == expected_file
class TestProxyServer:
# binding to 0.0.0.0:1 works without special permissions on Windows

View File

@@ -2,13 +2,14 @@ import socket
import time
from libmproxy.proxy.config import HostMatcher
import libpathod
from netlib import tcp, http_auth, http
from netlib import tcp, http_auth, http, socks
from libpathod import pathoc, pathod
from netlib.certutils import SSLCert
import tutils
import tservers
from libmproxy.protocol import KILL, Error
from libmproxy.protocol.http import CONTENT_MISSING
from OpenSSL import SSL
"""
Note that the choice of response code in these tests matters more than you
@@ -236,6 +237,7 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
for i in l:
if "serverdisconnect" in i:
return True
req = "get:'%s/p/200:b@1'"
p = self.pathoc()
assert p.request(req % self.server.urlbase)
@@ -348,13 +350,72 @@ class TestHTTPSCertfile(tservers.HTTPProxTest, CommonMixin):
assert self.pathod("304")
class TestHTTPSUpstreamServerVerificationWTrustedCert(tservers.HTTPProxTest):
"""
Test upstream server certificate verification with a trusted server cert.
"""
ssl = True
ssloptions = pathod.SSLOptions(
cn="trusted-cert",
certs=[
("trusted-cert", tutils.test_data.path("data/trusted-server.crt"))
])
def test_verification_w_cadir(self):
self.config.openssl_verification_mode_server = SSL.VERIFY_PEER
self.config.openssl_trusted_cadir_server = tutils.test_data.path(
"data/trusted-cadir/")
self.pathoc()
def test_verification_w_pemfile(self):
self.config.openssl_verification_mode_server = SSL.VERIFY_PEER
self.config.openssl_trusted_ca_server = tutils.test_data.path(
"data/trusted-cadir/trusted-ca.pem")
self.pathoc()
class TestHTTPSUpstreamServerVerificationWBadCert(tservers.HTTPProxTest):
"""
Test upstream server certificate verification with an untrusted server cert.
"""
ssl = True
ssloptions = pathod.SSLOptions(
cn="untrusted-cert",
certs=[
("untrusted-cert", tutils.test_data.path("data/untrusted-server.crt"))
])
def test_default_verification_w_bad_cert(self):
"""Should use no verification."""
self.config.openssl_trusted_ca_server = tutils.test_data.path(
"data/trusted-cadir/trusted-ca.pem")
self.pathoc()
def test_no_verification_w_bad_cert(self):
self.config.openssl_verification_mode_server = SSL.VERIFY_NONE
self.config.openssl_trusted_ca_server = tutils.test_data.path(
"data/trusted-cadir/trusted-ca.pem")
self.pathoc()
def test_verification_w_bad_cert(self):
self.config.openssl_verification_mode_server = SSL.VERIFY_PEER
self.config.openssl_trusted_ca_server = tutils.test_data.path(
"data/trusted-cadir/trusted-ca.pem")
tutils.raises("SSL handshake error", self.pathoc)
class TestHTTPSNoCommonName(tservers.HTTPProxTest):
"""
Test what happens if we get a cert without common name back.
"""
ssl = True
ssloptions = pathod.SSLOptions(
certs = [
certs=[
("*", tutils.test_data.path("data/no_common_name.pem"))
]
)
@@ -368,6 +429,92 @@ class TestReverse(tservers.ReverseProxTest, CommonMixin, TcpMixin):
reverse = True
class TestSocks5(tservers.SocksModeTest):
def test_simple(self):
p = self.pathoc()
p.socks_connect(("localhost", self.server.port))
f = p.request("get:/p/200")
assert f.status_code == 200
def test_with_authentication_only(self):
p = self.pathoc()
f = p.request("get:/p/200")
assert f.status_code == 502
assert "SOCKS5 mode failure" in f.content
def test_no_connect(self):
"""
mitmproxy doesn't support UDP or BIND SOCKS CMDs
"""
p = self.pathoc()
socks.ClientGreeting(
socks.VERSION.SOCKS5,
[socks.METHOD.NO_AUTHENTICATION_REQUIRED]
).to_file(p.wfile)
socks.Message(
socks.VERSION.SOCKS5,
socks.CMD.BIND,
socks.ATYP.DOMAINNAME,
("example.com", 8080)
).to_file(p.wfile)
p.wfile.flush()
p.rfile.read(2) # read server greeting
f = p.request("get:/p/200") # the request doesn't matter, error response from handshake will be read anyway.
assert f.status_code == 502
assert "SOCKS5 mode failure" in f.content
class TestSpoof(tservers.SpoofModeTest):
def test_http(self):
alist = (
("localhost", self.server.port),
("127.0.0.1", self.server.port)
)
for a in alist:
self.server.clear_log()
p = self.pathoc()
f = p.request("get:/p/304:h'Host'='%s:%s'" % a)
assert self.server.last_log()
assert f.status_code == 304
l = self.master.state.view[-1]
assert l.server_conn.address
assert l.server_conn.address.host == a[0]
assert l.server_conn.address.port == a[1]
def test_http_without_host(self):
p = self.pathoc()
f = p.request("get:/p/304:r")
assert f.status_code == 400
class TestSSLSpoof(tservers.SSLSpoofModeTest):
def test_https(self):
alist = (
("localhost", self.server.port),
("127.0.0.1", self.server.port)
)
for a in alist:
self.server.clear_log()
self.config.mode.sslport = a[1]
p = self.pathoc(sni=a[0])
f = p.request("get:/p/304")
assert self.server.last_log()
assert f.status_code == 304
l = self.master.state.view[-1]
assert l.server_conn.address
assert l.server_conn.address.host == a[0]
assert l.server_conn.address.port == a[1]
def test_https_without_sni(self):
a = ("localhost", self.server.port)
self.config.mode.sslport = a[1]
p = self.pathoc(sni=None)
f = p.request("get:/p/304")
assert f.status_code == 400
class TestHttps2Http(tservers.ReverseProxTest):
@classmethod
def get_proxy_config(cls):
@@ -447,7 +594,7 @@ class TestProxy(tservers.HTTPProxTest):
connection.close()
request, response = self.master.state.view[
0].request, self.master.state.view[0].response
0].request, self.master.state.view[0].response
assert response.code == 304 # sanity test for our low level request
# time.sleep might be a little bit shorter than a second
assert 0.95 < (request.timestamp_end - request.timestamp_start) < 1.2
@@ -609,7 +756,6 @@ class TestStreamRequest(tservers.HTTPProxTest):
assert self.server.last_log()
def test_stream_chunked(self):
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.connect(("127.0.0.1", self.proxy.port))
fconn = connection.makefile()
@@ -627,8 +773,8 @@ class TestStreamRequest(tservers.HTTPProxTest):
chunks = list(
content for _,
content,
_ in http.read_http_body_chunked(
content,
_ in http.read_http_body_chunked(
fconn,
headers,
None,
@@ -708,14 +854,6 @@ class TestIncompleteResponse(tservers.HTTPProxTest):
assert self.pathod("200").status_code == 502
class TestCertForward(tservers.HTTPProxTest):
certforward = True
ssl = True
def test_app_err(self):
tutils.raises("handshake error", self.pathod, "200:b@100")
class TestUpstreamProxy(tservers.HTTPUpstreamProxTest, CommonMixin, AppMixin):
ssl = False
@@ -738,9 +876,9 @@ class TestUpstreamProxy(tservers.HTTPUpstreamProxTest, CommonMixin, AppMixin):
class TestUpstreamProxySSL(
tservers.HTTPUpstreamProxTest,
CommonMixin,
TcpMixin):
tservers.HTTPUpstreamProxTest,
CommonMixin,
TcpMixin):
ssl = True
def _host_pattern_on(self, attr):
@@ -810,10 +948,12 @@ class TestUpstreamProxySSL(
"""
https://github.com/mitmproxy/mitmproxy/issues/313
"""
def handle_request(f):
f.request.httpversion = (1, 0)
del f.request.headers["Content-Length"]
f.reply()
_handle_request = self.chain[0].tmaster.handle_request
self.chain[0].tmaster.handle_request = handle_request
try:
@@ -831,6 +971,7 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
If we have a disconnect on a secure connection that's transparently proxified to
an upstream http proxy, we need to send the CONNECT request again.
"""
def kill_requests(master, attr, exclude):
k = [0] # variable scope workaround: put into array
_func = getattr(master, attr)
@@ -842,21 +983,22 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
f.error = Error("terminated")
f.reply(KILL)
return _func(f)
setattr(master, attr, handler)
kill_requests(self.chain[1].tmaster, "handle_request",
exclude=[
# fail first request
# fail first request
2, # allow second request
])
])
kill_requests(self.chain[0].tmaster, "handle_request",
exclude=[
1, # CONNECT
# fail first request
# fail first request
3, # reCONNECT
4, # request
])
])
p = self.pathoc()
req = p.request("get:'/p/418:b\"content\"'")
@@ -878,18 +1020,18 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
assert self.proxy.tmaster.state.flows[1].request.form_in == "relative"
assert self.chain[0].tmaster.state.flows[
0].request.form_in == "authority"
0].request.form_in == "authority"
assert self.chain[0].tmaster.state.flows[
1].request.form_in == "relative"
1].request.form_in == "relative"
assert self.chain[0].tmaster.state.flows[
2].request.form_in == "authority"
2].request.form_in == "authority"
assert self.chain[0].tmaster.state.flows[
3].request.form_in == "relative"
3].request.form_in == "relative"
assert self.chain[1].tmaster.state.flows[
0].request.form_in == "relative"
0].request.form_in == "relative"
assert self.chain[1].tmaster.state.flows[
1].request.form_in == "relative"
1].request.form_in == "relative"
req = p.request("get:'/p/418:b\"content2\"'")

View File

@@ -89,7 +89,6 @@ class ProxTestBase(object):
no_upstream_cert = False
authenticator = None
masterclass = TestMaster
certforward = False
@classmethod
def setupAll(cls):
@@ -131,7 +130,6 @@ class ProxTestBase(object):
no_upstream_cert = cls.no_upstream_cert,
cadir = cls.cadir,
authenticator = cls.authenticator,
certforward = cls.certforward,
ssl_ports=([cls.server.port, cls.server2.port] if cls.ssl else []),
clientcerts = tutils.test_data.path("data/clientcert") if cls.clientcerts else None
)
@@ -270,6 +268,56 @@ class ReverseProxTest(ProxTestBase):
return p.request(q)
class SocksModeTest(HTTPProxTest):
@classmethod
def get_proxy_config(cls):
d = ProxTestBase.get_proxy_config()
d["mode"] = "socks5"
return d
class SpoofModeTest(ProxTestBase):
ssl = None
@classmethod
def get_proxy_config(cls):
d = ProxTestBase.get_proxy_config()
d["upstream_server"] = None
d["mode"] = "spoof"
return d
def pathoc(self, sni=None):
"""
Returns a connected Pathoc instance.
"""
p = libpathod.pathoc.Pathoc(
("localhost", self.proxy.port), ssl=self.ssl, sni=sni, fp=None
)
p.connect()
return p
class SSLSpoofModeTest(ProxTestBase):
ssl = True
@classmethod
def get_proxy_config(cls):
d = ProxTestBase.get_proxy_config()
d["upstream_server"] = None
d["mode"] = "sslspoof"
d["spoofed_ssl_port"] = 443
return d
def pathoc(self, sni=None):
"""
Returns a connected Pathoc instance.
"""
p = libpathod.pathoc.Pathoc(
("localhost", self.proxy.port), ssl=self.ssl, sni=sni, fp=None
)
p.connect()
return p
class ChainProxTest(ProxTestBase):
"""
Chain three instances of mitmproxy in a row to test upstream mode.

View File

@@ -18,6 +18,11 @@ html, body, #container {
.main-view {
flex: 1 1 auto;
// All children of #container need an explicit height
// If we don't set this, the scrollbars disappear
// (https://github.com/mitmproxy/mitmproxy/issues/615)
height: 0;
display: flex;
flex-direction: row;