Compare commits

..

239 Commits

Author SHA1 Message Date
Thomas Kriechbaumer
91c3a99d48 update CHANGELOG with 2.0.0 release 2017-02-21 18:55:31 +01:00
Maximilian Hils
b87b69be7e Update README.md 2017-02-21 18:38:30 +01:00
Maximilian Hils
dc7a7ad697 Update README.md 2017-02-21 18:37:56 +01:00
Maximilian Hils
02a563dff1 [web] yarn? yarn. 2017-02-21 16:05:09 +01:00
Maximilian Hils
2df2fc1f38 Merge pull request #2051 from MatthewShao/fix-#1928-@concurrent-in-OOP
Fix #1928, @concurrent decorator in class
2017-02-21 15:38:34 +01:00
Maximilian Hils
29c4a43e50 docs++ 2017-02-21 15:38:20 +01:00
Maximilian Hils
96256579e6 update mitmweb resources 2017-02-21 15:33:13 +01:00
Matthew Shao
5fc4fc28b6 Add test for @concurrent decorator in class. 2017-02-21 16:56:48 +08:00
Matthew Shao
f9714fbf3e Fix #1928, @concurrent decorator now works for class methods. 2017-02-21 11:59:50 +08:00
Thomas Kriechbaumer
18401dda8f Merge pull request #2047 from Kriechi/lymanZerga11-patch-1
improve tests
2017-02-20 13:01:33 +01:00
Thomas Kriechbaumer
0de277c18b improve test 2017-02-20 11:48:57 +01:00
Thomas Kriechbaumer
050245e842 Merge pull request #2017 from lymanZerga11/patch-1
Catch ValueErrors from url.parse()
2017-02-20 11:48:40 +01:00
Thomas Kriechbaumer
48cfaf8c39 Merge pull request #2046 from Kriechi/krsoninikhil-debug
improve tests
2017-02-20 11:46:12 +01:00
Thomas Kriechbaumer
ac6915c002 improve tests 2017-02-20 11:11:10 +01:00
Thomas Kriechbaumer
51e08a5b85 Merge pull request #2041 from krsoninikhil/debug
Adds PyInstaller build indicator to --version argument
2017-02-20 10:29:15 +01:00
Thomas Kriechbaumer
927a1ebab4 Merge pull request #2045 from Kriechi/protobuf
protobuf: coverage++
2017-02-20 10:14:18 +01:00
Nikhil Soni
ed084f5413 Adds test for pyinstaller indicator 2017-02-19 19:15:36 +05:30
Thomas Kriechbaumer
36352c9539 protobuf: coverage++ 2017-02-19 14:29:09 +01:00
Maximilian Hils
3ee5227cca Merge pull request #2043 from Kriechi/http2
http2: disable priority forwarding
2017-02-18 19:27:43 +01:00
Thomas Kriechbaumer
83c2de8849 http2: disable priority forwarding 2017-02-18 14:30:08 +01:00
Maximilian Hils
4158a1ae55 Merge pull request #2040 from mhils/request-host-header
Add "Request.host_header"
2017-02-18 12:08:54 +01:00
Thomas Kriechbaumer
6ef4f094b3 Merge pull request #2038 from Kriechi/improve-http2-cov
test forbidden HTTP/2 headers in responses
2017-02-18 11:50:57 +01:00
Thomas Kriechbaumer
8cbd6dca9f Merge pull request #2035 from Kriechi/fix-1916
fix #1916
2017-02-18 11:40:56 +01:00
Thomas Kriechbaumer
47e6f977de test forbidden HTTP/2 headers in responses 2017-02-18 11:26:54 +01:00
Nikhil Soni
2a461d6b39 Adds PyInstaller build indicator to --version argument 2017-02-18 06:27:06 +05:30
Maximilian Hils
b9e31f213f .headers["host"] -> .host_header 2017-02-18 00:13:14 +01:00
Maximilian Hils
49c8e19f80 add request.host_header, fix #2036 2017-02-17 23:31:53 +01:00
Maximilian Hils
6a1e03ac6f tests: make full cov plugin cwd-independent 2017-02-17 23:29:10 +01:00
Maximilian Hils
33acb48c71 Merge pull request #2032 from Kriechi/individual-coverage
add individual-coverage check
2017-02-17 22:56:13 +01:00
Thomas Kriechbaumer
c04d14e53c fix #1916 2017-02-17 22:45:31 +01:00
Thomas Kriechbaumer
4cec88fc7f remove unneeded test module 2017-02-17 20:41:10 +01:00
mame82
ffdbccd571 Use existing urlencoding conventions when re-encoding edited form #1946 (#2022) 2017-02-16 15:03:27 +01:00
Thomas Kriechbaumer
6b22ca7a32 add individual-coverage check 2017-02-16 11:43:45 +01:00
Thomas Kriechbaumer
337b1c9399 fix wrong filename 2017-02-15 18:45:49 +01:00
Maximilian Hils
d1186eea18 Merge pull request #2029 from mhils/proxyauth
Improve ProxyAuth
2017-02-15 16:52:20 +01:00
Thomas Kriechbaumer
9cf00cbc70 ignore compiled mitmweb JS code 2017-02-15 16:24:53 +01:00
Maximilian Hils
2955e3d566 improve make_error_response 2017-02-15 15:55:08 +01:00
Thomas Kriechbaumer
94a7e99fda Merge pull request #2028 from Kriechi/nuke-pillow
nuke Pillow
2017-02-15 15:29:57 +01:00
Maximilian Hils
4bac850bb1 fix #1722, fix #1734, refs #2019 2017-02-15 14:20:46 +01:00
Thomas Kriechbaumer
80113a9d6e remove zlib1g dependency from install instructions 2017-02-15 14:08:36 +01:00
Maximilian Hils
0d9c7ce50c image view: add fallback, catch all images but svgs 2017-02-15 14:04:24 +01:00
Thomas Kriechbaumer
4aa656f2a6 Merge pull request #2027 from Kriechi/pytest-plugin
extract full-coverage pytest plugin
2017-02-15 12:14:19 +01:00
Thomas Kriechbaumer
8a6f8bd461 nuke Pillow 2017-02-15 00:54:14 +01:00
Thomas Kriechbaumer
24a51df9cb extract full-coverage pytest plugin 2017-02-15 00:29:43 +01:00
Thomas Kriechbaumer
bb2fa6dc7d Merge pull request #2026 from Kriechi/filename-matching
add filename-matching linter
2017-02-15 00:24:05 +01:00
Thomas Kriechbaumer
4d973e8295 fix imports 2017-02-14 23:47:33 +01:00
Thomas Kriechbaumer
a12c3d3f8e restructure and move test files
add empty test files to satisfy linter
2017-02-14 23:31:37 +01:00
Thomas Kriechbaumer
04748e6f3f add filename-matching linter 2017-02-14 22:48:24 +01:00
Thomas Kriechbaumer
cd9cd8a195 update lint options 2017-02-14 22:44:55 +01:00
Maximilian Hils
a3436897ad Merge pull request #2024 from MatthewShao/fix_websocket_msg_in_console
fix websocket message display crash in console.
2017-02-14 18:02:09 +01:00
Maximilian Hils
5acdd78b15 fix typo 2017-02-14 17:45:54 +01:00
Matthew Shao
9b9d72594c fix websocket message display crash in console. 2017-02-14 21:21:39 +08:00
lymanZerga11
d30ef7ee3e Update test_flowlist.py 2017-02-14 21:17:18 +08:00
lymanZerga11
a52d8c1dab Update test_flowlist.py 2017-02-14 21:01:01 +08:00
Maximilian Hils
75a0a4c092 Merge pull request #2016 from Kriechi/pathod-refactor
remove treq from pathoc
2017-02-13 11:16:15 +01:00
Maximilian Hils
9c29f3b96d Merge pull request #2015 from Kriechi/nuke-pypy
nuke pypy
2017-02-13 11:16:02 +01:00
lymanZerga11
26a17a3d82 Update test_flowlist.py 2017-02-13 15:05:29 +08:00
lymanZerga11
a912d67c06 Update test_flowlist.py 2017-02-13 10:47:50 +08:00
lymanZerga11
577fb818b9 Update test_flowlist.py 2017-02-13 10:39:48 +08:00
lymanZerga11
f5b30b8872 Update test_flowlist.py 2017-02-12 23:36:26 +08:00
lymanZerga11
df8a5aa9be Update test_flowlist.py 2017-02-12 23:23:23 +08:00
lymanZerga11
c622e4a649 Create test_flowlist.py 2017-02-12 23:10:49 +08:00
lymanZerga11
55e471af40 Catch ValueErrors from url.parse() 2017-02-12 23:06:11 +08:00
Thomas Kriechbaumer
4771c9599e remove treq from pathoc
Using a function intended only for tests in active live code is ugly.
However, this whole portion of pathoc could benefit from some further improvements.
2017-02-12 13:10:51 +01:00
Thomas Kriechbaumer
63cfb4e480 nuke pypy 2017-02-12 12:32:30 +01:00
Alireza Mosajjal
f77cf03543 add version info to web footer (#2010)
fixes #1893
2017-02-12 10:06:44 +01:00
Maximilian Hils
3067a971f9 Merge pull request #2000 from s4chin/add-jpeg-parser
Add jpeg parser
2017-02-11 23:00:14 +01:00
Sachin Kelkar
245e24dcf3 Add sources of images, remove copyrighted image 2017-02-12 01:19:36 +05:30
Maximilian Hils
51f6d279a7 Merge pull request #2008 from yjh0502/master
Add transparent proxy support for OpenBSD
2017-02-11 12:00:02 +01:00
Maximilian Hils
e4cb96f84d consolidate readme shields 2017-02-11 11:54:59 +01:00
Sachin Kelkar
ccca04b450 Fix coverage 2017-02-11 15:01:54 +05:30
Jihyun Yu
4adc575bad Add transparent proxy support for OpenBSD
Add transparent proxy support for OpenBSD with pf divert-to.
2017-02-11 14:05:22 +09:00
Maximilian Hils
71742654e3 Merge pull request #2005 from ujjwal96/script-reload
Closes #1828 script reloads on py file changes
2017-02-11 01:45:14 +01:00
Maximilian Hils
d4593bc333 Merge pull request #2007 from mitmproxy/update-dependencies
Update mypy
2017-02-11 01:43:44 +01:00
Maximilian Hils
85542bd12b Merge pull request #2006 from Kriechi/coverage++
coverage++
2017-02-11 01:43:27 +01:00
Maximilian Hils
705ffd6d06 update mypy
The pypi package name has changed.
2017-02-11 00:14:49 +01:00
Thomas Kriechbaumer
c8c79cc291 coverage++ 2017-02-10 22:55:15 +01:00
Sachin Kelkar
8eb1d34644 Add app1 marker and tests 2017-02-10 22:40:19 +05:30
Ujjwal Verma
809207195d closes #1828 script reloads on py file changes 2017-02-10 22:21:12 +05:30
Thomas Kriechbaumer
d4264cb719 Update .travis.yml
otherwise `mitmproxy --version` fails because `git describe` might fail fetching the distance to the last release tag
2017-02-10 16:37:22 +01:00
Maximilian Hils
d70f7cd8cc Merge pull request #2004 from mhils/update-dependencies
update dependencies
2017-02-10 15:28:10 +01:00
Maximilian Hils
43867dbd98 fix flaky coverage 2017-02-10 15:28:02 +01:00
Maximilian Hils
1c8836a8d6 update dependencies 2017-02-10 14:38:05 +01:00
Sachin Kelkar
c4e141a000 Add jpeg app0 sof0 com parsing 2017-02-10 11:16:23 +05:30
Maximilian Hils
74c991d70b Merge pull request #2002 from mitmproxy/content_views_edit
Content views edit
2017-02-09 23:21:44 +01:00
Ujjwal Verma
d6465b907f Closes #1580 Gives warning when explicit proxy configured in transparent mode (#1996) 2017-02-09 20:56:06 +01:00
Maximilian Hils
380ff50e57 Merge pull request #2003 from Kriechi/coverage++
test refactoring and coverage++
2017-02-09 17:45:50 +01:00
Maximilian Hils
da8444b11f flow_count() -> len(flows) 2017-02-09 16:44:27 +01:00
Maximilian Hils
1084588103 Merge pull request #1998 from mhils/fix-change-upstream
Fix change_upstream_proxy_server
2017-02-09 16:39:20 +01:00
Thomas Kriechbaumer
d10560d54c improve coverage report 2017-02-09 16:08:36 +01:00
Thomas Kriechbaumer
2ff5d72236 minor changes 2017-02-09 16:08:36 +01:00
Thomas Kriechbaumer
0299bb5b2e eventsequence: coverage++ 2017-02-09 16:08:36 +01:00
Thomas Kriechbaumer
5a3976c43e coverage whitelist -> blacklist 2017-02-09 16:08:36 +01:00
Thomas Kriechbaumer
f6cea09d5a stateobject: coverage++ 2017-02-09 16:08:36 +01:00
Thomas Kriechbaumer
1847cf175c websockets, tcp, version: coverage++ 2017-02-09 16:08:35 +01:00
Thomas Kriechbaumer
2134b5b06a remove FIXME 2017-02-09 16:08:35 +01:00
Thomas Kriechbaumer
b5f0342664 remove verified remark 2017-02-09 16:08:35 +01:00
Thomas Kriechbaumer
ae94ca6fa9 remove deprecated flow_count function 2017-02-09 16:08:35 +01:00
Maximilian Hils
d6d1ff0170 simplify state copy 2017-02-09 16:08:24 +01:00
Maximilian Hils
9b97b63891 fix change_upstream_proxy_server 2017-02-09 15:24:31 +01:00
Maximilian Hils
7a205e80aa improve stateobject tests 2017-02-09 14:21:25 +01:00
Maximilian Hils
fa661217c1 Merge pull request #2001 from mitmproxy/issue#40_response_body_edit
fix response body edit issue
2017-02-09 13:29:41 +01:00
Maximilian Hils
4cf6047a4e Merge pull request #1999 from Kriechi/coverage++
pytest.raises: shim new API
2017-02-09 10:42:55 +01:00
cle1000
fb06c66437 remove contentview selector and view description if edit mode is on 2017-02-08 21:09:03 +01:00
cle1000
b4bed57d4c fix show_lines button visible in edit mode if you change tabs 2017-02-08 21:07:40 +01:00
Thomas Kriechbaumer
7a9d40817c pytest.raises: shim new API 2017-02-08 20:04:07 +01:00
cle1000
dae0c23ec8 fix response body edit issue 2017-02-08 19:57:57 +01:00
Sachin Kelkar
5dd54ef012 Update kaitaistruct version to 0.6 2017-02-08 21:16:17 +05:30
Maximilian Hils
28c0596742 Merge pull request #1997 from s4chin/add-gif-parser
Add gif parser and tests
2017-02-08 12:09:48 +01:00
Maximilian Hils
79354c0b43 Merge pull request #1984 from ujjwal96/har_dump-fix
Fixes #1978
2017-02-08 12:01:32 +01:00
Sachin Kelkar
66ad95c330 Fix gif parsing error on some gifs and add test 2017-02-08 00:03:07 +05:30
Sachin Kelkar
4ef5de2cce Add gif parser and tests 2017-02-06 17:57:59 +05:30
Ujjwal Verma
c622622c59 Encoding fixes and tests 2017-02-05 18:59:01 +05:30
Thomas Kriechbaumer
2316c0fb74 Merge pull request #1993 from Kriechi/coverage++
fix flaky test
2017-02-05 11:29:27 +01:00
Thomas Kriechbaumer
288f9a3857 fix flaky test 2017-02-05 10:42:47 +01:00
Maximilian Hils
d133b8baee Merge pull request #1989 from Kriechi/coverage++
coverage++
2017-02-04 16:05:11 +01:00
Thomas Kriechbaumer
966ffaa3d6 coverage++ 2017-02-04 13:48:55 +01:00
Maximilian Hils
155670766e Merge pull request #1988 from krsoninikhil/ordereddict
Closes #1977
2017-02-04 13:46:44 +01:00
Nikhil Soni
e6de57ccc6 Changes view._store from dict to OrderedDict 2017-02-04 17:44:22 +05:30
Maximilian Hils
53f298ac41 Merge pull request #1967 from s4chin/remove-pillow
Add png parser
2017-02-03 18:58:34 +01:00
Maximilian Hils
bbdb7300fd Merge pull request #1983 from Kriechi/coverage++
coverage++
2017-02-03 18:56:56 +01:00
Sachin Kelkar
15548ff433 Feedback 2017-02-03 18:40:50 +05:30
Maximilian Hils
85e39b86bb update installbuilder 2017-02-03 12:04:11 +01:00
Thomas Kriechbaumer
bcaaa2f40b coverage++ 2017-02-03 11:55:33 +01:00
Maximilian Hils
3f26a0b5a5 Merge pull request #1982 from ujjwal96/preserve-marked-flows
Closes #1960 'Z' removes unmarked flows
2017-02-03 11:43:57 +01:00
Sachin Kelkar
0674485e76 Fix and cleanup 2017-02-03 15:02:35 +05:30
Sachin Kelkar
88dbfd5257 Add iTXt and zTXt tests 2017-02-03 14:12:54 +05:30
Sachin Kelkar
abef020e07 Fix as per feedback and add more tests 2017-02-03 14:12:54 +05:30
Sachin Kelkar
6202958048 Add png parser 2017-02-03 14:12:54 +05:30
Ujjwal Verma
e10c36fe11 Added test 2017-02-03 02:58:02 +05:30
Ujjwal Verma
11b4b3209d Closes #1960 'Z' removes unmarked flows 2017-02-03 01:35:53 +05:30
Thomas Kriechbaumer
79aa994275 fix merge 2017-02-02 17:26:31 +01:00
Maximilian Hils
8b6e3d8bd3 Merge pull request #1979 from 0neblock/master
Fixed following on flowlist
2017-02-02 17:24:04 +01:00
Thomas Kriechbaumer
3f4d472c80 Merge pull request #1980 from Kriechi/improve-tests
improve tests
2017-02-02 17:23:11 +01:00
Thomas Kriechbaumer
4f0b2bc4de adapt coverage checks 2017-02-02 14:20:56 +01:00
Aidan Cyr
00a6551622 Fixed following on flowlist
Fixed a focus_follow that was missed when options were cleaned up.
2017-02-02 23:00:47 +11:00
Thomas Kriechbaumer
ae008ed80b replace tutils.raises with pytest.raises + shim 2017-02-02 12:59:01 +01:00
Thomas Kriechbaumer
ec92d7f67e cleanup test utils 2017-02-02 12:59:01 +01:00
Thomas Kriechbaumer
c1bc1ea584 Merge pull request #1959 from Kriechi/coverage-fail
add test coverage protection
2017-02-02 12:55:33 +01:00
Thomas Kriechbaumer
6e329595ca add test coverage protection 2017-02-02 11:39:33 +01:00
Thomas Kriechbaumer
380d8ec370 increase test coverage 2017-02-02 10:15:01 +01:00
Thomas Kriechbaumer
3ae060f0d3 fix console test 2017-02-02 10:13:12 +01:00
Thomas Kriechbaumer
2606de25e4 fix linting error 2017-02-02 09:36:33 +01:00
Maximilian Hils
7b2a986cea tests++ (#1976) 2017-02-01 17:52:01 +01:00
lymanZerga11
4b10212caf Add except clause to catch script parsing errors (#1929) 2017-02-01 15:27:10 +01:00
Maximilian Hils
212d9f1b98 Merge pull request #1948 from amm0nite/fix_dns_spoofing_example
Fix for dns_spoofing.py example
2017-02-01 15:25:51 +01:00
Aldo Cortesi
cf991ba4e2 Merge pull request #1974 from cortesi/setheaders
Regularise setheaders options
2017-02-01 16:47:19 +13:00
Aldo Cortesi
f44dab5d26 Merge pull request #1973 from cortesi/replace
Revamp replacement hooks
2017-02-01 13:03:29 +13:00
Aldo Cortesi
914659e888 Regularise setheaders options
As per replacements:

- Make the option type a string/tuple union
- Localise parsing strictly within the addon
- Adapt the console editor (we'll find a more elegant solution later)
2017-02-01 12:22:05 +13:00
Ammonite
9e3f06b7f2 Better test 2017-01-31 23:23:13 +01:00
Aldo Cortesi
4cc75a9560 Revamp replacement hooks
- Replacement specifiers can be either strings or tuples. This lets us cope
gracefully with command-line parsing (and posible quick interactive
specification) without having to special-case replacement hooks, or have
knowledge of hook specification leak outside the addon. We can also now use the
same command-line spec format in config files.
- Split replacement and replacement from file into separate addons and options.
Files are now read on each replacement, so you can edit replacement files in
place without restart.
- Modernise the test suite to use addon test helpers.

TODO: editing and displaying replace-from-file in console app
2017-02-01 11:10:28 +13:00
Aldo Cortesi
02f51d043d Merge pull request #1965 from cortesi/optname
Consolidate option names
2017-02-01 08:39:35 +13:00
Thomas Kriechbaumer
ff379b7665 fully test addonmanager (#1970) 2017-01-31 10:57:22 +01:00
Maximilian Hils
1523068b03 Merge pull request #1964 from mhils/new-contribution-docs
New contribution docs
2017-01-31 09:02:00 +01:00
Ammonite
e2e15df358 Add a test for the host replacement 2017-01-30 23:44:13 +01:00
Ammonite
aaff9dfd32 Reset the host header to the correct destination 2017-01-30 23:41:47 +01:00
Markus Unterwaditzer
980b8aedd3 Update shim path (#1968) 2017-01-30 20:07:37 +01:00
Maximilian Hils
45ace793d0 🙈 2017-01-30 19:30:35 +01:00
Maximilian Hils
4746ce939f README: py.test -> pytest 2017-01-30 16:46:22 +01:00
Maximilian Hils
3fe2f9578a always use 'venv' as virtualenv folder 2017-01-30 00:40:24 +01:00
Maximilian Hils
f570c57006 add contributing section to README, remove autoenv 2017-01-30 00:33:24 +01:00
Aldo Cortesi
35aff3b783 Consolidate option names
- Onboarding app options to onboarding*
- Console-specific options to console_*
- Web-specific options to web_*
2017-01-30 11:15:12 +13:00
Maximilian Hils
a384dea62b rework README dev sections 2017-01-29 23:06:50 +01:00
Maximilian Hils
edb96f69f5 add contributing section that redirects to README 2017-01-29 22:10:37 +01:00
Maximilian Hils
e10fb22f9c remove outdated testing section from docs 2017-01-29 22:09:45 +01:00
Maximilian Hils
0919e38514 remove outdated architecture section from docs 2017-01-29 22:08:57 +01:00
Ammonite
c2c6050df3 Store original host in flow metadata 2017-01-29 14:33:53 +01:00
Aldo Cortesi
695cc23696 Merge pull request #1963 from cortesi/cover
addons: unit tests to 100%
2017-01-29 12:04:54 +13:00
Aldo Cortesi
356c4987a2 addons: unit tests to 100%
This patch pushes the coverage of ./tests/mitmproxy/addons to 100% of the addons
module.
2017-01-29 11:44:49 +13:00
Thomas Kriechbaumer
d24f76c98e fix newlines on windows 2017-01-25 18:51:09 +01:00
Thomas Kriechbaumer
5549757268 fix alpn requirement in web tests 2017-01-25 18:18:18 +01:00
Thomas Kriechbaumer
3028e06fd2 simplify termlog outfile handling 2017-01-25 17:50:07 +01:00
nish21
ab45e4d183 write errors to stderr (#1952) 2017-01-25 17:49:56 +01:00
Maximilian Hils
e076c23f8d Merge pull request #1957 from s4chin/add-body-filters
web: Add body filters
2017-01-24 17:03:09 +01:00
Sachin Kelkar
15482e3242 web: Add body filters 2017-01-24 21:28:07 +05:30
Thomas Kriechbaumer
acfd548fa2 Merge pull request #1956 from Kriechi/version-string
improve version specifiers
2017-01-23 22:53:02 +01:00
Maximilian Hils
2eaac31344 Merge pull request #1945 from Kriechi/ca-expired
fix #939
2017-01-23 21:22:38 +01:00
Maximilian Hils
c512f095ae Merge pull request #1949 from s4chin/add-src-dst-filters
web: Add ~src and ~dst filters
2017-01-23 21:22:21 +01:00
Maximilian Hils
c46dd1e29d Merge pull request #1955 from Kriechi/test++
increase addon test coverage
2017-01-23 21:21:44 +01:00
Thomas Kriechbaumer
6a7eeef0ee Merge branch 'offby1-patch-1' 2017-01-23 20:42:42 +01:00
Eric Hanchrow
ccb8889342 fix link to macOS and Linux binaries 2017-01-23 20:42:03 +01:00
Thomas Kriechbaumer
63beaa18ce improve version specifiers 2017-01-23 20:33:12 +01:00
Thomas Kriechbaumer
5792e2c483 test disable_h2c_upgrade addon 2017-01-23 18:38:27 +01:00
Thomas Kriechbaumer
d674de298c test websocket addon 2017-01-23 15:45:53 +01:00
Thomas Kriechbaumer
006eb39cc5 fix #939 2017-01-23 15:14:56 +01:00
Sachin Kelkar
9813294854 Merge NullaryExpr, UnaryExpr and BooleanLiteral 2017-01-23 18:38:49 +05:30
Maximilian Hils
86174eb6ad Merge pull request #1950 from Kriechi/nuke-mock
nuke mock dependency
2017-01-23 10:19:43 +01:00
Ammonite
0ca1916f1b Fix host extraction 2017-01-22 15:28:14 +01:00
Ammonite
e8fc4af4c6 Follow PEP-8 and add comment 2017-01-22 14:59:46 +01:00
Thomas Kriechbaumer
127c69c3ac nuke mock dependency
This is already included in Python 3.3+ as unittest.mock
2017-01-22 14:11:28 +01:00
Sachin Kelkar
2b2292f432 web: Add ~src and ~dst filters 2017-01-21 21:48:49 +05:30
Ammonite
93172460aa Add blank lines for lint 2017-01-21 09:39:34 +01:00
Ammonite
8aa250d679 Change class name 2017-01-20 23:48:26 +01:00
Ammonite
a55eba3b37 Get the the original header in requestheaders instead of request 2017-01-20 23:43:53 +01:00
Thomas Kriechbaumer
0022c810e5 replace iOS install instructions for CAs
fixes #1898
2017-01-20 22:04:35 +01:00
Maximilian Hils
d1ccdf41a3 Merge pull request #1947 from s4chin/add-http-tcp-filter
web: Add ~http and ~tcp filters
2017-01-20 18:32:59 +01:00
Sachin Kelkar
bbda53c8b6 web: Add ~http and ~tcp filters 2017-01-20 22:53:37 +05:30
Maximilian Hils
2ca48e5d08 Merge pull request #1940 from s4chin/add-marked-filter
web: Add ~marked filter
2017-01-20 15:26:51 +01:00
Sachin Kelkar
bc8f5a2d71 web: Add ~marked filter 2017-01-20 09:17:09 +05:30
Thomas Kriechbaumer
9a88a2fdea Merge pull request #1941 from Kriechi/sysinfo
provide git information with --version
2017-01-19 23:25:33 +01:00
Thomas Kriechbaumer
72b753c60f provide git information with --version
fixes #1848
2017-01-19 20:27:29 +01:00
Maximilian Hils
ea20bfb233 Merge pull request #1932 from wufeifei/patch-2
update stream.py
2017-01-16 11:31:47 +01:00
Feei
95cca4ce75 update stream.py 2017-01-16 18:30:05 +08:00
Maximilian Hils
345a459720 Merge pull request #1931 from wufeifei/patch-1
update log_events.py
2017-01-16 11:17:42 +01:00
Feei
eaaec4353d update log_events.py 2017-01-16 18:11:18 +08:00
Thomas Kriechbaumer
78fe04ca9d Merge pull request #1923 from mhils/display-http-version
console: display http version in details
2017-01-14 13:11:26 +01:00
Maximilian Hils
028a98380d Merge pull request #1926 from tripleee/patch-1
reSt verbatim formatting error
2017-01-13 14:29:07 +01:00
tripleee
9cedfa7ddd reSt verbatim formatting error
The rendered code snippets would be rewrapped as if it were prose, which wreaks havoc with the code.
2017-01-13 14:26:04 +02:00
Maximilian Hils
fe43e629fd console: display http version in details 2017-01-10 15:26:11 +01:00
Maximilian Hils
6f9422dfb9 Merge pull request #1918 from s4chin/update-hacking-instructions
Update instructions in README
2017-01-10 15:14:42 +01:00
Maximilian Hils
5d0f7e5c41 clarify windows install docs 2017-01-10 11:01:51 +01:00
Maximilian Hils
48e399a285 Merge pull request #1922 from mhils/read-from-stdin
mitmdump: fix reading from stdin
2017-01-10 10:58:18 +01:00
Maximilian Hils
9c133abc79 Merge pull request #1920 from rodsenra/master
Added note about server-side replay for sessions recorded in reverse-proxy mode
2017-01-10 10:57:55 +01:00
Maximilian Hils
d953d83773 mitmdump: fix reading from stdin 2017-01-10 10:37:28 +01:00
Rodrigo Senra
d013f7ec8f Added note about server-side replay for sessions recorded in reverse-proxy mode 2017-01-07 19:58:30 -05:00
Maximilian Hils
88f3459c7d stronger tests, lint 2017-01-07 23:08:50 +01:00
Maximilian Hils
ede124a587 fix #1910 2017-01-07 23:08:50 +01:00
Maximilian Hils
c41bd3fafd minor legibility improvements 2017-01-07 23:08:50 +01:00
Maximilian Hils
b1ec7e78cd Revert "add strutils.replace_surrogates"
This reverts commit 15ae88db08dbf2725c8c9f945f2006c4b2a10d46.
2017-01-07 23:08:50 +01:00
Maximilian Hils
c21ee90deb add strutils.replace_surrogates 2017-01-07 23:08:50 +01:00
Maximilian Hils
042261266f minor encoding fixes
- native() -> always_str()
  The old function name does not make sense on Python 3 only.
- Inline utility functions in message.py.
2017-01-07 23:08:50 +01:00
Maximilian Hils
af194918cf fix HTTP reason phrase encoding
While not explicitly spelled out in the RFCs,
the reason phrase is expected to be ISO-8859-1.
2017-01-07 23:08:50 +01:00
Maximilian Hils
1e89a93801 update dependencies 2017-01-07 23:07:50 +01:00
Sachin Kelkar
38fd88b3d1 Update instructions 2017-01-06 15:33:05 +05:30
lymanZerga11
3ea76a7f3e Controls the length of URL displayed using the -d option (#1903) 2017-01-05 14:37:20 +01:00
Maximilian Hils
2335a70b79 Merge pull request #1904 from chhsiao90/edit-form-encode
Encode urlencoded_form items when editing
2016-12-31 08:01:43 +01:00
chhsiao90
af0539c526 Fixed edit form failed 2016-12-31 14:38:33 +08:00
Maximilian Hils
e83083b64e Merge pull request #1905 from chhsiao90/allow-underscore-hostname
Allow underscore in hostname
2016-12-30 21:19:25 +01:00
chhsiao90
a5f1215eb2 Allow underscore in hostname 2016-12-30 21:03:22 +08:00
Maximilian Hils
973406f327 Merge pull request #1896 from mhils/3.5.0-compat
Fix compat with Python 3.5.0
2016-12-28 15:35:07 +01:00
Maximilian Hils
eab360a02b fix IO type checking 2016-12-28 14:21:19 +01:00
Maximilian Hils
0929e74b4e fix compat with Python 3.5.0 2016-12-28 14:20:53 +01:00
Maximilian Hils
8185cf2724 update io compat for 2.0 2016-12-27 21:03:59 +01:00
Maximilian Hils
9e06c69ea3 Update README.md 2016-12-27 21:01:39 +01:00
Maximilian Hils
2a96d43602 Update README.md 2016-12-27 20:58:32 +01:00
Maximilian Hils
e531a97a8b bump version for next release cycle 2016-12-27 20:52:22 +01:00
Thomas Kriechbaumer
7221f49b25 Merge pull request #1887 from Kriechi/h2-enable
enable HTTP/2 by default
2016-12-27 20:49:09 +01:00
Maximilian Hils
843b1e17c9 add mitmweb to README 2016-12-27 20:47:47 +01:00
Thomas Kriechbaumer
cc9e70e3cc enable HTTP/2 by default 2016-12-26 21:51:10 +01:00
294 changed files with 12596 additions and 3121 deletions

View File

@@ -28,7 +28,7 @@ install:
- "pip install -U tox"
test_script:
- ps: "tox -- --cov mitmproxy --cov pathod -v"
- ps: "tox -- --verbose --cov-report=term"
- ps: |
$Env:VERSION = $(python mitmproxy/version.py)
$Env:SKIP_MITMPROXY = "python -c `"print('skip mitmproxy')`""
@@ -44,12 +44,12 @@ test_script:
if (!(Test-Path "C:\projects\mitmproxy\release\installbuilder-installer.exe")) {
"Download InstallBuilder..."
(New-Object System.Net.WebClient).DownloadFile(
"https://installbuilder.bitrock.com/installbuilder-enterprise-16.11.1-windows-installer.exe",
"https://installbuilder.bitrock.com/installbuilder-enterprise-17.1.0-windows-installer.exe",
"C:\projects\mitmproxy\release\installbuilder-installer.exe"
)
}
Start-Process "C:\projects\mitmproxy\release\installbuilder-installer.exe" "--mode unattended --unattendedmodeui none" -Wait
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 16.11.1\bin\builder-cli.exe' `
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 17.1.0\bin\builder-cli.exe' `
build `
.\release\installbuilder\mitmproxy.xml `
windows `

6
.env
View File

@@ -1,6 +0,0 @@
DIR="$( dirname "${BASH_SOURCE[0]}" )"
ACTIVATE_DIR="$(if [ -f "$DIR/venv/bin/activate" ]; then echo 'bin'; else echo 'Scripts'; fi;)"
if [ -z "$VIRTUAL_ENV" ] && [ -f "$DIR/venv/$ACTIVATE_DIR/activate" ]; then
echo "Activating mitmproxy virtualenv..."
source "$DIR/venv/$ACTIVATE_DIR/activate"
fi

2
.gitattributes vendored
View File

@@ -1,2 +1,2 @@
mitmproxy/tools/web/static/**/* -diff
mitmproxy/tools/web/static/**/* -diff linguist-vendored
web/src/js/filt/filt.js -diff

2
.gitignore vendored
View File

@@ -19,3 +19,5 @@ bower_components
*.map
sslkeylogfile.log
.tox/
.python-version
coverage.xml

View File

@@ -5,6 +5,8 @@ env:
global:
- CI_DEPS=codecov>=2.0.5
- CI_COMMANDS=codecov
git:
depth: 10000
matrix:
fast_finish: true
@@ -41,12 +43,10 @@ matrix:
- debian-sid
packages:
- libssl-dev
- python: 3.5
env: TOXENV=individual_coverage
- python: 3.5
env: TOXENV=docs
git:
depth: 10000
allow_failures:
- python: pypy
install:
- |
@@ -63,7 +63,7 @@ install:
- pip install tox
script:
- tox -- --cov mitmproxy --cov pathod -v
- tox -- --verbose --cov-report=term
- |
if [[ $BDIST == "1" ]]
then

View File

@@ -1,3 +1,26 @@
21 February 2017: mitmproxy 2.0
* HTTP/2 is now enabled by default.
* Image ContentView: Parse images with Kaitai Struct (kaitai.io) instead of Pillow.
This simplifies installation, reduces binary size, and allows parsing in pure Python.
* Web: Add missing flow filters.
* Add transparent proxy support for OpenBSD.
* Check the mitmproxy CA for expiration and warn the user to regenerate it if necessary.
* Testing: Tremendous improvements, enforced 100% coverage for large parts of the
codebase, increased overall coverage.
* Enforce individual coverage: one source file -> one test file with 100% coverage.
* A myriad of other small improvements throughout the project.
* Numerous bugfixes.
26 December 2016: mitmproxy 1.0
* All mitmproxy tools are now Python 3 only! We plan to support Python 3.5 and higher.

View File

@@ -10,6 +10,8 @@ interface.
``mitmdump`` is the command-line version of mitmproxy. Think tcpdump for HTTP.
``mitmweb`` is a web-based interface for mitmproxy.
``pathoc`` and ``pathod`` are perverse HTTP client and server applications
designed to let you craft almost any conceivable HTTP request, including ones
that creatively violate the standards.
@@ -35,7 +37,7 @@ each other solve problems, and come up with new ideas for the project.
|mitmproxy_discourse|
Join our developer chat on Slack if you would like to hack on mitmproxy itself.
Join our developer chat on Slack if you would like to contribute to mitmproxy itself.
|slack|
@@ -46,72 +48,64 @@ Installation
The installation instructions are `here <http://docs.mitmproxy.org/en/stable/install.html>`__.
If you want to contribute changes, keep on reading.
Contributing
------------
Hacking
-------
As an open source project, mitmproxy welcomes contributions of all forms. If you would like to bring the project forward,
please consider contributing in the following areas:
To get started hacking on mitmproxy, make sure you have Python_ 3.5.x or above with
virtualenv_ installed (you can find installation instructions for virtualenv
`here <http://virtualenv.readthedocs.org/en/latest/>`__). Then do the following:
- **Maintenance:** We are *incredibly* thankful for individuals who are stepping up and helping with maintenance. This includes (but is not limited to) triaging issues, reviewing pull requests and picking up stale ones, helping out other users in our forums_, creating minimal, complete and verifiable examples or test cases for existing bug reports, updating documentation, or fixing minor bugs that have recently been reported.
- **Code Contributions:** We actively mark issues that we consider are `good first contributions`_. If you intend to work on a larger contribution to the project, please come talk to us first.
Development Setup
-----------------
To get started hacking on mitmproxy, please follow the `advanced installation`_ steps to install mitmproxy from source, but stop right before running ``pip3 install mitmproxy``. Instead, do the following:
.. code-block:: text
git clone https://github.com/mitmproxy/mitmproxy.git
cd mitmproxy
./dev.sh # powershell .\dev.ps1 on Windows
./dev.sh # "powershell .\dev.ps1" on Windows
The *dev* script will create a virtualenv environment in a directory called
"venv", and install all mandatory and optional dependencies into it. The
primary mitmproxy components - mitmproxy and pathod - are installed as
The *dev* script will create a `virtualenv`_ environment in a directory called "venv"
and install all mandatory and optional dependencies into it. The primary
mitmproxy components - mitmproxy and pathod - are installed as
"editable", so any changes to the source in the repository will be reflected
live in the virtualenv.
To confirm that you're up and running, activate the virtualenv, and run the
mitmproxy test suite:
.. code-block:: text
. venv/bin/activate # venv\Scripts\activate on Windows
py.test
Note that the main executables for the project - ``mitmdump``, ``mitmproxy``,
The main executables for the project - ``mitmdump``, ``mitmproxy``,
``mitmweb``, ``pathod``, and ``pathoc`` - are all created within the
virtualenv. After activating the virtualenv, they will be on your $PATH, and
you can run them like any other command:
.. code-block:: text
. venv/bin/activate # "venv\Scripts\activate" on Windows
mitmdump --version
For convenience, the project includes an autoenv_ file (`.env`_) that
auto-activates the virtualenv when you cd into the mitmproxy directory.
Testing
-------
If you've followed the procedure above, you already have all the development
requirements installed, and you can simply run the test suite:
requirements installed, and you can run the full test suite (including tests for code style and documentation) with tox_:
.. code-block:: text
py.test
tox
For speedier testing, we recommend you run `pytest`_ directly on individual test files or folders:
.. code-block:: text
cd test/mitmproxy/addons
pytest --cov mitmproxy.addons.anticache --looponfail test_anticache.py
As pytest does not check the code style, you probably want to run ``tox -e lint`` before committing your changes.
Please ensure that all patches are accompanied by matching changes in the test
suite. The project tries to maintain 100% test coverage.
You can also use `tox` to run the full suite of tests, including a quick test
to check documentation and code linting.
The following tox environments are relevant for local testing:
.. code-block:: text
tox -e py35 # runs all tests with Python 3.5
tox -e docs # runs a does-it-compile check on the documentation
tox -e lint # runs the linter for coding style checks
suite. The project tries to maintain 100% test coverage and enforces this strictly for some parts of the codebase.
Documentation
-------------
@@ -130,8 +124,8 @@ installation, you can render the documentation like this:
The last command invokes `sphinx-autobuild`_, which watches the Sphinx directory and rebuilds
the documentation when a change is detected.
Style
-----
Code Style
----------
Keeping to a consistent code style throughout the project makes it easier to
contribute and collaborate. Please stick to the guidelines in
@@ -151,7 +145,7 @@ with the following command:
:target: https://mitmproxy.org/
:alt: mitmproxy.org
.. |mitmproxy_docs| image:: https://readthedocs.org/projects/mitmproxy/badge/
.. |mitmproxy_docs| image:: https://shields.mitmproxy.org/api/docs-latest-brightgreen.svg
:target: http://docs.mitmproxy.org/en/latest/
:alt: mitmproxy documentation
@@ -163,15 +157,15 @@ with the following command:
:target: http://slack.mitmproxy.org/
:alt: Slack Developer Chat
.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=Travis%20build
.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=travis%20ci
:target: https://travis-ci.org/mitmproxy/mitmproxy
:alt: Travis Build Status
.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=Appveyor%20build
.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=appveyor%20ci
:target: https://ci.appveyor.com/project/mhils/mitmproxy
:alt: Appveyor Build Status
.. |coverage| image:: https://codecov.io/gh/mitmproxy/mitmproxy/branch/master/graph/badge.svg
.. |coverage| image:: https://shields.mitmproxy.org/codecov/c/github/mitmproxy/mitmproxy/master.svg?label=codecov
:target: https://codecov.io/gh/mitmproxy/mitmproxy
:alt: Coverage Status
@@ -183,11 +177,13 @@ with the following command:
:target: https://pypi.python.org/pypi/mitmproxy
:alt: Supported Python versions
.. _Python: https://www.python.org/
.. _virtualenv: http://virtualenv.readthedocs.org/en/latest/
.. _autoenv: https://github.com/kennethreitz/autoenv
.. _.env: https://github.com/mitmproxy/mitmproxy/blob/master/.env
.. _`advanced installation`: http://docs.mitmproxy.org/en/latest/install.html#advanced-installation
.. _virtualenv: https://virtualenv.pypa.io/
.. _`pytest`: http://pytest.org/
.. _tox: https://tox.readthedocs.io/
.. _Sphinx: http://sphinx-doc.org/
.. _sphinx-autobuild: https://pypi.python.org/pypi/sphinx-autobuild
.. _PEP8: https://www.python.org/dev/peps/pep-0008
.. _Google Style Guide: https://google.github.io/styleguide/pyguide.html
.. _`Google Style Guide`: https://google.github.io/styleguide/pyguide.html
.. _forums: https://discourse.mitmproxy.org/
.. _`good first contributions`: https://github.com/mitmproxy/mitmproxy/issues?q=is%3Aissue+is%3Aopen+label%3Agood-first-contribution

View File

@@ -1,20 +1,19 @@
$ErrorActionPreference = "Stop"
$VENV = ".\venv"
$pyver = python --version
if($pyver -notmatch "3\.[5-9]") {
Write-Warning "Unexpected Python version, expected Python 3.5 or above: $pyver"
}
python -m venv $VENV --copies
& $VENV\Scripts\activate.ps1
python -m venv .\venv --copies
& .\venv\Scripts\activate.ps1
python -m pip install --disable-pip-version-check -U pip
cmd /c "pip install -r requirements.txt 2>&1"
echo @"
* Created virtualenv environment in $VENV.
* Created virtualenv environment in .\venv.
* Installed all dependencies into the virtualenv.
* Activated virtualenv environment.

18
dev.sh
View File

@@ -2,16 +2,14 @@
set -e
set -x
PYVERSION=${1:-3.5}
VENV="venv$PYVERSION"
echo "Creating dev environment in ./venv..."
echo "Creating dev environment in $VENV using Python $PYVERSION"
python$PYVERSION -m venv "$VENV"
. "$VENV/bin/activate"
pip$PYVERSION install -U pip setuptools
pip$PYVERSION install -r requirements.txt
python3 -m venv venv
. venv/bin/activate
pip3 install -U pip setuptools
pip3 install -r requirements.txt
echo ""
echo "* Virtualenv created in $VENV and all dependencies installed."
echo "* You can now activate the $(python --version) virtualenv with this command: \`. $VENV/bin/activate\`"
echo " * Created virtualenv environment in ./venv."
echo " * Installed all dependencies into the virtualenv."
echo " * You can now activate the $(python3 --version) virtualenv with this command: \`. venv/bin/activate\`"

View File

@@ -40,7 +40,9 @@ start of mitmproxy.
iOS
^^^
http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377
See http://jasdev.me/intercepting-ios-traffic
and http://web.archive.org/web/20150920082614/http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377
iOS Simulator
^^^^^^^^^^^^^

View File

@@ -1,14 +0,0 @@
.. _architecture:
Architecture
============
To give you a better understanding of how mitmproxy works, mitmproxy's
high-level architecture is detailed in the following graphic:
.. image:: ../schematics/architecture.png
:download:`architecture.pdf <../schematics/architecture.pdf>`
Please don't refrain from asking any further
questions on the mailing list, the Slack channel or the GitHub issue tracker.

11
docs/dev/contributing.rst Normal file
View File

@@ -0,0 +1,11 @@
.. _contributing:
Contributing
============
As an open source project, **mitmproxy** welcomes contributions of all forms.
Please head over to the README_ to get started! 😃
.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst

View File

@@ -1,47 +0,0 @@
.. _testing:
Testing
=======
All the mitmproxy projects strive to maintain 100% code coverage. In general,
patches and pull requests will be declined unless they're accompanied by a
suitable extension to the test suite.
Our tests are written for the `py.test`_ or nose_ test frameworks.
At the point where you send your pull request, a command like this:
>>> py.test --cov mitmproxy
Should give output something like this:
.. code-block:: none
> ---------- coverage: platform darwin, python 2.7.2-final-0 --
> Name Stmts Miss Cover Missing
> ----------------------------------------------------
> mitmproxy/__init__ 0 0 100%
> mitmproxy/app 4 0 100%
> mitmproxy/cmdline 100 0 100%
> mitmproxy/controller 69 0 100%
> mitmproxy/dump 150 0 100%
> mitmproxy/encoding 39 0 100%
> mitmproxy/flowfilter 201 0 100%
> mitmproxy/flow 891 0 100%
> mitmproxy/proxy 427 0 100%
> mitmproxy/script 27 0 100%
> mitmproxy/utils 133 0 100%
> mitmproxy/version 4 0 100%
> ----------------------------------------------------
> TOTAL 2045 0 100%
> ----------------------------------------------------
> Ran 251 tests in 11.864s
There are exceptions to the coverage requirement - for instance, much of the
console interface code can't sensibly be unit tested. These portions are
excluded from coverage analysis either in the **.coveragerc** file, or using
**#pragma no-cover** directives. To keep our coverage analysis relevant, we use
these measures as sparingly as possible.
.. _nose: https://nose.readthedocs.org/en/latest/
.. _py.test: https://pytest.org/

View File

@@ -33,6 +33,19 @@ updated in a similar way.
You can turn off response refreshing using the ``--norefresh`` argument, or using
the :kbd:`o` options shortcut within :program:`mitmproxy`.
Replaying a session recorded in Reverse-proxy Mode
--------------------------------------------------
If you have captured the session in reverse proxy mode, in order to replay it you
still have to specify the server URL, otherwise you may get the error:
'HTTP protocol error in client request: Invalid HTTP request form (expected authority or absolute...)'.
During replay, when the client's requests match previously recorded requests, then the
respective recorded responses are simply replayed by mitmproxy.
Otherwise, the unmatched requests is forwarded to the upstream server.
If forwarding is not desired, you can use the --kill (-k) switch to prevent that.
================== ===========
command-line ``-S path``
mitmproxy shortcut :kbd:`R` then :kbd:`s`

View File

@@ -47,6 +47,7 @@
transparent
transparent/linux
transparent/osx
transparent/openbsd
.. toctree::
:hidden:
@@ -78,10 +79,9 @@
.. toctree::
:hidden:
:caption: Hacking
:caption: Development
dev/architecture
dev/testing
dev/contributing
dev/sslkeylogfile
.. Indices and tables

View File

@@ -20,7 +20,7 @@ You can use Homebrew to install everything:
brew install mitmproxy
Or you can download the pre-built binary packages from `mitmproxy.org`_.
Or you can download the pre-built binary packages from our `releases`_.
.. _install-windows:
@@ -35,7 +35,7 @@ Both executables are added to your PATH and can be invoked from the command
line.
.. note::
mitmproxy's console interface is not supported on Windows, but you can use
Mitmproxy's console interface is not supported on Windows, but you can use
mitmweb (the web-based interface) and mitmdump.
.. _install-linux:
@@ -44,7 +44,7 @@ Installation on Linux
---------------------
The recommended way to run mitmproxy on Linux is to use the pre-built binaries
provided at `mitmproxy.org`_.
provided at `releases`_.
Our pre-built binaries provide you with the latest version of mitmproxy, a
self-contained Python 3.5 environment and a recent version of OpenSSL that
@@ -85,7 +85,7 @@ libraries. This was tested on a fully patched installation of Ubuntu 16.04.
.. code:: bash
sudo apt-get install python3-pip python3-dev libffi-dev libssl-dev libtiff5-dev libjpeg8-dev zlib1g-dev libwebp-dev
sudo apt-get install python3-dev python3-pip libffi-dev libssl-dev
sudo pip3 install mitmproxy # or pip3 install --user mitmproxy
On older Ubuntu versions, e.g., **12.04** and **14.04**, you may need to install
@@ -104,7 +104,7 @@ libraries. This was tested on a fully patched installation of Fedora 24.
.. code:: bash
sudo dnf install make gcc redhat-rpm-config python3-pip python3-devel libffi-devel openssl-devel libtiff-devel libjpeg-devel zlib-devel libwebp-devel openjpeg2-devel
sudo dnf install make gcc redhat-rpm-config python3-devel python3-pip libffi-devel openssl-devel
sudo pip3 install mitmproxy # or pip3 install --user mitmproxy
Make sure to have an up-to-date version of pip by running ``pip3 install -U pip``.
@@ -117,13 +117,13 @@ Make sure to have an up-to-date version of pip by running ``pip3 install -U pip`
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. note::
mitmproxy's console interface is not supported on Windows, but you can use
Mitmproxy's console interface is not supported on Windows, but you can use
mitmweb (the web-based interface) and mitmdump.
First, install the latest version of Python 3.5 or later from the `Python
website`_. During installation, make sure to select `Add Python to PATH`.
Now, you can install mitmproxy by running
Mitmproxy has no other dependencies on Windows. You can now install mitmproxy by running
.. code:: powershell
@@ -139,11 +139,12 @@ Latest Development Version
If you would like to install mitmproxy directly from the master branch on GitHub
or would like to get set up to contribute to the project, install the
dependencies as you would for a regular installation from source. Then see the
Hacking_ section of the README on GitHub. You can check your system information
by running: ``mitmproxy --sysinfo``
project's README_ on GitHub. You can check your system information
by running: ``mitmproxy --version``
.. _Hacking: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst#hacking
.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst
.. _releases: https://github.com/mitmproxy/mitmproxy/releases
.. _mitmproxy.org: https://mitmproxy.org/
.. _`Python website`: https://www.python.org/downloads/windows/
.. _pip: https://pip.pypa.io/en/latest/installing.html

Binary file not shown.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 85 KiB

Binary file not shown.

View File

@@ -27,7 +27,7 @@ Fully transparent mode
By default mitmproxy will use its own local ip address for its server-side connections.
In case this isn't desired, the --spoof-source-address argument can be used to
use the client's ip address for server-side connections. The following config is
required for this mode to work:
required for this mode to work::
CLIENT_NET=192.168.1.0/24
TABLE_ID=100
@@ -42,9 +42,9 @@ required for this mode to work:
This mode does require root privileges though. There's a wrapper in the examples directory
called 'mitmproxy_shim.c', which will enable you to use this mode with dropped priviliges.
It can be used as follows:
It can be used as follows::
gcc examples/mitmproxy_shim.c -o mitmproxy_shim -lcap
gcc examples/complex/full_transparency_shim.c -o mitmproxy_shim -lcap
sudo chown root:root mitmproxy_shim
sudo chmod u+s mitmproxy_shim
./mitmproxy_shim $(which mitmproxy) -T --spoof-source-address

View File

@@ -0,0 +1,53 @@
.. _openbsd:
OpenBSD
=======
1. :ref:`Install the mitmproxy certificate on the test device <certinstall>`
2. Enable IP forwarding:
>>> sudo sysctl -w net.inet.ip.forwarding=1
3. Place the following two lines in **/etc/pf.conf**:
.. code-block:: none
mitm_if = "re2"
pass in quick proto tcp from $mitm_if to port { 80, 443 } divert-to 127.0.0.1 port 8080
These rules tell pf to divert all traffic from ``$mitm_if`` destined for
port 80 or 443 to the local mitmproxy instance running on port 8080. You
should replace ``$mitm_if`` value with the interface on which your test
device will appear.
4. Configure pf with the rules:
>>> doas pfctl -f /etc/pf.conf
5. And now enable it:
>>> doas pfctl -e
6. Fire up mitmproxy. You probably want a command like this:
>>> mitmproxy -T --host
The ``-T`` flag turns on transparent mode, and the ``--host``
argument tells mitmproxy to use the value of the Host header for URL display.
7. Finally, configure your test device to use the host on which mitmproxy is
running as the default gateway.
.. note::
Note that the **divert-to** rules in the pf.conf given above only apply to
inbound traffic. **This means that they will NOT redirect traffic coming
from the box running pf itself.** We can't distinguish between an outbound
connection from a non-mitmproxy app, and an outbound connection from
mitmproxy itself - if you want to intercept your traffic, you should use an
external host to run mitmproxy. Nonetheless, pf is flexible to cater for a
range of creative possibilities, like intercepting traffic emanating from
VMs. See the **pf.conf** man page for more.
.. _pf: http://man.openbsd.org/OpenBSD-current/man5/pf.conf.5

View File

@@ -63,7 +63,7 @@ Note that this means we don't support transparent mode for earlier versions of O
running pf itself.** We can't distinguish between an outbound connection from a
non-mitmproxy app, and an outbound connection from mitmproxy itself - if you
want to intercept your OSX traffic, you should use an external host to run
mitmproxy. None the less, pf is flexible to cater for a range of creative
mitmproxy. Nonetheless, pf is flexible to cater for a range of creative
possibilities, like intercepting traffic emanating from VMs. See the
**pf.conf** man page for more.

View File

@@ -12,7 +12,7 @@
| nonblocking.py | Demonstrate parallel processing with a blocking script |
| remote_debug.py | This script enables remote debugging of the mitmproxy _UI_ with PyCharm. |
| sslstrip.py | sslstrip-like funtionality implemented with mitmproxy |
| stream | Enable streaming for all responses. |
| stream.py | Enable streaming for all responses. |
| stream_modify.py | Modify a streamed response body. |
| tcp_message.py | Modify a raw TCP connection |
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |

View File

@@ -1,5 +1,5 @@
"""
This inline scripts makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
This script makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
connections to mitmproxy. The way this works is that we rely on either the TLS Server Name Indication (SNI) or the
Host header of the HTTP request.
Of course, this is not foolproof - if an HTTPS connection comes without SNI, we don't
@@ -28,22 +28,35 @@ import re
parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
def request(flow):
if flow.client_conn.ssl_established:
flow.request.scheme = "https"
sni = flow.client_conn.connection.get_servername()
port = 443
else:
flow.request.scheme = "http"
sni = None
port = 80
class Rerouter:
def requestheaders(self, flow):
"""
The original host header is retrieved early
before flow.request is replaced by mitmproxy new outgoing request
"""
flow.metadata["original_host"] = flow.request.host_header
host_header = flow.request.pretty_host
m = parse_host_header.match(host_header)
if m:
host_header = m.group("host").strip("[]")
if m.group("port"):
port = int(m.group("port"))
def request(self, flow):
if flow.client_conn.ssl_established:
flow.request.scheme = "https"
sni = flow.client_conn.connection.get_servername()
port = 443
else:
flow.request.scheme = "http"
sni = None
port = 80
flow.request.host = sni or host_header
flow.request.port = port
host_header = flow.metadata["original_host"]
m = parse_host_header.match(host_header)
if m:
host_header = m.group("host").strip("[]")
if m.group("port"):
port = int(m.group("port"))
flow.request.host_header = host_header
flow.request.host = sni or host_header
flow.request.port = port
def start():
return Rerouter()

View File

@@ -7,6 +7,7 @@ import json
import sys
import base64
import zlib
import os
from datetime import datetime
import pytz
@@ -166,7 +167,7 @@ def done():
if dump_file.endswith('.zhar'):
raw = zlib.compress(raw, 9)
with open(dump_file, "wb") as f:
with open(os.path.expanduser(dump_file), "wb") as f:
f.write(raw)
mitmproxy.ctx.log("HAR dump finished (wrote %s bytes to file)" % len(json_dump))

View File

@@ -7,7 +7,7 @@
| filter_flows.py | This script demonstrates how to use mitmproxy's filter pattern in scripts. |
| io_read_dumpfile.py | Read a dumpfile generated by mitmproxy. |
| io_write_dumpfile.py | Only write selected flows into a mitmproxy dumpfile. |
| logging.py | Use mitmproxy's logging API. |
| log_events.py | Use mitmproxy's logging API. |
| modify_body_inject_iframe.py | Inject configurable iframe into pages. |
| modify_form.py | Modify HTTP form submissions. |
| modify_querystring.py | Modify HTTP query strings. |
@@ -15,4 +15,4 @@
| script_arguments.py | Add arguments to a script. |
| send_reply_from_proxy.py | Send a HTTP response directly from the proxy. |
| upsidedownternet.py | Turn all images upside down. |
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |

View File

@@ -10,7 +10,7 @@ class ViewSwapCase(contentviews.View):
# We don't have a good solution for the keyboard shortcut yet -
# you manually need to find a free letter. Contributions welcome :)
prompt = ("swap case text", "p")
prompt = ("swap case text", "z")
content_types = ["text/plain"]
def __call__(self, data: bytes, **metadata):

View File

@@ -13,7 +13,7 @@
<!--
Cut and paste the output of "mitmdump --sysinfo".
Cut and paste the output of "mitmproxy --version".
If you're using an older version if mitmproxy, please specify the version
and OS.

View File

@@ -1,35 +1,40 @@
from mitmproxy.addons import anticache
from mitmproxy.addons import anticomp
from mitmproxy.addons import check_alpn
from mitmproxy.addons import check_ca
from mitmproxy.addons import clientplayback
from mitmproxy.addons import streamfile
from mitmproxy.addons import disable_h2c_upgrade
from mitmproxy.addons import onboarding
from mitmproxy.addons import proxyauth
from mitmproxy.addons import replace
from mitmproxy.addons import script
from mitmproxy.addons import setheaders
from mitmproxy.addons import serverplayback
from mitmproxy.addons import setheaders
from mitmproxy.addons import stickyauth
from mitmproxy.addons import stickycookie
from mitmproxy.addons import streambodies
from mitmproxy.addons import streamfile
from mitmproxy.addons import upstream_auth
from mitmproxy.addons import disable_h2c_upgrade
def default_addons():
return [
onboarding.Onboarding(),
proxyauth.ProxyAuth(),
anticache.AntiCache(),
anticomp.AntiComp(),
check_alpn.CheckALPN(),
check_ca.CheckCA(),
clientplayback.ClientPlayback(),
disable_h2c_upgrade.DisableH2CleartextUpgrade(),
onboarding.Onboarding(),
proxyauth.ProxyAuth(),
replace.Replace(),
replace.ReplaceFile(),
script.ScriptLoader(),
serverplayback.ServerPlayback(),
setheaders.SetHeaders(),
stickyauth.StickyAuth(),
stickycookie.StickyCookie(),
script.ScriptLoader(),
streamfile.StreamFile(),
streambodies.StreamBodies(),
replace.Replace(),
setheaders.SetHeaders(),
serverplayback.ServerPlayback(),
clientplayback.ClientPlayback(),
streamfile.StreamFile(),
upstream_auth.UpstreamAuth(),
disable_h2c_upgrade.DisableH2CleartextUpgrade(),
]

View File

@@ -0,0 +1,17 @@
import mitmproxy
from mitmproxy.net import tcp
class CheckALPN:
def __init__(self):
self.failed = False
def configure(self, options, updated):
self.failed = mitmproxy.ctx.master.options.http2 and not tcp.HAS_ALPN
if self.failed:
mitmproxy.ctx.master.add_log(
"HTTP/2 is disabled because ALPN support missing!\n"
"OpenSSL 1.0.2+ required to support HTTP/2 connections.\n"
"Use --no-http2 to silence this warning.",
"warn",
)

View File

@@ -0,0 +1,24 @@
import mitmproxy
class CheckCA:
def __init__(self):
self.failed = False
def configure(self, options, updated):
has_ca = (
mitmproxy.ctx.master.server and
mitmproxy.ctx.master.server.config and
mitmproxy.ctx.master.server.config.certstore and
mitmproxy.ctx.master.server.config.certstore.default_ca
)
if has_ca:
self.failed = mitmproxy.ctx.master.server.config.certstore.default_ca.has_expired()
if self.failed:
mitmproxy.ctx.master.add_log(
"The mitmproxy certificate authority has expired!\n"
"Please delete all CA-related files in your ~/.mitmproxy folder.\n"
"The CA will be regenerated automatically after restarting mitmproxy.\n"
"Then make sure all your clients have the new CA installed.",
"warn",
)

View File

@@ -2,6 +2,7 @@ import itertools
import sys
import click
import shutil
import typing # noqa
@@ -124,6 +125,9 @@ class Dumper:
url = flow.request.pretty_url
else:
url = flow.request.url
terminalWidthLimit = max(shutil.get_terminal_size()[0] - 25, 50)
if self.flow_detail < 1 and len(url) > terminalWidthLimit:
url = url[:terminalWidthLimit] + ""
url = click.style(strutils.escape_control_characters(url), bold=True)
http_version = ""
@@ -234,7 +238,7 @@ class Dumper:
def websocket_message(self, f):
if self.match(f):
message = f.messages[-1]
self.echo(message.info)
self.echo(f.message_info(message))
if self.flow_detail >= 3:
self._echo_message(message)

View File

@@ -8,9 +8,9 @@ class Onboarding(wsgiapp.WSGIApp):
self.enabled = False
def configure(self, options, updated):
self.host = options.app_host
self.port = options.app_port
self.enabled = options.app
self.host = options.onboarding_host
self.port = options.onboarding_port
self.enabled = options.onboarding
def request(self, f):
if self.enabled:

View File

@@ -1,35 +1,43 @@
import binascii
import weakref
from typing import Optional
from typing import Set # noqa
from typing import Tuple
import passlib.apache
import mitmproxy.net.http
from mitmproxy import connections # noqa
from mitmproxy import exceptions
from mitmproxy import http
import mitmproxy.net.http
from mitmproxy.net.http import status_codes
REALM = "mitmproxy"
def mkauth(username, password, scheme="basic"):
def mkauth(username: str, password: str, scheme: str = "basic") -> str:
"""
Craft a basic auth string
"""
v = binascii.b2a_base64(
(username + ":" + password).encode("utf8")
).decode("ascii")
return scheme + " " + v
def parse_http_basic_auth(s):
words = s.split()
if len(words) != 2:
return None
scheme = words[0]
def parse_http_basic_auth(s: str) -> Tuple[str, str, str]:
"""
Parse a basic auth header.
Raises a ValueError if the input is invalid.
"""
scheme, authinfo = s.split()
if scheme.lower() != "basic":
raise ValueError("Unknown scheme")
try:
user = binascii.a2b_base64(words[1]).decode("utf8", "replace")
except binascii.Error:
return None
parts = user.split(':')
if len(parts) != 2:
return None
return scheme, parts[0], parts[1]
user, password = binascii.a2b_base64(authinfo.encode()).decode("utf8", "replace").split(":")
except binascii.Error as e:
raise ValueError(str(e))
return scheme, user, password
class ProxyAuth:
@@ -37,67 +45,72 @@ class ProxyAuth:
self.nonanonymous = False
self.htpasswd = None
self.singleuser = None
self.mode = None
self.authenticated = weakref.WeakSet() # type: Set[connections.ClientConnection]
"""Contains all connections that are permanently authenticated after an HTTP CONNECT"""
def enabled(self):
def enabled(self) -> bool:
return any([self.nonanonymous, self.htpasswd, self.singleuser])
def which_auth_header(self, f):
if f.mode == "regular":
def is_proxy_auth(self) -> bool:
"""
Returns:
- True, if authentication is done as if mitmproxy is a proxy
- False, if authentication is done as if mitmproxy is a HTTP server
"""
return self.mode in ("regular", "upstream")
def which_auth_header(self) -> str:
if self.is_proxy_auth():
return 'Proxy-Authorization'
else:
return 'Authorization'
def auth_required_response(self, f):
if f.mode == "regular":
hdrname = 'Proxy-Authenticate'
else:
hdrname = 'WWW-Authenticate'
headers = mitmproxy.net.http.Headers()
headers[hdrname] = 'Basic realm="%s"' % REALM
if f.mode == "transparent":
def auth_required_response(self) -> http.HTTPResponse:
if self.is_proxy_auth():
return http.make_error_response(
401,
"Authentication Required",
headers
status_codes.PROXY_AUTH_REQUIRED,
headers=mitmproxy.net.http.Headers(Proxy_Authenticate='Basic realm="{}"'.format(REALM)),
)
else:
return http.make_error_response(
407,
"Proxy Authentication Required",
headers,
status_codes.UNAUTHORIZED,
headers=mitmproxy.net.http.Headers(WWW_Authenticate='Basic realm="{}"'.format(REALM)),
)
def check(self, f):
auth_value = f.request.headers.get(self.which_auth_header(f), None)
if not auth_value:
return False
parts = parse_http_basic_auth(auth_value)
if not parts:
return False
scheme, username, password = parts
if scheme.lower() != 'basic':
return False
def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]:
"""
Check if a request is correctly authenticated.
Returns:
- a (username, password) tuple if successful,
- None, otherwise.
"""
auth_value = f.request.headers.get(self.which_auth_header(), "")
try:
scheme, username, password = parse_http_basic_auth(auth_value)
except ValueError:
return None
if self.nonanonymous:
pass
return username, password
elif self.singleuser:
if [username, password] != self.singleuser:
return False
if self.singleuser == [username, password]:
return username, password
elif self.htpasswd:
if not self.htpasswd.check_password(username, password):
return False
else:
raise NotImplementedError("Should never happen.")
if self.htpasswd.check_password(username, password):
return username, password
return True
return None
def authenticate(self, f):
if self.check(f):
del f.request.headers[self.which_auth_header(f)]
def authenticate(self, f: http.HTTPFlow) -> bool:
valid_credentials = self.check(f)
if valid_credentials:
f.metadata["proxyauth"] = valid_credentials
del f.request.headers[self.which_auth_header()]
return True
else:
f.response = self.auth_required_response(f)
f.response = self.auth_required_response()
return False
# Handlers
def configure(self, options, updated):
@@ -125,24 +138,28 @@ class ProxyAuth:
)
else:
self.htpasswd = None
if "mode" in updated:
self.mode = options.mode
if self.enabled():
if options.mode == "transparent":
raise exceptions.OptionsError(
"Proxy Authentication not supported in transparent mode."
)
elif options.mode == "socks5":
if options.mode == "socks5":
raise exceptions.OptionsError(
"Proxy Authentication not supported in SOCKS mode. "
"https://github.com/mitmproxy/mitmproxy/issues/738"
)
# TODO: check for multiple auth options
# TODO: check for multiple auth options
def http_connect(self, f):
if self.enabled() and f.mode == "regular":
self.authenticate(f)
def requestheaders(self, f):
def http_connect(self, f: http.HTTPFlow) -> None:
if self.enabled():
# Are we already authenticated in CONNECT?
if not (f.mode == "regular" and f.server_conn.via):
self.authenticate(f)
if self.authenticate(f):
self.authenticated.add(f.client_conn)
def requestheaders(self, f: http.HTTPFlow) -> None:
if self.enabled():
# Is this connection authenticated by a previous HTTP CONNECT?
if f.client_conn in self.authenticated:
return
self.authenticate(f)

View File

@@ -2,9 +2,47 @@ import re
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import ctx
class Replace:
def parse_hook(s):
"""
Returns a (pattern, regex, replacement) tuple.
The general form for a replacement hook is as follows:
/patt/regex/replacement
The first character specifies the separator. Example:
:~q:foo:bar
If only two clauses are specified, the pattern is set to match
universally (i.e. ".*"). Example:
/foo/bar/
Clauses are parsed from left to right. Extra separators are taken to be
part of the final clause. For instance, the replacement clause below is
"foo/bar/":
/one/two/foo/bar/
"""
sep, rem = s[0], s[1:]
parts = rem.split(sep, 2)
if len(parts) == 2:
patt = ".*"
a, b = parts
elif len(parts) == 3:
patt, a, b = parts
else:
raise exceptions.OptionsError(
"Invalid replacement specifier: %s" % s
)
return patt, a, b
class _ReplaceBase:
def __init__(self):
self.lst = []
@@ -16,9 +54,14 @@ class Replace:
rex: a regular expression, as bytes.
s: the replacement string, as bytes
"""
if "replacements" in updated:
if self.optionName in updated:
lst = []
for fpatt, rex, s in options.replacements:
for rep in getattr(options, self.optionName):
if isinstance(rep, str):
fpatt, rex, s = parse_hook(rep)
else:
fpatt, rex, s = rep
flt = flowfilter.parse(fpatt)
if not flt:
raise exceptions.OptionsError(
@@ -37,9 +80,9 @@ class Replace:
for rex, s, flt in self.lst:
if flt(f):
if f.response:
f.response.replace(rex, s, flags=re.DOTALL)
self.replace(f.response, rex, s)
else:
f.request.replace(rex, s, flags=re.DOTALL)
self.replace(f.request, rex, s)
def request(self, flow):
if not flow.reply.has_message:
@@ -48,3 +91,22 @@ class Replace:
def response(self, flow):
if not flow.reply.has_message:
self.execute(flow)
class Replace(_ReplaceBase):
optionName = "replacements"
def replace(self, obj, rex, s):
obj.replace(rex, s, flags=re.DOTALL)
class ReplaceFile(_ReplaceBase):
optionName = "replacement_files"
def replace(self, obj, rex, s):
try:
v = open(s, "rb").read()
except IOError as e:
ctx.log.warn("Could not read replacement file: %s" % s)
return
obj.replace(rex, v, flags=re.DOTALL)

View File

@@ -8,7 +8,7 @@ import types
from mitmproxy import exceptions
from mitmproxy import ctx
from mitmproxy import events
from mitmproxy import eventsequence
import watchdog.events
@@ -20,7 +20,7 @@ def parse_command(command):
Returns a (path, args) tuple.
"""
if not command or not command.strip():
raise exceptions.OptionsError("Empty script command.")
raise ValueError("Empty script command.")
# Windows: escape all backslashes in the path.
if os.name == "nt": # pragma: no cover
backslashes = shlex.split(command, posix=False)[0].count("\\")
@@ -28,13 +28,13 @@ def parse_command(command):
args = shlex.split(command) # pragma: no cover
args[0] = os.path.expanduser(args[0])
if not os.path.exists(args[0]):
raise exceptions.OptionsError(
raise ValueError(
("Script file not found: %s.\r\n"
"If your script path contains spaces, "
"make sure to wrap it in additional quotes, e.g. -s \"'./foo bar/baz.py' --args\".") %
args[0])
elif os.path.isdir(args[0]):
raise exceptions.OptionsError("Not a file: %s" % args[0])
raise ValueError("Not a file: %s" % args[0])
return args[0], args[1:]
@@ -110,11 +110,16 @@ class ReloadHandler(watchdog.events.FileSystemEventHandler):
self.callback = callback
def filter(self, event):
"""
Returns True only when .py file is changed
"""
if event.is_directory:
return False
if os.path.basename(event.src_path).startswith("."):
return False
return True
if event.src_path.endswith(".py"):
return True
return False
def on_modified(self, event):
if self.filter(event):
@@ -141,7 +146,7 @@ class Script:
self.last_options = None
self.should_reload = threading.Event()
for i in events.Events:
for i in eventsequence.Events:
if not hasattr(self, i):
def mkprox():
evt = i
@@ -205,10 +210,13 @@ class ScriptLoader:
An addon that manages loading scripts from options.
"""
def run_once(self, command, flows):
sc = Script(command)
try:
sc = Script(command)
except ValueError as e:
raise ValueError(str(e))
sc.load_script()
for f in flows:
for evt, o in events.event_sequence(f):
for evt, o in eventsequence.iterate(f):
sc.run(evt, o)
sc.done()
return sc
@@ -246,7 +254,10 @@ class ScriptLoader:
ordered.append(current[s])
else:
ctx.log.info("Loading script: %s" % s)
sc = Script(s)
try:
sc = Script(s)
except ValueError as e:
raise exceptions.OptionsError(str(e))
ordered.append(sc)
newscripts.append(sc)

View File

@@ -2,6 +2,43 @@ from mitmproxy import exceptions
from mitmproxy import flowfilter
def parse_setheader(s):
"""
Returns a (pattern, regex, replacement) tuple.
The general form for a replacement hook is as follows:
/patt/regex/replacement
The first character specifies the separator. Example:
:~q:foo:bar
If only two clauses are specified, the pattern is set to match
universally (i.e. ".*"). Example:
/foo/bar/
Clauses are parsed from left to right. Extra separators are taken to be
part of the final clause. For instance, the replacement clause below is
"foo/bar/":
/one/two/foo/bar/
"""
sep, rem = s[0], s[1:]
parts = rem.split(sep, 2)
if len(parts) == 2:
patt = ".*"
a, b = parts
elif len(parts) == 3:
patt, a, b = parts
else:
raise exceptions.OptionsError(
"Invalid replacement specifier: %s" % s
)
return patt, a, b
class SetHeaders:
def __init__(self):
self.lst = []
@@ -16,7 +53,12 @@ class SetHeaders:
"""
if "setheaders" in updated:
self.lst = []
for fpatt, header, value in options.setheaders:
for shead in options.setheaders:
if isinstance(shead, str):
fpatt, header, value = parse_setheader(shead)
else:
fpatt, header, value = shead
flt = flowfilter.parse(fpatt)
if not flt:
raise exceptions.OptionsError(

View File

@@ -5,7 +5,7 @@ from mitmproxy import log
class TermLog:
def __init__(self, outfile=sys.stdout):
def __init__(self, outfile=None):
self.options = None
self.outfile = outfile
@@ -13,10 +13,15 @@ class TermLog:
self.options = options
def log(self, e):
if log.log_tier(e.level) == log.log_tier("error"):
outfile = self.outfile or sys.stderr
else:
outfile = self.outfile or sys.stdout
if self.options.verbosity >= log.log_tier(e.level):
click.secho(
e.msg,
file=self.outfile,
file=outfile,
fg=dict(error="red", warn="yellow").get(e.level),
dim=(e.level == "debug"),
err=(e.level == "error")

View File

@@ -102,7 +102,7 @@ orders = [
class View(collections.Sequence):
def __init__(self):
super().__init__()
self._store = {}
self._store = collections.OrderedDict()
self.filter = matchall
# Should we show only marked flows?
self.show_marked = False
@@ -230,6 +230,17 @@ class View(collections.Sequence):
self.sig_view_refresh.send(self)
self.sig_store_refresh.send(self)
def clear_not_marked(self):
"""
Clears only the unmarked flows.
"""
for flow in self._store.copy().values():
if not flow.marked:
self._store.pop(flow.id)
self._refilter()
self.sig_store_refresh.send(self)
def add(self, f: mitmproxy.flow.Flow) -> bool:
"""
Adds a flow to the state. If the flow already exists, it is
@@ -298,19 +309,19 @@ class View(collections.Sequence):
"Invalid interception filter: %s" % opts.filter
)
self.set_filter(filt)
if "order" in updated:
if opts.order is None:
if "console_order" in updated:
if opts.console_order is None:
self.set_order(self.default_order)
else:
if opts.order not in self.orders:
if opts.console_order not in self.orders:
raise exceptions.OptionsError(
"Unknown flow order: %s" % opts.order
"Unknown flow order: %s" % opts.console_order
)
self.set_order(self.orders[opts.order])
if "order_reversed" in updated:
self.set_reversed(opts.order_reversed)
if "focus_follow" in updated:
self.focus_follow = opts.focus_follow
self.set_order(self.orders[opts.console_order])
if "console_order_reversed" in updated:
self.set_reversed(opts.console_order_reversed)
if "console_focus_follow" in updated:
self.focus_follow = opts.console_focus_follow
def request(self, f):
self.add(f)

View File

@@ -3,8 +3,8 @@ import ssl
import time
import datetime
import ipaddress
import sys
from pyasn1.type import univ, constraint, char, namedtype, tag
from pyasn1.codec.der.decoder import decode
from pyasn1.error import PyAsn1Error
@@ -13,8 +13,8 @@ import OpenSSL
from mitmproxy.types import serializable
# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3
# Generated with "openssl dhparam". It's too slow to generate this on startup.
DEFAULT_DHPARAM = b"""
-----BEGIN DH PARAMETERS-----

View File

@@ -1,6 +1,5 @@
import time
import copy
import os
from mitmproxy import stateobject
@@ -82,9 +81,6 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
tls_version=str,
)
def copy(self):
return copy.copy(self)
def send(self, message):
if isinstance(message, list):
message = b''.join(message)
@@ -222,9 +218,6 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
via=None
))
def copy(self):
return copy.copy(self)
def connect(self):
self.timestamp_start = time.time()
tcp.TCPClient.connect(self)

View File

@@ -159,6 +159,7 @@ def get_content_view(viewmode: View, data: bytes, **metadata):
return desc, safe_to_print(content), error
# The order in which ContentViews are added is important!
add(auto.ViewAuto())
add(raw.ViewRaw())
add(hex.ViewHex())
@@ -172,9 +173,7 @@ add(urlencoded.ViewURLEncoded())
add(multipart.ViewMultipart())
add(image.ViewImage())
add(query.ViewQuery())
if protobuf.ViewProtobuf.is_available():
add(protobuf.ViewProtobuf())
add(protobuf.ViewProtobuf())
__all__ = [
"View", "VIEW_CUTOFF", "KEY_MAX", "format_text", "format_dict",

View File

@@ -18,6 +18,8 @@ class ViewAuto(base.View):
return contentviews.content_types_map[ct][0](data, **metadata)
elif strutils.is_xml(data):
return contentviews.get("XML/HTML")(data, **metadata)
elif ct.startswith("image/"):
return contentviews.get("Image")(data, **metadata)
if metadata.get("query"):
return contentviews.get("Query")(data, **metadata)
if data and strutils.is_mostly_bin(data):

View File

@@ -1,45 +0,0 @@
import io
from PIL import ExifTags
from PIL import Image
from mitmproxy.types import multidict
from . import base
class ViewImage(base.View):
name = "Image"
prompt = ("image", "i")
content_types = [
"image/png",
"image/jpeg",
"image/gif",
"image/vnd.microsoft.icon",
"image/x-icon",
]
def __call__(self, data, **metadata):
try:
img = Image.open(io.BytesIO(data))
except IOError:
return None
parts = [
("Format", str(img.format_description)),
("Size", "%s x %s px" % img.size),
("Mode", str(img.mode)),
]
for i in sorted(img.info.keys()):
if i != "exif":
parts.append(
(str(i), str(img.info[i]))
)
if hasattr(img, "_getexif"):
ex = img._getexif()
if ex:
for i in sorted(ex.keys()):
tag = ExifTags.TAGS.get(i, i)
parts.append(
(str(tag), str(ex[i]))
)
fmt = base.format_dict(multidict.MultiDict(parts))
return "%s image" % img.format, fmt

View File

@@ -0,0 +1,3 @@
from .view import ViewImage
__all__ = ["ViewImage"]

View File

@@ -0,0 +1,80 @@
import io
import typing
from kaitaistruct import KaitaiStream
from mitmproxy.contrib.kaitaistruct import png
from mitmproxy.contrib.kaitaistruct import gif
from mitmproxy.contrib.kaitaistruct import jpeg
Metadata = typing.List[typing.Tuple[str, str]]
def parse_png(data: bytes) -> Metadata:
img = png.Png(KaitaiStream(io.BytesIO(data)))
parts = [
('Format', 'Portable network graphics'),
('Size', "{0} x {1} px".format(img.ihdr.width, img.ihdr.height))
]
for chunk in img.chunks:
if chunk.type == 'gAMA':
parts.append(('gamma', str(chunk.body.gamma_int / 100000)))
elif chunk.type == 'pHYs':
aspectx = chunk.body.pixels_per_unit_x
aspecty = chunk.body.pixels_per_unit_y
parts.append(('aspect', "{0} x {1}".format(aspectx, aspecty)))
elif chunk.type == 'tEXt':
parts.append((chunk.body.keyword, chunk.body.text))
elif chunk.type == 'iTXt':
parts.append((chunk.body.keyword, chunk.body.text))
elif chunk.type == 'zTXt':
parts.append((chunk.body.keyword, chunk.body.text_datastream.decode('iso8859-1')))
return parts
def parse_gif(data: bytes) -> Metadata:
img = gif.Gif(KaitaiStream(io.BytesIO(data)))
descriptor = img.logical_screen_descriptor
parts = [
('Format', 'Compuserve GIF'),
('Version', "GIF{}".format(img.header.version.decode('ASCII'))),
('Size', "{} x {} px".format(descriptor.screen_width, descriptor.screen_height)),
('background', str(descriptor.bg_color_index))
]
ext_blocks = []
for block in img.blocks:
if block.block_type.name == 'extension':
ext_blocks.append(block)
comment_blocks = []
for block in ext_blocks:
if block.body.label._name_ == 'comment':
comment_blocks.append(block)
for block in comment_blocks:
entries = block.body.body.entries
for entry in entries:
comment = entry.bytes
if comment is not b'':
parts.append(('comment', str(comment)))
return parts
def parse_jpeg(data: bytes) -> Metadata:
img = jpeg.Jpeg(KaitaiStream(io.BytesIO(data)))
parts = [
('Format', 'JPEG (ISO 10918)')
]
for segment in img.segments:
if segment.marker._name_ == 'sof0':
parts.append(('Size', "{0} x {1} px".format(segment.data.image_width, segment.data.image_height)))
if segment.marker._name_ == 'app0':
parts.append(('jfif_version', "({0}, {1})".format(segment.data.version_major, segment.data.version_minor)))
parts.append(('jfif_density', "({0}, {1})".format(segment.data.density_x, segment.data.density_y)))
parts.append(('jfif_unit', str(segment.data.density_units._value_)))
if segment.marker._name_ == 'com':
parts.append(('comment', str(segment.data)))
if segment.marker._name_ == 'app1':
if hasattr(segment.data, 'body'):
for field in segment.data.body.data.body.ifd0.fields:
if field.data is not None:
parts.append((field.tag._name_, field.data.decode('UTF-8').strip('\x00')))
return parts

View File

@@ -0,0 +1,38 @@
import imghdr
from mitmproxy.contentviews import base
from mitmproxy.types import multidict
from . import image_parser
class ViewImage(base.View):
name = "Image"
prompt = ("image", "i")
# there is also a fallback in the auto view for image/*.
content_types = [
"image/png",
"image/jpeg",
"image/gif",
"image/vnd.microsoft.icon",
"image/x-icon",
"image/webp",
]
def __call__(self, data, **metadata):
image_type = imghdr.what('', h=data)
if image_type == 'png':
image_metadata = image_parser.parse_png(data)
elif image_type == 'gif':
image_metadata = image_parser.parse_gif(data)
elif image_type == 'jpeg':
image_metadata = image_parser.parse_jpeg(data)
else:
image_metadata = [
("Image Format", image_type or "unknown")
]
if image_type:
view_name = "{} Image".format(image_type.upper())
else:
view_name = "Unknown Image"
return view_name, base.format_dict(multidict.MultiDict(image_metadata))

View File

@@ -15,31 +15,28 @@ class ViewProtobuf(base.View):
"application/x-protobuffer",
]
@staticmethod
def is_available():
def is_available(self):
try:
p = subprocess.Popen(
["protoc", "--version"],
stdout=subprocess.PIPE
)
out, _ = p.communicate()
return out.startswith("libprotoc")
return out.startswith(b"libprotoc")
except:
return False
def decode_protobuf(self, content):
def __call__(self, data, **metadata):
if not self.is_available():
raise NotImplementedError("protoc not found. Please make sure 'protoc' is available in $PATH.")
# if Popen raises OSError, it will be caught in
# get_content_view and fall back to Raw
p = subprocess.Popen(['protoc', '--decode_raw'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate(input=content)
if out:
return out
else:
return err
def __call__(self, data, **metadata):
decoded = self.decode_protobuf(data)
decoded, _ = p.communicate(input=data)
if not decoded:
raise ValueError("Failed to parse input.")
return "Protobuf", base.format_text(decoded)

View File

@@ -0,0 +1,24 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was exif.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from .exif_le import ExifLe
from .exif_be import ExifBe
class Exif(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.endianness = self._io.read_u2le()
_on = self.endianness
if _on == 18761:
self.body = ExifLe(self._io)
elif _on == 19789:
self.body = ExifBe(self._io)

View File

@@ -0,0 +1,571 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was exif_be.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif_be.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
class ExifBe(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.version = self._io.read_u2be()
self.ifd0_ofs = self._io.read_u4be()
class Ifd(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.num_fields = self._io.read_u2be()
self.fields = [None] * (self.num_fields)
for i in range(self.num_fields):
self.fields[i] = self._root.IfdField(self._io, self, self._root)
self.next_ifd_ofs = self._io.read_u4be()
@property
def next_ifd(self):
if hasattr(self, '_m_next_ifd'):
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
if self.next_ifd_ofs != 0:
_pos = self._io.pos()
self._io.seek(self.next_ifd_ofs)
self._m_next_ifd = self._root.Ifd(self._io, self, self._root)
self._io.seek(_pos)
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
class IfdField(KaitaiStruct):
class FieldTypeEnum(Enum):
byte = 1
ascii_string = 2
word = 3
dword = 4
rational = 5
class TagEnum(Enum):
image_width = 256
image_height = 257
bits_per_sample = 258
compression = 259
photometric_interpretation = 262
thresholding = 263
cell_width = 264
cell_length = 265
fill_order = 266
document_name = 269
image_description = 270
make = 271
model = 272
strip_offsets = 273
orientation = 274
samples_per_pixel = 277
rows_per_strip = 278
strip_byte_counts = 279
min_sample_value = 280
max_sample_value = 281
x_resolution = 282
y_resolution = 283
planar_configuration = 284
page_name = 285
x_position = 286
y_position = 287
free_offsets = 288
free_byte_counts = 289
gray_response_unit = 290
gray_response_curve = 291
t4_options = 292
t6_options = 293
resolution_unit = 296
page_number = 297
color_response_unit = 300
transfer_function = 301
software = 305
modify_date = 306
artist = 315
host_computer = 316
predictor = 317
white_point = 318
primary_chromaticities = 319
color_map = 320
halftone_hints = 321
tile_width = 322
tile_length = 323
tile_offsets = 324
tile_byte_counts = 325
bad_fax_lines = 326
clean_fax_data = 327
consecutive_bad_fax_lines = 328
sub_ifd = 330
ink_set = 332
ink_names = 333
numberof_inks = 334
dot_range = 336
target_printer = 337
extra_samples = 338
sample_format = 339
s_min_sample_value = 340
s_max_sample_value = 341
transfer_range = 342
clip_path = 343
x_clip_path_units = 344
y_clip_path_units = 345
indexed = 346
jpeg_tables = 347
opi_proxy = 351
global_parameters_ifd = 400
profile_type = 401
fax_profile = 402
coding_methods = 403
version_year = 404
mode_number = 405
decode = 433
default_image_color = 434
t82_options = 435
jpeg_tables2 = 437
jpeg_proc = 512
thumbnail_offset = 513
thumbnail_length = 514
jpeg_restart_interval = 515
jpeg_lossless_predictors = 517
jpeg_point_transforms = 518
jpegq_tables = 519
jpegdc_tables = 520
jpegac_tables = 521
y_cb_cr_coefficients = 529
y_cb_cr_sub_sampling = 530
y_cb_cr_positioning = 531
reference_black_white = 532
strip_row_counts = 559
application_notes = 700
uspto_miscellaneous = 999
related_image_file_format = 4096
related_image_width = 4097
related_image_height = 4098
rating = 18246
xp_dip_xml = 18247
stitch_info = 18248
rating_percent = 18249
sony_raw_file_type = 28672
light_falloff_params = 28722
chromatic_aberration_corr_params = 28725
distortion_corr_params = 28727
image_id = 32781
wang_tag1 = 32931
wang_annotation = 32932
wang_tag3 = 32933
wang_tag4 = 32934
image_reference_points = 32953
region_xform_tack_point = 32954
warp_quadrilateral = 32955
affine_transform_mat = 32956
matteing = 32995
data_type = 32996
image_depth = 32997
tile_depth = 32998
image_full_width = 33300
image_full_height = 33301
texture_format = 33302
wrap_modes = 33303
fov_cot = 33304
matrix_world_to_screen = 33305
matrix_world_to_camera = 33306
model2 = 33405
cfa_repeat_pattern_dim = 33421
cfa_pattern2 = 33422
battery_level = 33423
kodak_ifd = 33424
copyright = 33432
exposure_time = 33434
f_number = 33437
md_file_tag = 33445
md_scale_pixel = 33446
md_color_table = 33447
md_lab_name = 33448
md_sample_info = 33449
md_prep_date = 33450
md_prep_time = 33451
md_file_units = 33452
pixel_scale = 33550
advent_scale = 33589
advent_revision = 33590
uic1_tag = 33628
uic2_tag = 33629
uic3_tag = 33630
uic4_tag = 33631
iptc_naa = 33723
intergraph_packet_data = 33918
intergraph_flag_registers = 33919
intergraph_matrix = 33920
ingr_reserved = 33921
model_tie_point = 33922
site = 34016
color_sequence = 34017
it8_header = 34018
raster_padding = 34019
bits_per_run_length = 34020
bits_per_extended_run_length = 34021
color_table = 34022
image_color_indicator = 34023
background_color_indicator = 34024
image_color_value = 34025
background_color_value = 34026
pixel_intensity_range = 34027
transparency_indicator = 34028
color_characterization = 34029
hc_usage = 34030
trap_indicator = 34031
cmyk_equivalent = 34032
sem_info = 34118
afcp_iptc = 34152
pixel_magic_jbig_options = 34232
jpl_carto_ifd = 34263
model_transform = 34264
wb_grgb_levels = 34306
leaf_data = 34310
photoshop_settings = 34377
exif_offset = 34665
icc_profile = 34675
tiff_fx_extensions = 34687
multi_profiles = 34688
shared_data = 34689
t88_options = 34690
image_layer = 34732
geo_tiff_directory = 34735
geo_tiff_double_params = 34736
geo_tiff_ascii_params = 34737
jbig_options = 34750
exposure_program = 34850
spectral_sensitivity = 34852
gps_info = 34853
iso = 34855
opto_electric_conv_factor = 34856
interlace = 34857
time_zone_offset = 34858
self_timer_mode = 34859
sensitivity_type = 34864
standard_output_sensitivity = 34865
recommended_exposure_index = 34866
iso_speed = 34867
iso_speed_latitudeyyy = 34868
iso_speed_latitudezzz = 34869
fax_recv_params = 34908
fax_sub_address = 34909
fax_recv_time = 34910
fedex_edr = 34929
leaf_sub_ifd = 34954
exif_version = 36864
date_time_original = 36867
create_date = 36868
google_plus_upload_code = 36873
offset_time = 36880
offset_time_original = 36881
offset_time_digitized = 36882
components_configuration = 37121
compressed_bits_per_pixel = 37122
shutter_speed_value = 37377
aperture_value = 37378
brightness_value = 37379
exposure_compensation = 37380
max_aperture_value = 37381
subject_distance = 37382
metering_mode = 37383
light_source = 37384
flash = 37385
focal_length = 37386
flash_energy = 37387
spatial_frequency_response = 37388
noise = 37389
focal_plane_x_resolution = 37390
focal_plane_y_resolution = 37391
focal_plane_resolution_unit = 37392
image_number = 37393
security_classification = 37394
image_history = 37395
subject_area = 37396
exposure_index = 37397
tiff_ep_standard_id = 37398
sensing_method = 37399
cip3_data_file = 37434
cip3_sheet = 37435
cip3_side = 37436
sto_nits = 37439
maker_note = 37500
user_comment = 37510
sub_sec_time = 37520
sub_sec_time_original = 37521
sub_sec_time_digitized = 37522
ms_document_text = 37679
ms_property_set_storage = 37680
ms_document_text_position = 37681
image_source_data = 37724
ambient_temperature = 37888
humidity = 37889
pressure = 37890
water_depth = 37891
acceleration = 37892
camera_elevation_angle = 37893
xp_title = 40091
xp_comment = 40092
xp_author = 40093
xp_keywords = 40094
xp_subject = 40095
flashpix_version = 40960
color_space = 40961
exif_image_width = 40962
exif_image_height = 40963
related_sound_file = 40964
interop_offset = 40965
samsung_raw_pointers_offset = 40976
samsung_raw_pointers_length = 40977
samsung_raw_byte_order = 41217
samsung_raw_unknown = 41218
flash_energy2 = 41483
spatial_frequency_response2 = 41484
noise2 = 41485
focal_plane_x_resolution2 = 41486
focal_plane_y_resolution2 = 41487
focal_plane_resolution_unit2 = 41488
image_number2 = 41489
security_classification2 = 41490
image_history2 = 41491
subject_location = 41492
exposure_index2 = 41493
tiff_ep_standard_id2 = 41494
sensing_method2 = 41495
file_source = 41728
scene_type = 41729
cfa_pattern = 41730
custom_rendered = 41985
exposure_mode = 41986
white_balance = 41987
digital_zoom_ratio = 41988
focal_length_in35mm_format = 41989
scene_capture_type = 41990
gain_control = 41991
contrast = 41992
saturation = 41993
sharpness = 41994
device_setting_description = 41995
subject_distance_range = 41996
image_unique_id = 42016
owner_name = 42032
serial_number = 42033
lens_info = 42034
lens_make = 42035
lens_model = 42036
lens_serial_number = 42037
gdal_metadata = 42112
gdal_no_data = 42113
gamma = 42240
expand_software = 44992
expand_lens = 44993
expand_film = 44994
expand_filter_lens = 44995
expand_scanner = 44996
expand_flash_lamp = 44997
pixel_format = 48129
transformation = 48130
uncompressed = 48131
image_type = 48132
image_width2 = 48256
image_height2 = 48257
width_resolution = 48258
height_resolution = 48259
image_offset = 48320
image_byte_count = 48321
alpha_offset = 48322
alpha_byte_count = 48323
image_data_discard = 48324
alpha_data_discard = 48325
oce_scanjob_desc = 50215
oce_application_selector = 50216
oce_id_number = 50217
oce_image_logic = 50218
annotations = 50255
print_im = 50341
original_file_name = 50547
uspto_original_content_type = 50560
dng_version = 50706
dng_backward_version = 50707
unique_camera_model = 50708
localized_camera_model = 50709
cfa_plane_color = 50710
cfa_layout = 50711
linearization_table = 50712
black_level_repeat_dim = 50713
black_level = 50714
black_level_delta_h = 50715
black_level_delta_v = 50716
white_level = 50717
default_scale = 50718
default_crop_origin = 50719
default_crop_size = 50720
color_matrix1 = 50721
color_matrix2 = 50722
camera_calibration1 = 50723
camera_calibration2 = 50724
reduction_matrix1 = 50725
reduction_matrix2 = 50726
analog_balance = 50727
as_shot_neutral = 50728
as_shot_white_xy = 50729
baseline_exposure = 50730
baseline_noise = 50731
baseline_sharpness = 50732
bayer_green_split = 50733
linear_response_limit = 50734
camera_serial_number = 50735
dng_lens_info = 50736
chroma_blur_radius = 50737
anti_alias_strength = 50738
shadow_scale = 50739
sr2_private = 50740
maker_note_safety = 50741
raw_image_segmentation = 50752
calibration_illuminant1 = 50778
calibration_illuminant2 = 50779
best_quality_scale = 50780
raw_data_unique_id = 50781
alias_layer_metadata = 50784
original_raw_file_name = 50827
original_raw_file_data = 50828
active_area = 50829
masked_areas = 50830
as_shot_icc_profile = 50831
as_shot_pre_profile_matrix = 50832
current_icc_profile = 50833
current_pre_profile_matrix = 50834
colorimetric_reference = 50879
s_raw_type = 50885
panasonic_title = 50898
panasonic_title2 = 50899
camera_calibration_sig = 50931
profile_calibration_sig = 50932
profile_ifd = 50933
as_shot_profile_name = 50934
noise_reduction_applied = 50935
profile_name = 50936
profile_hue_sat_map_dims = 50937
profile_hue_sat_map_data1 = 50938
profile_hue_sat_map_data2 = 50939
profile_tone_curve = 50940
profile_embed_policy = 50941
profile_copyright = 50942
forward_matrix1 = 50964
forward_matrix2 = 50965
preview_application_name = 50966
preview_application_version = 50967
preview_settings_name = 50968
preview_settings_digest = 50969
preview_color_space = 50970
preview_date_time = 50971
raw_image_digest = 50972
original_raw_file_digest = 50973
sub_tile_block_size = 50974
row_interleave_factor = 50975
profile_look_table_dims = 50981
profile_look_table_data = 50982
opcode_list1 = 51008
opcode_list2 = 51009
opcode_list3 = 51022
noise_profile = 51041
time_codes = 51043
frame_rate = 51044
t_stop = 51058
reel_name = 51081
original_default_final_size = 51089
original_best_quality_size = 51090
original_default_crop_size = 51091
camera_label = 51105
profile_hue_sat_map_encoding = 51107
profile_look_table_encoding = 51108
baseline_exposure_offset = 51109
default_black_render = 51110
new_raw_image_digest = 51111
raw_to_preview_gain = 51112
default_user_crop = 51125
padding = 59932
offset_schema = 59933
owner_name2 = 65000
serial_number2 = 65001
lens = 65002
kdc_ifd = 65024
raw_file = 65100
converter = 65101
white_balance2 = 65102
exposure = 65105
shadows = 65106
brightness = 65107
contrast2 = 65108
saturation2 = 65109
sharpness2 = 65110
smoothness = 65111
moire_filter = 65112
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.tag = self._root.IfdField.TagEnum(self._io.read_u2be())
self.field_type = self._root.IfdField.FieldTypeEnum(self._io.read_u2be())
self.length = self._io.read_u4be()
self.ofs_or_data = self._io.read_u4be()
@property
def type_byte_length(self):
if hasattr(self, '_m_type_byte_length'):
return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
self._m_type_byte_length = (2 if self.field_type == self._root.IfdField.FieldTypeEnum.word else (4 if self.field_type == self._root.IfdField.FieldTypeEnum.dword else 1))
return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
@property
def byte_length(self):
if hasattr(self, '_m_byte_length'):
return self._m_byte_length if hasattr(self, '_m_byte_length') else None
self._m_byte_length = (self.length * self.type_byte_length)
return self._m_byte_length if hasattr(self, '_m_byte_length') else None
@property
def is_immediate_data(self):
if hasattr(self, '_m_is_immediate_data'):
return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
self._m_is_immediate_data = self.byte_length <= 4
return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
@property
def data(self):
if hasattr(self, '_m_data'):
return self._m_data if hasattr(self, '_m_data') else None
if not self.is_immediate_data:
io = self._root._io
_pos = io.pos()
io.seek(self.ofs_or_data)
self._m_data = io.read_bytes(self.byte_length)
io.seek(_pos)
return self._m_data if hasattr(self, '_m_data') else None
@property
def ifd0(self):
if hasattr(self, '_m_ifd0'):
return self._m_ifd0 if hasattr(self, '_m_ifd0') else None
_pos = self._io.pos()
self._io.seek(self.ifd0_ofs)
self._m_ifd0 = self._root.Ifd(self._io, self, self._root)
self._io.seek(_pos)
return self._m_ifd0 if hasattr(self, '_m_ifd0') else None

View File

@@ -0,0 +1,571 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was exif_le.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif_le.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
class ExifLe(KaitaiStruct):
    """Little-endian EXIF (TIFF-style) payload: a version word followed by a
    chain of image file directories (IFDs).

    Generated by kaitai-struct-compiler from exif_le.ksy — edit the .ksy
    source, not this file.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        # When parsed standalone this object acts as its own root.
        self._root = _root if _root else self
        self.version = self._io.read_u2le()
        # Offset (from the start of the EXIF body) of the first IFD.
        self.ifd0_ofs = self._io.read_u4le()
    class Ifd(KaitaiStruct):
        """One IFD: a counted list of fields plus a link to the next IFD."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_fields = self._io.read_u2le()
            self.fields = [None] * (self.num_fields)
            for i in range(self.num_fields):
                self.fields[i] = self._root.IfdField(self._io, self, self._root)
            # Offset of the next IFD; 0 marks the end of the chain.
            self.next_ifd_ofs = self._io.read_u4le()
        @property
        def next_ifd(self):
            """Lazily parse the next IFD in the chain; None when this is the
            last IFD (``next_ifd_ofs == 0``)."""
            if hasattr(self, '_m_next_ifd'):
                return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
            if self.next_ifd_ofs != 0:
                # Save position, seek to the linked IFD, parse, restore.
                _pos = self._io.pos()
                self._io.seek(self.next_ifd_ofs)
                self._m_next_ifd = self._root.Ifd(self._io, self, self._root)
                self._io.seek(_pos)
            return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
    class IfdField(KaitaiStruct):
        """A single IFD entry: tag, element type, element count, and either
        an inline value or an offset to the out-of-line payload."""
        class FieldTypeEnum(Enum):
            # TIFF field data types.
            byte = 1
            ascii_string = 2
            word = 3
            dword = 4
            rational = 5
        class TagEnum(Enum):
            # EXIF/TIFF tag numbers, including DNG and vendor extensions.
            image_width = 256
            image_height = 257
            bits_per_sample = 258
            compression = 259
            photometric_interpretation = 262
            thresholding = 263
            cell_width = 264
            cell_length = 265
            fill_order = 266
            document_name = 269
            image_description = 270
            make = 271
            model = 272
            strip_offsets = 273
            orientation = 274
            samples_per_pixel = 277
            rows_per_strip = 278
            strip_byte_counts = 279
            min_sample_value = 280
            max_sample_value = 281
            x_resolution = 282
            y_resolution = 283
            planar_configuration = 284
            page_name = 285
            x_position = 286
            y_position = 287
            free_offsets = 288
            free_byte_counts = 289
            gray_response_unit = 290
            gray_response_curve = 291
            t4_options = 292
            t6_options = 293
            resolution_unit = 296
            page_number = 297
            color_response_unit = 300
            transfer_function = 301
            software = 305
            modify_date = 306
            artist = 315
            host_computer = 316
            predictor = 317
            white_point = 318
            primary_chromaticities = 319
            color_map = 320
            halftone_hints = 321
            tile_width = 322
            tile_length = 323
            tile_offsets = 324
            tile_byte_counts = 325
            bad_fax_lines = 326
            clean_fax_data = 327
            consecutive_bad_fax_lines = 328
            sub_ifd = 330
            ink_set = 332
            ink_names = 333
            numberof_inks = 334
            dot_range = 336
            target_printer = 337
            extra_samples = 338
            sample_format = 339
            s_min_sample_value = 340
            s_max_sample_value = 341
            transfer_range = 342
            clip_path = 343
            x_clip_path_units = 344
            y_clip_path_units = 345
            indexed = 346
            jpeg_tables = 347
            opi_proxy = 351
            global_parameters_ifd = 400
            profile_type = 401
            fax_profile = 402
            coding_methods = 403
            version_year = 404
            mode_number = 405
            decode = 433
            default_image_color = 434
            t82_options = 435
            jpeg_tables2 = 437
            jpeg_proc = 512
            thumbnail_offset = 513
            thumbnail_length = 514
            jpeg_restart_interval = 515
            jpeg_lossless_predictors = 517
            jpeg_point_transforms = 518
            jpegq_tables = 519
            jpegdc_tables = 520
            jpegac_tables = 521
            y_cb_cr_coefficients = 529
            y_cb_cr_sub_sampling = 530
            y_cb_cr_positioning = 531
            reference_black_white = 532
            strip_row_counts = 559
            application_notes = 700
            uspto_miscellaneous = 999
            related_image_file_format = 4096
            related_image_width = 4097
            related_image_height = 4098
            rating = 18246
            xp_dip_xml = 18247
            stitch_info = 18248
            rating_percent = 18249
            sony_raw_file_type = 28672
            light_falloff_params = 28722
            chromatic_aberration_corr_params = 28725
            distortion_corr_params = 28727
            image_id = 32781
            wang_tag1 = 32931
            wang_annotation = 32932
            wang_tag3 = 32933
            wang_tag4 = 32934
            image_reference_points = 32953
            region_xform_tack_point = 32954
            warp_quadrilateral = 32955
            affine_transform_mat = 32956
            matteing = 32995
            data_type = 32996
            image_depth = 32997
            tile_depth = 32998
            image_full_width = 33300
            image_full_height = 33301
            texture_format = 33302
            wrap_modes = 33303
            fov_cot = 33304
            matrix_world_to_screen = 33305
            matrix_world_to_camera = 33306
            model2 = 33405
            cfa_repeat_pattern_dim = 33421
            cfa_pattern2 = 33422
            battery_level = 33423
            kodak_ifd = 33424
            copyright = 33432
            exposure_time = 33434
            f_number = 33437
            md_file_tag = 33445
            md_scale_pixel = 33446
            md_color_table = 33447
            md_lab_name = 33448
            md_sample_info = 33449
            md_prep_date = 33450
            md_prep_time = 33451
            md_file_units = 33452
            pixel_scale = 33550
            advent_scale = 33589
            advent_revision = 33590
            uic1_tag = 33628
            uic2_tag = 33629
            uic3_tag = 33630
            uic4_tag = 33631
            iptc_naa = 33723
            intergraph_packet_data = 33918
            intergraph_flag_registers = 33919
            intergraph_matrix = 33920
            ingr_reserved = 33921
            model_tie_point = 33922
            site = 34016
            color_sequence = 34017
            it8_header = 34018
            raster_padding = 34019
            bits_per_run_length = 34020
            bits_per_extended_run_length = 34021
            color_table = 34022
            image_color_indicator = 34023
            background_color_indicator = 34024
            image_color_value = 34025
            background_color_value = 34026
            pixel_intensity_range = 34027
            transparency_indicator = 34028
            color_characterization = 34029
            hc_usage = 34030
            trap_indicator = 34031
            cmyk_equivalent = 34032
            sem_info = 34118
            afcp_iptc = 34152
            pixel_magic_jbig_options = 34232
            jpl_carto_ifd = 34263
            model_transform = 34264
            wb_grgb_levels = 34306
            leaf_data = 34310
            photoshop_settings = 34377
            exif_offset = 34665
            icc_profile = 34675
            tiff_fx_extensions = 34687
            multi_profiles = 34688
            shared_data = 34689
            t88_options = 34690
            image_layer = 34732
            geo_tiff_directory = 34735
            geo_tiff_double_params = 34736
            geo_tiff_ascii_params = 34737
            jbig_options = 34750
            exposure_program = 34850
            spectral_sensitivity = 34852
            gps_info = 34853
            iso = 34855
            opto_electric_conv_factor = 34856
            interlace = 34857
            time_zone_offset = 34858
            self_timer_mode = 34859
            sensitivity_type = 34864
            standard_output_sensitivity = 34865
            recommended_exposure_index = 34866
            iso_speed = 34867
            iso_speed_latitudeyyy = 34868
            iso_speed_latitudezzz = 34869
            fax_recv_params = 34908
            fax_sub_address = 34909
            fax_recv_time = 34910
            fedex_edr = 34929
            leaf_sub_ifd = 34954
            exif_version = 36864
            date_time_original = 36867
            create_date = 36868
            google_plus_upload_code = 36873
            offset_time = 36880
            offset_time_original = 36881
            offset_time_digitized = 36882
            components_configuration = 37121
            compressed_bits_per_pixel = 37122
            shutter_speed_value = 37377
            aperture_value = 37378
            brightness_value = 37379
            exposure_compensation = 37380
            max_aperture_value = 37381
            subject_distance = 37382
            metering_mode = 37383
            light_source = 37384
            flash = 37385
            focal_length = 37386
            flash_energy = 37387
            spatial_frequency_response = 37388
            noise = 37389
            focal_plane_x_resolution = 37390
            focal_plane_y_resolution = 37391
            focal_plane_resolution_unit = 37392
            image_number = 37393
            security_classification = 37394
            image_history = 37395
            subject_area = 37396
            exposure_index = 37397
            tiff_ep_standard_id = 37398
            sensing_method = 37399
            cip3_data_file = 37434
            cip3_sheet = 37435
            cip3_side = 37436
            sto_nits = 37439
            maker_note = 37500
            user_comment = 37510
            sub_sec_time = 37520
            sub_sec_time_original = 37521
            sub_sec_time_digitized = 37522
            ms_document_text = 37679
            ms_property_set_storage = 37680
            ms_document_text_position = 37681
            image_source_data = 37724
            ambient_temperature = 37888
            humidity = 37889
            pressure = 37890
            water_depth = 37891
            acceleration = 37892
            camera_elevation_angle = 37893
            xp_title = 40091
            xp_comment = 40092
            xp_author = 40093
            xp_keywords = 40094
            xp_subject = 40095
            flashpix_version = 40960
            color_space = 40961
            exif_image_width = 40962
            exif_image_height = 40963
            related_sound_file = 40964
            interop_offset = 40965
            samsung_raw_pointers_offset = 40976
            samsung_raw_pointers_length = 40977
            samsung_raw_byte_order = 41217
            samsung_raw_unknown = 41218
            flash_energy2 = 41483
            spatial_frequency_response2 = 41484
            noise2 = 41485
            focal_plane_x_resolution2 = 41486
            focal_plane_y_resolution2 = 41487
            focal_plane_resolution_unit2 = 41488
            image_number2 = 41489
            security_classification2 = 41490
            image_history2 = 41491
            subject_location = 41492
            exposure_index2 = 41493
            tiff_ep_standard_id2 = 41494
            sensing_method2 = 41495
            file_source = 41728
            scene_type = 41729
            cfa_pattern = 41730
            custom_rendered = 41985
            exposure_mode = 41986
            white_balance = 41987
            digital_zoom_ratio = 41988
            focal_length_in35mm_format = 41989
            scene_capture_type = 41990
            gain_control = 41991
            contrast = 41992
            saturation = 41993
            sharpness = 41994
            device_setting_description = 41995
            subject_distance_range = 41996
            image_unique_id = 42016
            owner_name = 42032
            serial_number = 42033
            lens_info = 42034
            lens_make = 42035
            lens_model = 42036
            lens_serial_number = 42037
            gdal_metadata = 42112
            gdal_no_data = 42113
            gamma = 42240
            expand_software = 44992
            expand_lens = 44993
            expand_film = 44994
            expand_filter_lens = 44995
            expand_scanner = 44996
            expand_flash_lamp = 44997
            pixel_format = 48129
            transformation = 48130
            uncompressed = 48131
            image_type = 48132
            image_width2 = 48256
            image_height2 = 48257
            width_resolution = 48258
            height_resolution = 48259
            image_offset = 48320
            image_byte_count = 48321
            alpha_offset = 48322
            alpha_byte_count = 48323
            image_data_discard = 48324
            alpha_data_discard = 48325
            oce_scanjob_desc = 50215
            oce_application_selector = 50216
            oce_id_number = 50217
            oce_image_logic = 50218
            annotations = 50255
            print_im = 50341
            original_file_name = 50547
            uspto_original_content_type = 50560
            dng_version = 50706
            dng_backward_version = 50707
            unique_camera_model = 50708
            localized_camera_model = 50709
            cfa_plane_color = 50710
            cfa_layout = 50711
            linearization_table = 50712
            black_level_repeat_dim = 50713
            black_level = 50714
            black_level_delta_h = 50715
            black_level_delta_v = 50716
            white_level = 50717
            default_scale = 50718
            default_crop_origin = 50719
            default_crop_size = 50720
            color_matrix1 = 50721
            color_matrix2 = 50722
            camera_calibration1 = 50723
            camera_calibration2 = 50724
            reduction_matrix1 = 50725
            reduction_matrix2 = 50726
            analog_balance = 50727
            as_shot_neutral = 50728
            as_shot_white_xy = 50729
            baseline_exposure = 50730
            baseline_noise = 50731
            baseline_sharpness = 50732
            bayer_green_split = 50733
            linear_response_limit = 50734
            camera_serial_number = 50735
            dng_lens_info = 50736
            chroma_blur_radius = 50737
            anti_alias_strength = 50738
            shadow_scale = 50739
            sr2_private = 50740
            maker_note_safety = 50741
            raw_image_segmentation = 50752
            calibration_illuminant1 = 50778
            calibration_illuminant2 = 50779
            best_quality_scale = 50780
            raw_data_unique_id = 50781
            alias_layer_metadata = 50784
            original_raw_file_name = 50827
            original_raw_file_data = 50828
            active_area = 50829
            masked_areas = 50830
            as_shot_icc_profile = 50831
            as_shot_pre_profile_matrix = 50832
            current_icc_profile = 50833
            current_pre_profile_matrix = 50834
            colorimetric_reference = 50879
            s_raw_type = 50885
            panasonic_title = 50898
            panasonic_title2 = 50899
            camera_calibration_sig = 50931
            profile_calibration_sig = 50932
            profile_ifd = 50933
            as_shot_profile_name = 50934
            noise_reduction_applied = 50935
            profile_name = 50936
            profile_hue_sat_map_dims = 50937
            profile_hue_sat_map_data1 = 50938
            profile_hue_sat_map_data2 = 50939
            profile_tone_curve = 50940
            profile_embed_policy = 50941
            profile_copyright = 50942
            forward_matrix1 = 50964
            forward_matrix2 = 50965
            preview_application_name = 50966
            preview_application_version = 50967
            preview_settings_name = 50968
            preview_settings_digest = 50969
            preview_color_space = 50970
            preview_date_time = 50971
            raw_image_digest = 50972
            original_raw_file_digest = 50973
            sub_tile_block_size = 50974
            row_interleave_factor = 50975
            profile_look_table_dims = 50981
            profile_look_table_data = 50982
            opcode_list1 = 51008
            opcode_list2 = 51009
            opcode_list3 = 51022
            noise_profile = 51041
            time_codes = 51043
            frame_rate = 51044
            t_stop = 51058
            reel_name = 51081
            original_default_final_size = 51089
            original_best_quality_size = 51090
            original_default_crop_size = 51091
            camera_label = 51105
            profile_hue_sat_map_encoding = 51107
            profile_look_table_encoding = 51108
            baseline_exposure_offset = 51109
            default_black_render = 51110
            new_raw_image_digest = 51111
            raw_to_preview_gain = 51112
            default_user_crop = 51125
            padding = 59932
            offset_schema = 59933
            owner_name2 = 65000
            serial_number2 = 65001
            lens = 65002
            kdc_ifd = 65024
            raw_file = 65100
            converter = 65101
            white_balance2 = 65102
            exposure = 65105
            shadows = 65106
            brightness = 65107
            contrast2 = 65108
            saturation2 = 65109
            sharpness2 = 65110
            smoothness = 65111
            moire_filter = 65112
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.tag = self._root.IfdField.TagEnum(self._io.read_u2le())
            self.field_type = self._root.IfdField.FieldTypeEnum(self._io.read_u2le())
            # Number of elements of field_type (not a byte count).
            self.length = self._io.read_u4le()
            # Holds the value itself when it fits in 4 bytes, otherwise an
            # offset to the out-of-line payload.
            self.ofs_or_data = self._io.read_u4le()
        @property
        def type_byte_length(self):
            """Size in bytes of one element: word=2, dword=4, everything else 1.

            NOTE(review): rational (8 bytes per element) also maps to 1 here —
            this mirrors the generating .ksy; confirm against the .ksy source.
            """
            if hasattr(self, '_m_type_byte_length'):
                return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
            self._m_type_byte_length = (2 if self.field_type == self._root.IfdField.FieldTypeEnum.word else (4 if self.field_type == self._root.IfdField.FieldTypeEnum.dword else 1))
            return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
        @property
        def byte_length(self):
            """Total payload size in bytes: element count times element size."""
            if hasattr(self, '_m_byte_length'):
                return self._m_byte_length if hasattr(self, '_m_byte_length') else None
            self._m_byte_length = (self.length * self.type_byte_length)
            return self._m_byte_length if hasattr(self, '_m_byte_length') else None
        @property
        def is_immediate_data(self):
            """True when the payload fits inline in the 4-byte ofs_or_data slot."""
            if hasattr(self, '_m_is_immediate_data'):
                return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
            self._m_is_immediate_data = self.byte_length <= 4
            return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
        @property
        def data(self):
            """Out-of-line payload bytes read from the root stream at
            ofs_or_data; None when the data is stored inline."""
            if hasattr(self, '_m_data'):
                return self._m_data if hasattr(self, '_m_data') else None
            if not self.is_immediate_data:
                # Offsets are relative to the start of the EXIF body, so read
                # from the root stream, preserving its current position.
                io = self._root._io
                _pos = io.pos()
                io.seek(self.ofs_or_data)
                self._m_data = io.read_bytes(self.byte_length)
                io.seek(_pos)
            return self._m_data if hasattr(self, '_m_data') else None
    @property
    def ifd0(self):
        """Lazily parse and memoize the first IFD located at ifd0_ofs."""
        if hasattr(self, '_m_ifd0'):
            return self._m_ifd0 if hasattr(self, '_m_ifd0') else None
        # Save position, parse the IFD at its offset, restore position.
        _pos = self._io.pos()
        self._io.seek(self.ifd0_ofs)
        self._m_ifd0 = self._root.Ifd(self._io, self, self._root)
        self._io.seek(_pos)
        return self._m_ifd0 if hasattr(self, '_m_ifd0') else None

View File

@@ -0,0 +1,247 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was gif.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/562154250bea0081fed4e232751b934bc270a0c7/image/gif.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
class Gif(KaitaiStruct):
    """GIF image: header, logical screen descriptor, optional global color
    table, then a sequence of blocks until end of stream.

    Generated by kaitai-struct-compiler from gif.ksy — edit the .ksy source,
    not this file.
    """
    class BlockType(Enum):
        # Introducer byte of each top-level block.
        extension = 33
        local_image_descriptor = 44
        end_of_file = 59
    class ExtensionLabel(Enum):
        # Label byte following the extension introducer.
        graphic_control = 249
        comment = 254
        application = 255
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.header = self._root.Header(self._io, self, self._root)
        self.logical_screen_descriptor = self._root.LogicalScreenDescriptor(self._io, self, self._root)
        if self.logical_screen_descriptor.has_color_table:
            # Global color table: color_table_size RGB triplets, parsed from
            # an isolated sub-stream.
            self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3))
            io = KaitaiStream(BytesIO(self._raw_global_color_table))
            self.global_color_table = self._root.ColorTable(io, self, self._root)
        self.blocks = []
        while not self._io.is_eof():
            self.blocks.append(self._root.Block(self._io, self, self._root))
    class ImageData(KaitaiStruct):
        """LZW minimum code size followed by the compressed pixel sub-blocks."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.lzw_min_code_size = self._io.read_u1()
            self.subblocks = self._root.Subblocks(self._io, self, self._root)
    class ColorTableEntry(KaitaiStruct):
        """One RGB palette entry."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.red = self._io.read_u1()
            self.green = self._io.read_u1()
            self.blue = self._io.read_u1()
    class LogicalScreenDescriptor(KaitaiStruct):
        """Screen dimensions and global color-table flags."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.screen_width = self._io.read_u2le()
            self.screen_height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            self.bg_color_index = self._io.read_u1()
            self.pixel_aspect_ratio = self._io.read_u1()
        @property
        def has_color_table(self):
            """Bit 7 of flags: a global color table follows."""
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None
            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None
        @property
        def color_table_size(self):
            """Number of entries: 2^(N+1), N = low 3 bits of flags."""
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None
            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None
    class LocalImageDescriptor(KaitaiStruct):
        """One image frame: geometry, optional local color table, pixel data."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.left = self._io.read_u2le()
            self.top = self._io.read_u2le()
            self.width = self._io.read_u2le()
            self.height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            if self.has_color_table:
                # Local color table overrides the global one for this frame.
                self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3))
                io = KaitaiStream(BytesIO(self._raw_local_color_table))
                self.local_color_table = self._root.ColorTable(io, self, self._root)
            self.image_data = self._root.ImageData(self._io, self, self._root)
        @property
        def has_color_table(self):
            """Bit 7 of flags: a local color table follows."""
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None
            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None
        @property
        def has_interlace(self):
            """Bit 6 of flags: rows are stored interlaced."""
            if hasattr(self, '_m_has_interlace'):
                return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None
            self._m_has_interlace = (self.flags & 64) != 0
            return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None
        @property
        def has_sorted_color_table(self):
            """Bit 5 of flags: local color table is sorted by importance."""
            if hasattr(self, '_m_has_sorted_color_table'):
                return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None
            self._m_has_sorted_color_table = (self.flags & 32) != 0
            return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None
        @property
        def color_table_size(self):
            """Number of entries: 2^(N+1), N = low 3 bits of flags."""
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None
            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None
    class Block(KaitaiStruct):
        """Top-level block dispatched on its introducer byte; the trailer
        (end_of_file) carries no body."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.block_type = self._root.BlockType(self._io.read_u1())
            _on = self.block_type
            if _on == self._root.BlockType.extension:
                self.body = self._root.Extension(self._io, self, self._root)
            elif _on == self._root.BlockType.local_image_descriptor:
                self.body = self._root.LocalImageDescriptor(self._io, self, self._root)
    class ColorTable(KaitaiStruct):
        """RGB entries until the end of the (sub-)stream."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while not self._io.is_eof():
                self.entries.append(self._root.ColorTableEntry(self._io, self, self._root))
    class Header(KaitaiStruct):
        """'GIF' magic plus a 3-byte version ('87a' or '89a' in practice)."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # b'GIF' expressed as signed bytes.
            self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70))
            self.version = self._io.read_bytes(3)
    class ExtGraphicControl(KaitaiStruct):
        """Graphic control extension: disposal/transparency flags and delay."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # Fixed block size of 4 bytes for this extension.
            self.block_size = self._io.ensure_fixed_contents(struct.pack('1b', 4))
            self.flags = self._io.read_u1()
            self.delay_time = self._io.read_u2le()
            self.transparent_idx = self._io.read_u1()
            self.terminator = self._io.ensure_fixed_contents(struct.pack('1b', 0))
        @property
        def transparent_color_flag(self):
            """Bit 0 of flags: transparent_idx is meaningful."""
            if hasattr(self, '_m_transparent_color_flag'):
                return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None
            self._m_transparent_color_flag = (self.flags & 1) != 0
            return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None
        @property
        def user_input_flag(self):
            """Bit 1 of flags: wait for user input before continuing."""
            if hasattr(self, '_m_user_input_flag'):
                return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None
            self._m_user_input_flag = (self.flags & 2) != 0
            return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None
    class Subblock(KaitaiStruct):
        """One length-prefixed data sub-block; num_bytes == 0 terminates a run."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_bytes = self._io.read_u1()
            self.bytes = self._io.read_bytes(self.num_bytes)
    class ExtApplication(KaitaiStruct):
        """Application extension: identifier sub-block plus payload sub-blocks."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.application_id = self._root.Subblock(self._io, self, self._root)
            self.subblocks = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.subblocks.append(_)
                # A zero-length sub-block terminates the sequence.
                if _.num_bytes == 0:
                    break
    class Subblocks(KaitaiStruct):
        """A run of sub-blocks terminated by a zero-length sub-block."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.entries.append(_)
                if _.num_bytes == 0:
                    break
    class Extension(KaitaiStruct):
        """Extension block dispatched on its label byte; unknown labels fall
        back to raw sub-blocks."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.label = self._root.ExtensionLabel(self._io.read_u1())
            _on = self.label
            if _on == self._root.ExtensionLabel.application:
                self.body = self._root.ExtApplication(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.comment:
                self.body = self._root.Subblocks(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.graphic_control:
                self.body = self._root.ExtGraphicControl(self._io, self, self._root)
            else:
                self.body = self._root.Subblocks(self._io, self, self._root)

View File

@@ -0,0 +1,206 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was jpeg.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/jpeg.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from .exif import Exif
class Jpeg(KaitaiStruct):
    """JPEG stream: a sequence of 0xFF-marker segments parsed until EOF.

    Generated by kaitai-struct-compiler from jpeg.ksy — edit the .ksy source,
    not this file.
    """
    class ComponentId(Enum):
        # Component identifiers used in SOF/SOS segments.
        y = 1
        cb = 2
        cr = 3
        i = 4
        q = 5
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.segments = []
        while not self._io.is_eof():
            self.segments.append(self._root.Segment(self._io, self, self._root))
    class Segment(KaitaiStruct):
        """One marker segment; known payloads are parsed from a
        length-bounded sub-stream, others kept as raw bytes."""
        class MarkerEnum(Enum):
            tem = 1
            sof0 = 192
            sof1 = 193
            sof2 = 194
            sof3 = 195
            dht = 196
            sof5 = 197
            sof6 = 198
            sof7 = 199
            soi = 216
            eoi = 217
            sos = 218
            dqt = 219
            dnl = 220
            dri = 221
            dhp = 222
            app0 = 224
            app1 = 225
            app2 = 226
            app3 = 227
            app4 = 228
            app5 = 229
            app6 = 230
            app7 = 231
            app8 = 232
            app9 = 233
            app10 = 234
            app11 = 235
            app12 = 236
            app13 = 237
            app14 = 238
            app15 = 239
            com = 254
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # Every segment starts with 0xFF (packed here as signed byte -1).
            self.magic = self._io.ensure_fixed_contents(struct.pack('1b', -1))
            self.marker = self._root.Segment.MarkerEnum(self._io.read_u1())
            # SOI and EOI are bare markers: no length word, no payload.
            if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
                self.length = self._io.read_u2be()
            if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
                _on = self.marker
                # Payload length excludes the 2-byte length field itself.
                if _on == self._root.Segment.MarkerEnum.sos:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentSos(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.app1:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentApp1(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.sof0:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentSof0(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.app0:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentApp0(io, self, self._root)
                else:
                    self.data = self._io.read_bytes((self.length - 2))
            if self.marker == self._root.Segment.MarkerEnum.sos:
                # Entropy-coded image data runs to the end of the stream.
                self.image_data = self._io.read_bytes_full()
    class SegmentSos(KaitaiStruct):
        """Start-of-scan header: per-component Huffman table selections."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_components = self._io.read_u1()
            self.components = [None] * (self.num_components)
            for i in range(self.num_components):
                self.components[i] = self._root.SegmentSos.Component(self._io, self, self._root)
            self.start_spectral_selection = self._io.read_u1()
            self.end_spectral = self._io.read_u1()
            self.appr_bit_pos = self._io.read_u1()
        class Component(KaitaiStruct):
            """One scan component: id plus packed Huffman table selectors."""
            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self.id = self._root.ComponentId(self._io.read_u1())
                self.huffman_table = self._io.read_u1()
    class SegmentApp1(KaitaiStruct):
        """APP1 segment: parsed as embedded EXIF when the magic is 'Exif'."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.magic = self._io.read_strz("ASCII", 0, False, True, True)
            _on = self.magic
            if _on == u"Exif":
                self.body = self._root.ExifInJpeg(self._io, self, self._root)
    class SegmentSof0(KaitaiStruct):
        """Baseline start-of-frame: image geometry and component layout."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.bits_per_sample = self._io.read_u1()
            self.image_height = self._io.read_u2be()
            self.image_width = self._io.read_u2be()
            self.num_components = self._io.read_u1()
            self.components = [None] * (self.num_components)
            for i in range(self.num_components):
                self.components[i] = self._root.SegmentSof0.Component(self._io, self, self._root)
        class Component(KaitaiStruct):
            """One frame component: id, packed sampling factors, DQT selector."""
            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self.id = self._root.ComponentId(self._io.read_u1())
                self.sampling_factors = self._io.read_u1()
                self.quantization_table_id = self._io.read_u1()
            @property
            def sampling_x(self):
                """Horizontal sampling factor: high nibble of sampling_factors."""
                if hasattr(self, '_m_sampling_x'):
                    return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None
                self._m_sampling_x = ((self.sampling_factors & 240) >> 4)
                return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None
            @property
            def sampling_y(self):
                """Vertical sampling factor: low nibble of sampling_factors."""
                if hasattr(self, '_m_sampling_y'):
                    return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None
                self._m_sampling_y = (self.sampling_factors & 15)
                return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None
    class ExifInJpeg(KaitaiStruct):
        """EXIF body embedded in APP1: a padding zero byte, then the EXIF
        payload handed to the project's Exif parser via a sub-stream."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.extra_zero = self._io.ensure_fixed_contents(struct.pack('1b', 0))
            self._raw_data = self._io.read_bytes_full()
            io = KaitaiStream(BytesIO(self._raw_data))
            self.data = Exif(io)
    class SegmentApp0(KaitaiStruct):
        """APP0 (JFIF) segment: version, pixel density, optional thumbnail."""
        class DensityUnit(Enum):
            no_units = 0
            pixels_per_inch = 1
            pixels_per_cm = 2
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.magic = self._io.read_str_byte_limit(5, "ASCII")
            self.version_major = self._io.read_u1()
            self.version_minor = self._io.read_u1()
            self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1())
            self.density_x = self._io.read_u2be()
            self.density_y = self._io.read_u2be()
            self.thumbnail_x = self._io.read_u1()
            self.thumbnail_y = self._io.read_u1()
            # Uncompressed RGB thumbnail: x * y pixels, 3 bytes each.
            self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3))

View File

@@ -0,0 +1,289 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was png.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/9370c720b7d2ad329102d89bdc880ba6a706ef26/image/png.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
class Png(KaitaiStruct):
class ColorType(Enum):
greyscale = 0
truecolor = 2
indexed = 3
greyscale_alpha = 4
truecolor_alpha = 6
class PhysUnit(Enum):
unknown = 0
meter = 1
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.ensure_fixed_contents(struct.pack('8b', -119, 80, 78, 71, 13, 10, 26, 10))
self.ihdr_len = self._io.ensure_fixed_contents(struct.pack('4b', 0, 0, 0, 13))
self.ihdr_type = self._io.ensure_fixed_contents(struct.pack('4b', 73, 72, 68, 82))
self.ihdr = self._root.IhdrChunk(self._io, self, self._root)
self.ihdr_crc = self._io.read_bytes(4)
self.chunks = []
while not self._io.is_eof():
self.chunks.append(self._root.Chunk(self._io, self, self._root))
class Rgb(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.r = self._io.read_u1()
self.g = self._io.read_u1()
self.b = self._io.read_u1()
class Chunk(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.len = self._io.read_u4be()
self.type = self._io.read_str_byte_limit(4, "UTF-8")
_on = self.type
if _on == u"iTXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.InternationalTextChunk(io, self, self._root)
elif _on == u"gAMA":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.GamaChunk(io, self, self._root)
elif _on == u"tIME":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TimeChunk(io, self, self._root)
elif _on == u"PLTE":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PlteChunk(io, self, self._root)
elif _on == u"bKGD":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.BkgdChunk(io, self, self._root)
elif _on == u"pHYs":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.PhysChunk(io, self, self._root)
elif _on == u"tEXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.TextChunk(io, self, self._root)
elif _on == u"cHRM":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.ChrmChunk(io, self, self._root)
elif _on == u"sRGB":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.SrgbChunk(io, self, self._root)
elif _on == u"zTXt":
self._raw_body = self._io.read_bytes(self.len)
io = KaitaiStream(BytesIO(self._raw_body))
self.body = self._root.CompressedTextChunk(io, self, self._root)
else:
self.body = self._io.read_bytes(self.len)
self.crc = self._io.read_bytes(4)
class BkgdIndexed(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.palette_index = self._io.read_u1()
class Point(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.x_int = self._io.read_u4be()
self.y_int = self._io.read_u4be()
@property
def x(self):
if hasattr(self, '_m_x'):
return self._m_x if hasattr(self, '_m_x') else None
self._m_x = (self.x_int / 100000.0)
return self._m_x if hasattr(self, '_m_x') else None
@property
def y(self):
if hasattr(self, '_m_y'):
return self._m_y if hasattr(self, '_m_y') else None
self._m_y = (self.y_int / 100000.0)
return self._m_y if hasattr(self, '_m_y') else None
class BkgdGreyscale(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.value = self._io.read_u2be()
class ChrmChunk(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.white_point = self._root.Point(self._io, self, self._root)
self.red = self._root.Point(self._io, self, self._root)
self.green = self._root.Point(self._io, self, self._root)
self.blue = self._root.Point(self._io, self, self._root)
class IhdrChunk(KaitaiStruct):
    """IHDR chunk body: image dimensions and encoding parameters."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        stream = self._io
        # Fields are consumed in on-wire order; do not reorder these reads.
        self.width = stream.read_u4be()
        self.height = stream.read_u4be()
        self.bit_depth = stream.read_u1()
        self.color_type = self._root.ColorType(stream.read_u1())
        self.compression_method = stream.read_u1()
        self.filter_method = stream.read_u1()
        self.interlace_method = stream.read_u1()
class PlteChunk(KaitaiStruct):
    """PLTE chunk body: RGB palette entries until the chunk substream ends."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        root = self._root
        self.entries = []
        add_entry = self.entries.append
        # Consume RGB triples until the chunk's substream is exhausted.
        while not self._io.is_eof():
            add_entry(root.Rgb(self._io, self, root))
class SrgbChunk(KaitaiStruct):
    """sRGB chunk body: a single rendering-intent byte."""

    class Intent(Enum):
        perceptual = 0
        relative_colorimetric = 1
        saturation = 2
        absolute_colorimetric = 3

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        # Map the raw byte onto the Intent enumeration.
        raw_intent = self._io.read_u1()
        self.render_intent = self._root.SrgbChunk.Intent(raw_intent)
class CompressedTextChunk(KaitaiStruct):
    """zTXt chunk body: a keyword plus a zlib-compressed text payload."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        stream = self._io
        self.keyword = stream.read_strz("UTF-8", 0, False, True, True)
        # presumably always 0 (zlib/deflate), the only method PNG defines — not validated here
        self.compression_method = stream.read_u1()
        self._raw_text_datastream = stream.read_bytes_full()
        # Inflate the remainder of the chunk into the decompressed text bytes.
        self.text_datastream = zlib.decompress(self._raw_text_datastream)
class BkgdTruecolor(KaitaiStruct):
    """Background color for a truecolor image: three u2be samples (R, G, B)."""

    def __init__(self, _io, _parent=None, _root=None):
        stream = _io
        self._io = stream
        self._parent = _parent
        self._root = _root or self
        # Channels are stored red, green, blue — read in that order.
        self.red = stream.read_u2be()
        self.green = stream.read_u2be()
        self.blue = stream.read_u2be()
class GamaChunk(KaitaiStruct):
    """gAMA chunk body: image gamma stored as a fixed-point u4be integer."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        # Gamma multiplied by 100000, exactly as stored in the chunk.
        self.gamma_int = self._io.read_u4be()

    @property
    def gamma_ratio(self):
        """Reciprocal gamma (100000 / gamma_int), computed lazily and cached.

        Raises ZeroDivisionError if the chunk stored a zero gamma, same as the
        original generated code.
        """
        # The generated code re-checked hasattr() right after assigning the
        # cache attribute, making its `else None` branch dead code; a single
        # guard around the computation is equivalent and clearer.
        if not hasattr(self, '_m_gamma_ratio'):
            self._m_gamma_ratio = 100000.0 / self.gamma_int
        return self._m_gamma_ratio
class BkgdChunk(KaitaiStruct):
    """bKGD chunk body: background color, whose layout depends on the IHDR color type."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        root = self._root
        color_type = root.ihdr.color_type
        ct = root.ColorType
        # Pick the parser matching the image's color type. Greyscale variants
        # share one layout, truecolor variants another.
        bkgd_types = {
            ct.greyscale: root.BkgdGreyscale,
            ct.greyscale_alpha: root.BkgdGreyscale,
            ct.indexed: root.BkgdIndexed,
            ct.truecolor: root.BkgdTruecolor,
            ct.truecolor_alpha: root.BkgdTruecolor,
        }
        bkgd_type = bkgd_types.get(color_type)
        # As in the original if/elif chain, an unknown color type leaves
        # self.bkgd unset.
        if bkgd_type is not None:
            self.bkgd = bkgd_type(self._io, self, root)
class PhysChunk(KaitaiStruct):
    """pHYs chunk body: intended pixel density and its unit."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        stream = self._io
        self.pixels_per_unit_x = stream.read_u4be()
        self.pixels_per_unit_y = stream.read_u4be()
        # Unit byte maps onto the shared PhysUnit enumeration.
        self.unit = self._root.PhysUnit(stream.read_u1())
class InternationalTextChunk(KaitaiStruct):
    """iTXt chunk body: keyword, compression flags, language tag, translated keyword and text."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        stream = self._io
        # Read strictly in on-wire order: three NUL-terminated strings
        # interleaved with two flag bytes, then the remainder as text.
        self.keyword = stream.read_strz("UTF-8", 0, False, True, True)
        self.compression_flag = stream.read_u1()
        self.compression_method = stream.read_u1()
        self.language_tag = stream.read_strz("ASCII", 0, False, True, True)
        self.translated_keyword = stream.read_strz("UTF-8", 0, False, True, True)
        # Everything up to end-of-stream is the (possibly compressed) text.
        # NOTE(review): decoded as UTF-8 unconditionally, even when
        # compression_flag is set — confirm upstream handles that case.
        self.text = stream.read_str_eos("UTF-8")
class TextChunk(KaitaiStruct):
    """tEXt chunk body: a Latin-1 keyword and the remaining bytes as Latin-1 text."""

    def __init__(self, _io, _parent=None, _root=None):
        stream = _io
        self._io = stream
        self._parent = _parent
        self._root = _root or self
        # NUL-terminated keyword, then the rest of the chunk as text.
        self.keyword = stream.read_strz("iso8859-1", 0, False, True, True)
        self.text = stream.read_str_eos("iso8859-1")
class TimeChunk(KaitaiStruct):
    """tIME chunk body: last-modification timestamp of the image."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        stream = self._io
        # Year is a full 2-byte value (e.g. 2017, not an offset); the
        # remaining fields are single bytes, read in on-wire order.
        self.year = stream.read_u2be()
        self.month = stream.read_u1()
        self.day = stream.read_u1()
        self.hour = stream.read_u1()
        self.minute = stream.read_u1()
        self.second = stream.read_u1()

View File

@@ -37,7 +37,7 @@ Events = frozenset([
])
def event_sequence(f):
def iterate(f):
if isinstance(f, http.HTTPFlow):
if f.request:
yield "requestheaders", f
@@ -70,4 +70,4 @@ def event_sequence(f):
yield "tcp_error", f
yield "tcp_end", f
else:
raise NotImplementedError
raise TypeError()

View File

@@ -73,7 +73,7 @@ def python_code(flow: http.HTTPFlow):
headers = flow.request.headers.copy()
# requests adds those by default.
for x in ("host", "content-length"):
for x in (":authority", "host", "content-length"):
headers.pop(x, None)
writearg("headers", dict(headers))
try:
@@ -130,7 +130,7 @@ def locust_code(flow):
if flow.request.headers:
lines = [
(_native(k), _native(v)) for k, v in flow.request.headers.fields
if _native(k).lower() not in ["host", "cookie"]
if _native(k).lower() not in [":authority", "host", "cookie"]
]
lines = [" '%s': '%s',\n" % (k, v) for k, v in lines]
headers += "\n headers = {\n%s }\n" % "".join(lines)

View File

@@ -1,5 +1,4 @@
import time
import copy
import uuid
from mitmproxy import controller # noqa
@@ -7,7 +6,7 @@ from mitmproxy import stateobject
from mitmproxy import connections
from mitmproxy import version
import typing # noqa
import typing # noqa
class Error(stateobject.StateObject):
@@ -53,10 +52,6 @@ class Error(stateobject.StateObject):
f.set_state(state)
return f
def copy(self):
c = copy.copy(self)
return c
class Flow(stateobject.StateObject):
@@ -116,16 +111,9 @@ class Flow(stateobject.StateObject):
return f
def copy(self):
f = copy.copy(self)
f = super().copy()
f.id = str(uuid.uuid4())
f.live = False
f.client_conn = self.client_conn.copy()
f.server_conn = self.server_conn.copy()
f.metadata = self.metadata.copy()
if self.error:
f.error = self.error.copy()
return f
def modified(self):

View File

@@ -1,4 +1,5 @@
import cgi
import html
from typing import Optional
from mitmproxy import flow
@@ -203,16 +204,27 @@ class HTTPFlow(flow.Flow):
return c
def make_error_response(status_code, message, headers=None):
response = http.status_codes.RESPONSES.get(status_code, "Unknown")
def make_error_response(
status_code: int,
message: str="",
headers: Optional[http.Headers]=None,
) -> HTTPResponse:
reason = http.status_codes.RESPONSES.get(status_code, "Unknown")
body = """
<html>
<head>
<title>%d %s</title>
<title>{status_code} {reason}</title>
</head>
<body>%s</body>
<body>
<h1>{status_code} {reason}</h1>
<p>{message}</p>
</body>
</html>
""".strip() % (status_code, response, cgi.escape(message))
""".strip().format(
status_code=status_code,
reason=reason,
message=html.escape(message),
)
body = body.encode("utf8", "replace")
if not headers:
@@ -226,7 +238,7 @@ def make_error_response(status_code, message, headers=None):
return HTTPResponse(
b"HTTP/1.1",
status_code,
response,
reason,
headers,
body,
)

View File

@@ -86,9 +86,14 @@ def convert_019_100(data):
return data
def convert_100_200(data):
data["version"] = (2, 0, 0)
return data
def _convert_dict_keys(o: Any) -> Any:
if isinstance(o, dict):
return {strutils.native(k): _convert_dict_keys(v) for k, v in o.items()}
return {strutils.always_str(k): _convert_dict_keys(v) for k, v in o.items()}
else:
return o
@@ -98,7 +103,7 @@ def _convert_dict_vals(o: dict, values_to_convert: dict) -> dict:
if not o or k not in o:
continue
if v is True:
o[k] = strutils.native(o[k])
o[k] = strutils.always_str(o[k])
else:
_convert_dict_vals(o[k], v)
return o
@@ -134,6 +139,7 @@ converters = {
(0, 17): convert_017_018,
(0, 18): convert_018_019,
(0, 19): convert_019_100,
(1, 0): convert_100_200,
}

View File

@@ -7,7 +7,7 @@ import sys
from mitmproxy import addonmanager
from mitmproxy import options
from mitmproxy import controller
from mitmproxy import events
from mitmproxy import eventsequence
from mitmproxy import exceptions
from mitmproxy import connections
from mitmproxy import http
@@ -91,7 +91,7 @@ class Master:
changed = False
try:
mtype, obj = self.event_queue.get(timeout=timeout)
if mtype not in events.Events:
if mtype not in eventsequence.Events:
raise exceptions.ControlException(
"Unknown event %s" % repr(mtype)
)
@@ -153,7 +153,7 @@ class Master:
f.request.port = self.server.config.upstream_server.address.port
f.request.scheme = self.server.config.upstream_server.scheme
f.reply = controller.DummyReply()
for e, o in events.event_sequence(f):
for e, o in eventsequence.iterate(f):
getattr(self, e)(o)
def load_flows(self, fr: io.FlowReader) -> int:
@@ -170,8 +170,11 @@ class Master:
path = os.path.expanduser(path)
try:
if path == "-":
# This is incompatible with Python 3 - maybe we can use click?
freader = io.FlowReader(sys.stdin)
try:
sys.stdin.buffer.read(0)
except Exception as e:
raise IOError("Cannot read from stdin: {}".format(e))
freader = io.FlowReader(sys.stdin.buffer)
return self.load_flows(freader)
else:
with open(path, "rb") as f:

View File

@@ -1,6 +1,7 @@
import re
_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
# Allow underscore in host name
_label_valid = re.compile(b"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
def is_valid_host(host: bytes) -> bool:

View File

@@ -31,8 +31,8 @@ def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
Raises:
ValueError, if decoding fails.
"""
if len(encoded) == 0:
return encoded
if encoded is None:
return None
global _cache
cached = (
@@ -72,8 +72,8 @@ def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
Raises:
ValueError, if encoding fails.
"""
if len(decoded) == 0:
return decoded
if decoded is None:
return None
global _cache
cached = (
@@ -86,10 +86,7 @@ def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
return _cache.encoded
try:
try:
value = decoded
if isinstance(value, str):
value = decoded.encode()
encoded = custom_encode[encoding](value)
encoded = custom_encode[encoding](decoded)
except KeyError:
encoded = codecs.encode(decoded, encoding, errors)
if encoding in ("gzip", "deflate", "br"):
@@ -114,12 +111,14 @@ def identity(content):
return content
def decode_gzip(content):
def decode_gzip(content: bytes) -> bytes:
if not content:
return b""
gfile = gzip.GzipFile(fileobj=BytesIO(content))
return gfile.read()
def encode_gzip(content):
def encode_gzip(content: bytes) -> bytes:
s = BytesIO()
gf = gzip.GzipFile(fileobj=s, mode='wb')
gf.write(content)
@@ -127,15 +126,17 @@ def encode_gzip(content):
return s.getvalue()
def decode_brotli(content):
def decode_brotli(content: bytes) -> bytes:
if not content:
return b""
return brotli.decompress(content)
def encode_brotli(content):
def encode_brotli(content: bytes) -> bytes:
return brotli.compress(content)
def decode_deflate(content):
def decode_deflate(content: bytes) -> bytes:
"""
Returns decompressed data for DEFLATE. Some servers may respond with
compressed data without a zlib header or checksum. An undocumented
@@ -144,13 +145,15 @@ def decode_deflate(content):
http://bugs.python.org/issue5784
"""
if not content:
return b""
try:
return zlib.decompress(content)
except zlib.error:
return zlib.decompress(content, -15)
def encode_deflate(content):
def encode_deflate(content: bytes) -> bytes:
"""
Returns compressed content, always including zlib header and checksum.
"""

View File

@@ -78,8 +78,9 @@ def _assemble_request_headers(request_data):
Args:
request_data (mitmproxy.net.http.request.RequestData)
"""
headers = request_data.headers.copy()
headers = request_data.headers
if "host" not in headers and request_data.scheme and request_data.host and request_data.port:
headers = headers.copy()
headers["host"] = mitmproxy.net.http.url.hostport(
request_data.scheme,
request_data.host,

View File

@@ -158,8 +158,9 @@ def connection_close(http_version, headers):
"""
Checks the message to see if the client connection should be closed
according to RFC 2616 Section 8.1.
If we don't have a Connection header, HTTP 1.1 connections are assumed
to be persistent.
"""
# At first, check if we have an explicit Connection header.
if "connection" in headers:
tokens = get_header_tokens(headers, "connection")
if "close" in tokens:
@@ -167,9 +168,7 @@ def connection_close(http_version, headers):
elif "keep-alive" in tokens:
return False
# If we don't have a Connection header, HTTP 1.1 connections are assumed to
# be persistent
return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1" # FIXME: Remove one case.
return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1"
def expected_http_body_size(request, response=None):
@@ -228,7 +227,7 @@ def _get_first_line(rfile):
if line == b"\r\n" or line == b"\n":
# Possible leftover from previous message
line = rfile.readline()
except exceptions.TcpDisconnect:
except (exceptions.TcpDisconnect, exceptions.TlsException):
raise exceptions.HttpReadDisconnect("Remote disconnected")
if not line:
raise exceptions.HttpReadDisconnect("Remote disconnected")

View File

@@ -1,6 +1,6 @@
import codecs
import hyperframe
import hyperframe.frame
from mitmproxy import exceptions
@@ -20,6 +20,6 @@ def parse_frame(header, body=None):
body = header[9:]
header = header[:9]
frame, length = hyperframe.frame.Frame.parse_frame_header(header)
frame, _ = hyperframe.frame.Frame.parse_frame_header(header)
frame.parse_body(memoryview(body))
return frame

View File

@@ -14,13 +14,13 @@ def parse_headers(headers):
host = None
port = None
if method == b'CONNECT':
raise NotImplementedError("CONNECT over HTTP/2 is not implemented.")
if path == b'*' or path.startswith(b"/"):
first_line_format = "relative"
elif method == b'CONNECT': # pragma: no cover
raise NotImplementedError("CONNECT over HTTP/2 is not implemented.")
else: # pragma: no cover
else:
first_line_format = "absolute"
# FIXME: verify if path or :host contains what we need
scheme, host, port, _ = url.parse(path)
if authority:

View File

@@ -7,15 +7,6 @@ from mitmproxy.types import serializable
from mitmproxy.net.http import headers
# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
def _native(x):
return x.decode("utf-8", "surrogateescape")
def _always_bytes(x):
return strutils.always_bytes(x, "utf-8", "surrogateescape")
class MessageData(serializable.Serializable):
def __eq__(self, other):
if isinstance(other, MessageData):
@@ -142,11 +133,11 @@ class Message(serializable.Serializable):
"""
Version string, e.g. "HTTP/1.1"
"""
return _native(self.data.http_version)
return self.data.http_version.decode("utf-8", "surrogateescape")
@http_version.setter
def http_version(self, http_version):
self.data.http_version = _always_bytes(http_version)
self.data.http_version = strutils.always_bytes(http_version, "utf-8", "surrogateescape")
@property
def timestamp_start(self):

View File

@@ -1,5 +1,6 @@
import re
import urllib
from typing import Optional
from mitmproxy.types import multidict
from mitmproxy.utils import strutils
@@ -115,24 +116,24 @@ class Request(message.Message):
"""
HTTP request method, e.g. "GET".
"""
return message._native(self.data.method).upper()
return self.data.method.decode("utf-8", "surrogateescape").upper()
@method.setter
def method(self, method):
self.data.method = message._always_bytes(method)
self.data.method = strutils.always_bytes(method, "utf-8", "surrogateescape")
@property
def scheme(self):
"""
HTTP request scheme, which should be "http" or "https".
"""
if not self.data.scheme:
return self.data.scheme
return message._native(self.data.scheme)
if self.data.scheme is None:
return None
return self.data.scheme.decode("utf-8", "surrogateescape")
@scheme.setter
def scheme(self, scheme):
self.data.scheme = message._always_bytes(scheme)
self.data.scheme = strutils.always_bytes(scheme, "utf-8", "surrogateescape")
@property
def host(self):
@@ -164,11 +165,44 @@ class Request(message.Message):
self.data.host = host
# Update host header
if "host" in self.headers:
if host:
self.headers["host"] = host
if self.host_header is not None:
self.host_header = host
@property
def host_header(self) -> Optional[str]:
"""
The request's host/authority header.
This property maps to either ``request.headers["Host"]`` or
``request.headers[":authority"]``, depending on whether it's HTTP/1.x or HTTP/2.0.
"""
if ":authority" in self.headers:
return self.headers[":authority"]
if "Host" in self.headers:
return self.headers["Host"]
return None
@host_header.setter
def host_header(self, val: Optional[str]) -> None:
if val is None:
self.headers.pop("Host", None)
self.headers.pop(":authority", None)
elif self.host_header is not None:
# Update any existing headers.
if ":authority" in self.headers:
self.headers[":authority"] = val
if "Host" in self.headers:
self.headers["Host"] = val
else:
# Only add the correct new header.
if self.http_version.upper().startswith("HTTP/2"):
self.headers[":authority"] = val
else:
self.headers.pop("host")
self.headers["Host"] = val
@host_header.deleter
def host_header(self):
self.host_header = None
@property
def port(self):
@@ -190,11 +224,11 @@ class Request(message.Message):
if self.data.path is None:
return None
else:
return message._native(self.data.path)
return self.data.path.decode("utf-8", "surrogateescape")
@path.setter
def path(self, path):
self.data.path = message._always_bytes(path)
self.data.path = strutils.always_bytes(path, "utf-8", "surrogateescape")
@property
def url(self):
@@ -211,9 +245,10 @@ class Request(message.Message):
def _parse_host_header(self):
"""Extract the host and port from Host header"""
if "host" not in self.headers:
host = self.host_header
if not host:
return None, None
host, port = self.headers["host"], None
port = None
m = host_header_re.match(host)
if m:
host = m.group("host").strip("[]")
@@ -373,7 +408,7 @@ class Request(message.Message):
This will overwrite the existing content if there is one.
"""
self.headers["content-type"] = "application/x-www-form-urlencoded"
self.content = mitmproxy.net.http.url.encode(form_data).encode()
self.content = mitmproxy.net.http.url.encode(form_data, self.content.decode()).encode()
@urlencoded_form.setter
def urlencoded_form(self, value):

View File

@@ -6,6 +6,7 @@ from mitmproxy.net.http import cookies
from mitmproxy.net.http import headers as nheaders
from mitmproxy.net.http import message
from mitmproxy.net.http import status_codes
from mitmproxy.utils import strutils
from typing import AnyStr
from typing import Dict
from typing import Iterable
@@ -121,11 +122,12 @@ class Response(message.Message):
HTTP Reason Phrase, e.g. "Not Found".
This is always :py:obj:`None` for HTTP2 requests, because HTTP2 responses do not contain a reason phrase.
"""
return message._native(self.data.reason)
# Encoding: http://stackoverflow.com/a/16674906/934719
return self.data.reason.decode("ISO-8859-1", "surrogateescape")
@reason.setter
def reason(self, reason):
self.data.reason = message._always_bytes(reason)
self.data.reason = strutils.always_bytes(reason, "ISO-8859-1", "surrogateescape")
@property
def cookies(self) -> multidict.MultiDictView:

View File

@@ -82,11 +82,24 @@ def unparse(scheme, host, port, path=""):
return "%s://%s%s" % (scheme, hostport(scheme, host, port), path)
def encode(s: Sequence[Tuple[str, str]]) -> str:
def encode(s: Sequence[Tuple[str, str]], similar_to: str=None) -> str:
"""
Takes a list of (key, value) tuples and returns a urlencoded string.
If similar_to is passed, the output is formatted similar to the provided urlencoded string.
"""
return urllib.parse.urlencode(s, False, errors="surrogateescape")
remove_trailing_equal = False
if similar_to:
remove_trailing_equal = any("=" not in param for param in similar_to.split("&"))
encoded = urllib.parse.urlencode(s, False, errors="surrogateescape")
if remove_trailing_equal:
encoded = encoded.replace("=&", "&")
if encoded[-1] == '=':
encoded = encoded[:-1]
return encoded
def decode(s):

View File

@@ -538,7 +538,7 @@ class _Connection:
self.ssl_verification_error = exceptions.InvalidCertificateException(
"Certificate Verification Error for {}: {} (errno: {}, depth: {})".format(
sni,
strutils.native(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"),
strutils.always_str(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"),
errno,
err_depth
)

View File

@@ -57,38 +57,38 @@ class WSGIAdaptor:
Raises:
ValueError, if the content-encoding is invalid.
"""
path = strutils.native(flow.request.path, "latin-1")
path = strutils.always_str(flow.request.path, "latin-1")
if '?' in path:
path_info, query = strutils.native(path, "latin-1").split('?', 1)
path_info, query = strutils.always_str(path, "latin-1").split('?', 1)
else:
path_info = path
query = ''
environ = {
'wsgi.version': (1, 0),
'wsgi.url_scheme': strutils.native(flow.request.scheme, "latin-1"),
'wsgi.url_scheme': strutils.always_str(flow.request.scheme, "latin-1"),
'wsgi.input': io.BytesIO(flow.request.content or b""),
'wsgi.errors': errsoc,
'wsgi.multithread': True,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': self.sversion,
'REQUEST_METHOD': strutils.native(flow.request.method, "latin-1"),
'REQUEST_METHOD': strutils.always_str(flow.request.method, "latin-1"),
'SCRIPT_NAME': '',
'PATH_INFO': urllib.parse.unquote(path_info),
'QUERY_STRING': query,
'CONTENT_TYPE': strutils.native(flow.request.headers.get('Content-Type', ''), "latin-1"),
'CONTENT_LENGTH': strutils.native(flow.request.headers.get('Content-Length', ''), "latin-1"),
'CONTENT_TYPE': strutils.always_str(flow.request.headers.get('Content-Type', ''), "latin-1"),
'CONTENT_LENGTH': strutils.always_str(flow.request.headers.get('Content-Length', ''), "latin-1"),
'SERVER_NAME': self.domain,
'SERVER_PORT': str(self.port),
'SERVER_PROTOCOL': strutils.native(flow.request.http_version, "latin-1"),
'SERVER_PROTOCOL': strutils.always_str(flow.request.http_version, "latin-1"),
}
environ.update(extra)
if flow.client_conn.address:
environ["REMOTE_ADDR"] = strutils.native(flow.client_conn.address.host, "latin-1")
environ["REMOTE_ADDR"] = strutils.always_str(flow.client_conn.address.host, "latin-1")
environ["REMOTE_PORT"] = flow.client_conn.address.port
for key, value in flow.request.headers.items():
key = 'HTTP_' + strutils.native(key, "latin-1").upper().replace('-', '_')
key = 'HTTP_' + strutils.always_str(key, "latin-1").upper().replace('-', '_')
if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
environ[key] = value
return environ

View File

@@ -1,4 +1,4 @@
from typing import Tuple, Optional, Sequence
from typing import Tuple, Optional, Sequence, Union
from mitmproxy import optmanager
@@ -22,96 +22,101 @@ DEFAULT_CLIENT_CIPHERS = "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA
class Options(optmanager.OptManager):
def __init__(
self,
*, # all args are keyword-only.
# TODO: rename to onboarding_app_*
app: bool = True,
app_host: str = APP_HOST,
app_port: int = APP_PORT,
anticache: bool = False,
anticomp: bool = False,
client_replay: Sequence[str] = [],
replay_kill_extra: bool = False,
keepserving: bool = True,
no_server: bool = False,
server_replay_nopop: bool = False,
refresh_server_playback: bool = True,
rfile: Optional[str] = None,
scripts: Sequence[str] = [],
showhost: bool = False,
replacements: Sequence[Tuple[str, str, str]] = [],
server_replay_use_headers: Sequence[str] = [],
setheaders: Sequence[Tuple[str, str, str]] = [],
server_replay: Sequence[str] = [],
stickycookie: Optional[str] = None,
stickyauth: Optional[str] = None,
stream_large_bodies: Optional[int] = None,
verbosity: int = 2,
default_contentview: str = "auto",
streamfile: Optional[str] = None,
streamfile_append: bool = False,
server_replay_ignore_content: bool = False,
server_replay_ignore_params: Sequence[str] = [],
server_replay_ignore_payload_params: Sequence[str] = [],
server_replay_ignore_host: bool = False,
# Proxy options
auth_nonanonymous: bool = False,
auth_singleuser: Optional[str] = None,
auth_htpasswd: Optional[str] = None,
add_upstream_certs_to_client_chain: bool = False,
body_size_limit: Optional[int] = None,
cadir: str = CA_DIR,
certs: Sequence[Tuple[str, str]] = [],
ciphers_client: str=DEFAULT_CLIENT_CIPHERS,
ciphers_server: Optional[str]=None,
clientcerts: Optional[str] = None,
http2: bool = True,
ignore_hosts: Sequence[str] = [],
listen_host: str = "",
listen_port: int = LISTEN_PORT,
upstream_bind_address: str = "",
mode: str = "regular",
no_upstream_cert: bool = False,
rawtcp: bool = False,
websocket: bool = True,
spoof_source_address: bool = False,
upstream_server: Optional[str] = None,
upstream_auth: Optional[str] = None,
ssl_version_client: str = "secure",
ssl_version_server: str = "secure",
ssl_insecure: bool = False,
ssl_verify_upstream_trusted_cadir: Optional[str] = None,
ssl_verify_upstream_trusted_ca: Optional[str] = None,
tcp_hosts: Sequence[str] = [],
self,
*, # all args are keyword-only.
onboarding: bool = True,
onboarding_host: str = APP_HOST,
onboarding_port: int = APP_PORT,
anticache: bool = False,
anticomp: bool = False,
client_replay: Sequence[str] = [],
replay_kill_extra: bool = False,
keepserving: bool = True,
no_server: bool = False,
server_replay_nopop: bool = False,
refresh_server_playback: bool = True,
rfile: Optional[str] = None,
scripts: Sequence[str] = [],
showhost: bool = False,
replacements: Sequence[Union[Tuple[str, str, str], str]] = [],
replacement_files: Sequence[Union[Tuple[str, str, str], str]] = [],
server_replay_use_headers: Sequence[str] = [],
setheaders: Sequence[Union[Tuple[str, str, str], str]] = [],
server_replay: Sequence[str] = [],
stickycookie: Optional[str] = None,
stickyauth: Optional[str] = None,
stream_large_bodies: Optional[int] = None,
verbosity: int = 2,
default_contentview: str = "auto",
streamfile: Optional[str] = None,
streamfile_append: bool = False,
server_replay_ignore_content: bool = False,
server_replay_ignore_params: Sequence[str] = [],
server_replay_ignore_payload_params: Sequence[str] = [],
server_replay_ignore_host: bool = False,
intercept: Optional[str] = None,
# Proxy options
auth_nonanonymous: bool = False,
auth_singleuser: Optional[str] = None,
auth_htpasswd: Optional[str] = None,
add_upstream_certs_to_client_chain: bool = False,
body_size_limit: Optional[int] = None,
cadir: str = CA_DIR,
certs: Sequence[Tuple[str, str]] = [],
ciphers_client: str=DEFAULT_CLIENT_CIPHERS,
ciphers_server: Optional[str]=None,
clientcerts: Optional[str] = None,
ignore_hosts: Sequence[str] = [],
listen_host: str = "",
listen_port: int = LISTEN_PORT,
upstream_bind_address: str = "",
mode: str = "regular",
no_upstream_cert: bool = False,
# Console options
eventlog: bool = False,
focus_follow: bool = False,
filter: Optional[str] = None,
palette: Optional[str] = "dark",
palette_transparent: bool = False,
no_mouse: bool = False,
order: Optional[str] = None,
order_reversed: bool = False,
http2: bool = True,
http2_priority: bool = False,
websocket: bool = True,
rawtcp: bool = False,
# Web options
open_browser: bool = True,
wdebug: bool = False,
wport: int = 8081,
wiface: str = "127.0.0.1",
spoof_source_address: bool = False,
upstream_server: Optional[str] = None,
upstream_auth: Optional[str] = None,
ssl_version_client: str = "secure",
ssl_version_server: str = "secure",
ssl_insecure: bool = False,
ssl_verify_upstream_trusted_cadir: Optional[str] = None,
ssl_verify_upstream_trusted_ca: Optional[str] = None,
tcp_hosts: Sequence[str] = [],
# Dump options
filtstr: Optional[str] = None,
flow_detail: int = 1
intercept: Optional[str] = None,
# Console options
console_eventlog: bool = False,
console_focus_follow: bool = False,
console_palette: Optional[str] = "dark",
console_palette_transparent: bool = False,
console_no_mouse: bool = False,
console_order: Optional[str] = None,
console_order_reversed: bool = False,
filter: Optional[str] = None,
# Web options
web_open_browser: bool = True,
web_debug: bool = False,
web_port: int = 8081,
web_iface: str = "127.0.0.1",
# Dump options
filtstr: Optional[str] = None,
flow_detail: int = 1
) -> None:
# We could replace all assignments with clever metaprogramming,
# but type hints are a much more valueable asset.
self.app = app
self.app_host = app_host
self.app_port = app_port
self.onboarding = onboarding
self.onboarding_host = onboarding_host
self.onboarding_port = onboarding_port
self.anticache = anticache
self.anticomp = anticomp
self.client_replay = client_replay
@@ -124,6 +129,7 @@ class Options(optmanager.OptManager):
self.scripts = scripts
self.showhost = showhost
self.replacements = replacements
self.replacement_files = replacement_files
self.server_replay_use_headers = server_replay_use_headers
self.setheaders = setheaders
self.server_replay = server_replay
@@ -150,15 +156,18 @@ class Options(optmanager.OptManager):
self.ciphers_client = ciphers_client
self.ciphers_server = ciphers_server
self.clientcerts = clientcerts
self.http2 = http2
self.ignore_hosts = ignore_hosts
self.listen_host = listen_host
self.listen_port = listen_port
self.upstream_bind_address = upstream_bind_address
self.mode = mode
self.no_upstream_cert = no_upstream_cert
self.rawtcp = rawtcp
self.http2 = http2
self.http2_priority = http2_priority
self.websocket = websocket
self.rawtcp = rawtcp
self.spoof_source_address = spoof_source_address
self.upstream_server = upstream_server
self.upstream_auth = upstream_auth
@@ -172,20 +181,21 @@ class Options(optmanager.OptManager):
self.intercept = intercept
# Console options
self.eventlog = eventlog
self.focus_follow = focus_follow
self.console_eventlog = console_eventlog
self.console_focus_follow = console_focus_follow
self.console_palette = console_palette
self.console_palette_transparent = console_palette_transparent
self.console_no_mouse = console_no_mouse
self.console_order = console_order
self.console_order_reversed = console_order_reversed
self.filter = filter
self.palette = palette
self.palette_transparent = palette_transparent
self.no_mouse = no_mouse
self.order = order
self.order_reversed = order_reversed
# Web options
self.open_browser = open_browser
self.wdebug = wdebug
self.wport = wport
self.wiface = wiface
self.web_open_browser = web_open_browser
self.web_debug = web_debug
self.web_port = web_port
self.web_iface = web_iface
# Dump options
self.filtstr = filtstr

View File

@@ -229,7 +229,10 @@ class OptManager(metaclass=_DefaultsMeta):
this object. May raise OptionsError if the config file is invalid.
"""
data = self._load(text)
self.update(**data)
try:
self.update(**data)
except KeyError as v:
raise exceptions.OptionsError(v)
def load_paths(self, *paths):
"""
@@ -242,7 +245,12 @@ class OptManager(metaclass=_DefaultsMeta):
if os.path.exists(p) and os.path.isfile(p):
with open(p, "r") as f:
txt = f.read()
self.load(txt)
try:
self.load(txt)
except exceptions.OptionsError as e:
raise exceptions.OptionsError(
"Error reading %s: %s" % (p, e)
)
def merge(self, opts):
"""

View File

@@ -25,6 +25,10 @@ elif sys.platform == "darwin" or sys.platform.startswith("freebsd"):
from . import osx
original_addr = osx.original_addr # noqa
elif sys.platform.startswith("openbsd"):
from . import openbsd
original_addr = openbsd.original_addr # noqa
elif sys.platform == "win32":
from . import windows

View File

@@ -0,0 +1,2 @@
def original_addr(csock):
return csock.getsockname()

View File

@@ -30,7 +30,7 @@ class Socks5Proxy(protocol.Layer, protocol.ServerConnectionMixin):
if connect_request.msg != socks.CMD.CONNECT:
raise socks.SocksError(
socks.REP.COMMAND_NOT_SUPPORTED,
"mitmproxy only supports SOCKS5 CONNECT."
"mitmproxy only supports SOCKS5 CONNECT"
)
# We always connect lazily, but we need to pretend to the client that we connected.

View File

@@ -74,16 +74,6 @@ class Layer(_LayerCodeCompletion):
"""
return getattr(self.ctx, name)
@property
def layers(self):
"""
List of all layers, including the current layer (``[self, self.ctx, self.ctx.ctx, ...]``)
"""
return [self] + self.ctx.layers
def __repr__(self):
return type(self).__name__
class ServerConnectionMixin:

View File

@@ -88,6 +88,10 @@ class UpstreamConnectLayer(base.Layer):
layer()
def _send_connect_request(self):
self.log("Sending CONNECT request", "debug", [
"Proxy Server: {}".format(self.ctx.server_conn.address),
"Connect to: {}:{}".format(self.connect_request.host, self.connect_request.port)
])
self.send_request(self.connect_request)
resp = self.read_response(self.connect_request)
if resp.status_code != 200:
@@ -101,6 +105,7 @@ class UpstreamConnectLayer(base.Layer):
pass # swallow the message
def change_upstream_proxy_server(self, address):
self.log("Changing upstream proxy to {} (CONNECTed)".format(repr(address)), "debug")
if address != self.server_conn.via.address:
self.ctx.set_server(address)
@@ -126,7 +131,7 @@ class HTTPMode(enum.Enum):
# At this point, we see only a subset of the proxy modes
MODE_REQUEST_FORMS = {
HTTPMode.regular: ("authority", "absolute"),
HTTPMode.transparent: ("relative"),
HTTPMode.transparent: ("relative",),
HTTPMode.upstream: ("authority", "absolute"),
}
@@ -138,9 +143,16 @@ def validate_request_form(mode, request):
)
allowed_request_forms = MODE_REQUEST_FORMS[mode]
if request.first_line_format not in allowed_request_forms:
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % (
" or ".join(allowed_request_forms), request.first_line_format
)
if mode == HTTPMode.transparent:
err_message = (
"Mitmproxy received an {} request even though it is not running in regular mode. "
"This usually indicates a misconfiguration, please see "
"http://docs.mitmproxy.org/en/stable/modes.html for details."
).format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form")
else:
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % (
" or ".join(allowed_request_forms), request.first_line_format
)
raise exceptions.HttpException(err_message)
@@ -279,7 +291,7 @@ class HttpLayer(base.Layer):
# update host header in reverse proxy mode
if self.config.options.mode == "reverse":
f.request.headers["Host"] = self.config.upstream_server.address.host
f.request.host_header = self.config.upstream_server.address.host
# Determine .scheme, .host and .port attributes for inline scripts. For
# absolute-form requests, they are directly given in the request. For
@@ -289,11 +301,10 @@ class HttpLayer(base.Layer):
if self.mode is HTTPMode.transparent:
# Setting request.host also updates the host header, which we want
# to preserve
host_header = f.request.headers.get("host", None)
host_header = f.request.host_header
f.request.host = self.__initial_server_conn.address.host
f.request.port = self.__initial_server_conn.address.port
if host_header:
f.request.headers["host"] = host_header
f.request.host_header = host_header # set again as .host overwrites this.
f.request.scheme = "https" if self.__initial_server_tls else "http"
self.channel.ask("request", f)
@@ -432,10 +443,13 @@ class HttpLayer(base.Layer):
except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException):
self.log("Failed to send error response to client: {}".format(message), "debug")
def change_upstream_proxy_server(self, address) -> None:
def change_upstream_proxy_server(self, address):
# Make set_upstream_proxy_server always available,
# even if there's no UpstreamConnectLayer
if address != self.server_conn.address:
if hasattr(self.ctx, "change_upstream_proxy_server"):
self.ctx.change_upstream_proxy_server(address)
elif address != self.server_conn.address:
self.log("Changing upstream proxy to {} (not CONNECTed)".format(repr(address)), "debug")
self.set_server(address)
def establish_server_connection(self, host: str, port: int, scheme: str):

View File

@@ -268,6 +268,10 @@ class Http2Layer(base.Layer):
return True
def _handle_priority_updated(self, eid, event):
if not self.config.options.http2_priority:
self.log("HTTP/2 PRIORITY frame surpressed. Use --http2-priority to enable forwarding.", "debug")
return True
if eid in self.streams and self.streams[eid].handled_priority_event is event:
# this event was already handled during stream creation
# HeadersFrame + Priority information as RequestReceived
@@ -527,9 +531,12 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
if self.handled_priority_event:
# only send priority information if they actually came with the original HeadersFrame
# and not if they got updated before/after with a PriorityFrame
priority_exclusive = self.priority_exclusive
priority_depends_on = self._map_depends_on_stream_id(self.server_stream_id, self.priority_depends_on)
priority_weight = self.priority_weight
if not self.config.options.http2_priority:
self.log("HTTP/2 PRIORITY information in HEADERS frame surpressed. Use --http2-priority to enable forwarding.", "debug")
else:
priority_exclusive = self.priority_exclusive
priority_depends_on = self._map_depends_on_stream_id(self.server_stream_id, self.priority_depends_on)
priority_weight = self.priority_weight
try:
self.connections[self.server_conn].safe_send_headers(
@@ -610,7 +617,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
chunks
)
def __call__(self):
def __call__(self): # pragma: no cover
raise EnvironmentError('Http2SingleStreamLayer must be run as thread')
def run(self):

View File

@@ -8,7 +8,7 @@ from mitmproxy import flow
from mitmproxy.proxy.protocol import base
from mitmproxy.net import tcp
from mitmproxy.net import websockets
from mitmproxy.websocket import WebSocketFlow, WebSocketBinaryMessage, WebSocketTextMessage
from mitmproxy.websocket import WebSocketFlow, WebSocketMessage
class WebSocketLayer(base.Layer):
@@ -65,12 +65,7 @@ class WebSocketLayer(base.Layer):
compressed_message = fb[0].header.rsv1
fb.clear()
if message_type == websockets.OPCODE.TEXT:
t = WebSocketTextMessage
else:
t = WebSocketBinaryMessage
websocket_message = t(self.flow, not is_server, payload)
websocket_message = WebSocketMessage(message_type, not is_server, payload)
length = len(websocket_message.content)
self.flow.messages.append(websocket_message)
self.channel.ask("websocket_message", self.flow)

View File

@@ -110,10 +110,3 @@ class RootContext:
full_msg.append(" -> " + i)
full_msg = "\n".join(full_msg)
self.channel.tell("log", log.LogEntry(full_msg, level))
@property
def layers(self):
return []
def __repr__(self):
return "RootContext"

View File

@@ -3,7 +3,7 @@ This module provides a @concurrent decorator primitive to
offload computations from mitmproxy's main master thread.
"""
from mitmproxy import events
from mitmproxy import eventsequence
from mitmproxy.types import basethread
@@ -12,7 +12,7 @@ class ScriptThread(basethread.BaseThread):
def concurrent(fn):
if fn.__name__ not in events.Events - {"start", "configure", "tick"}:
if fn.__name__ not in eventsequence.Events - {"start", "configure", "tick"}:
raise NotImplementedError(
"Concurrent decorator not supported for '%s' method." % fn.__name__
)
@@ -29,4 +29,8 @@ def concurrent(fn):
"script.concurrent (%s)" % fn.__name__,
target=run
).start()
return _concurrent
# Support @concurrent for class-based addons
if "." in fn.__qualname__:
return staticmethod(_concurrent)
else:
return _concurrent

View File

@@ -9,8 +9,8 @@ from mitmproxy.types import serializable
class TCPMessage(serializable.Serializable):
def __init__(self, from_client, content, timestamp=None):
self.content = content
self.from_client = from_client
self.content = content
self.timestamp = timestamp or time.time()
@classmethod
@@ -21,9 +21,7 @@ class TCPMessage(serializable.Serializable):
return self.from_client, self.content, self.timestamp
def set_state(self, state):
self.from_client = state.pop("from_client")
self.content = state.pop("content")
self.timestamp = state.pop("timestamp")
self.from_client, self.content, self.timestamp = state
def __repr__(self):
return "{direction} {content}".format(

View File

@@ -3,7 +3,7 @@ import contextlib
import mitmproxy.master
import mitmproxy.options
from mitmproxy import proxy
from mitmproxy import events
from mitmproxy import eventsequence
from mitmproxy import exceptions
@@ -57,7 +57,7 @@ class context:
is taken (as in flow interception).
"""
f.reply._state = "handled"
for evt, arg in events.event_sequence(f):
for evt, arg in eventsequence.iterate(f):
h = getattr(addon, evt, None)
if h:
h(arg)

View File

@@ -1,9 +1,12 @@
from mitmproxy.net import websockets
from mitmproxy.test import tutils
from mitmproxy import tcp
from mitmproxy import websocket
from mitmproxy import controller
from mitmproxy import http
from mitmproxy import connections
from mitmproxy import flow
from mitmproxy.net import http as net_http
def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None):
@@ -26,6 +29,60 @@ def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None):
return f
def twebsocketflow(client_conn=True, server_conn=True, messages=True, err=None, handshake_flow=True):
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if handshake_flow is True:
req = http.HTTPRequest(
"relative",
"GET",
"http",
"example.com",
"80",
"/ws",
"HTTP/1.1",
headers=net_http.Headers(
connection="upgrade",
upgrade="websocket",
sec_websocket_version="13",
sec_websocket_key="1234",
),
content=b''
)
resp = http.HTTPResponse(
"HTTP/1.1",
101,
reason=net_http.status_codes.RESPONSES.get(101),
headers=net_http.Headers(
connection='upgrade',
upgrade='websocket',
sec_websocket_accept=b'',
),
content=b'',
)
handshake_flow = http.HTTPFlow(client_conn, server_conn)
handshake_flow.request = req
handshake_flow.response = resp
f = websocket.WebSocketFlow(client_conn, server_conn, handshake_flow)
if messages is True:
messages = [
websocket.WebSocketMessage(websockets.OPCODE.BINARY, True, b"hello binary"),
websocket.WebSocketMessage(websockets.OPCODE.TEXT, False, "hello text".encode()),
]
if err is True:
err = terr()
f.messages = messages
f.error = err
f.reply = controller.DummyReply()
return f
def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
"""
@type client_conn: bool | None | mitmproxy.proxy.connection.ClientConnection
@@ -59,6 +116,27 @@ def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
return f
class DummyFlow(flow.Flow):
"""A flow that is neither HTTP nor TCP."""
def __init__(self, client_conn, server_conn, live=None):
super().__init__("dummy", client_conn, server_conn, live)
def tdummyflow(client_conn=True, server_conn=True, err=None):
if client_conn is True:
client_conn = tclient_conn()
if server_conn is True:
server_conn = tserver_conn()
if err is True:
err = terr()
f = DummyFlow(client_conn, server_conn)
f.error = err
f.reply = controller.DummyReply()
return f
def tclient_conn():
"""
@return: mitmproxy.proxy.connection.ClientConnection

View File

@@ -4,27 +4,13 @@ import os
import time
import shutil
from contextlib import contextmanager
import sys
from mitmproxy.utils import data
from mitmproxy.net import tcp
from mitmproxy.net import http
def treader(bytes):
"""
Construct a tcp.Read object from bytes.
"""
fp = BytesIO(bytes)
return tcp.Reader(fp)
@contextmanager
def chdir(dir):
orig_dir = os.getcwd()
os.chdir(dir)
yield
os.chdir(orig_dir)
test_data = data.Data(__name__).push("../../test/")
@contextmanager
@@ -39,65 +25,12 @@ def tmpdir(*args, **kwargs):
shutil.rmtree(temp_workdir)
def _check_exception(expected, actual, exc_tb):
if isinstance(expected, str):
if expected.lower() not in str(actual).lower():
raise AssertionError(
"Expected %s, but caught %s" % (
repr(expected), repr(actual)
)
)
else:
if not isinstance(actual, expected):
raise AssertionError(
"Expected %s, but caught %s %s" % (
expected.__name__, actual.__class__.__name__, repr(actual)
)
)
def raises(expected_exception, obj=None, *args, **kwargs):
def treader(bytes):
"""
Assert that a callable raises a specified exception.
:exc An exception class or a string. If a class, assert that an
exception of this type is raised. If a string, assert that the string
occurs in the string representation of the exception, based on a
case-insenstivie match.
:obj A callable object.
:args Arguments to be passsed to the callable.
:kwargs Arguments to be passed to the callable.
Construct a tcp.Read object from bytes.
"""
if obj is None:
return RaisesContext(expected_exception)
else:
try:
ret = obj(*args, **kwargs)
except Exception as actual:
_check_exception(expected_exception, actual, sys.exc_info()[2])
else:
raise AssertionError("No exception raised. Return value: {}".format(ret))
class RaisesContext:
def __init__(self, expected_exception):
self.expected_exception = expected_exception
def __enter__(self):
return
def __exit__(self, exc_type, exc_val, exc_tb):
if not exc_type:
raise AssertionError("No exception raised.")
else:
_check_exception(self.expected_exception, exc_val, exc_tb)
return True
test_data = data.Data(__name__).push("../../test/")
fp = BytesIO(bytes)
return tcp.Reader(fp)
def treq(**kwargs):

View File

@@ -1,9 +1,7 @@
import argparse
import re
import os
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import options
from mitmproxy import platform
from mitmproxy.utils import human
@@ -19,91 +17,6 @@ class ParseException(Exception):
pass
def _parse_hook(s):
sep, rem = s[0], s[1:]
parts = rem.split(sep, 2)
if len(parts) == 2:
patt = ".*"
a, b = parts
elif len(parts) == 3:
patt, a, b = parts
else:
raise ParseException(
"Malformed hook specifier - too few clauses: %s" % s
)
if not a:
raise ParseException("Empty clause: %s" % str(patt))
if not flowfilter.parse(patt):
raise ParseException("Malformed filter pattern: %s" % patt)
return patt, a, b
def parse_replace_hook(s):
"""
Returns a (pattern, regex, replacement) tuple.
The general form for a replacement hook is as follows:
/patt/regex/replacement
The first character specifies the separator. Example:
:~q:foo:bar
If only two clauses are specified, the pattern is set to match
universally (i.e. ".*"). Example:
/foo/bar/
Clauses are parsed from left to right. Extra separators are taken to be
part of the final clause. For instance, the replacement clause below is
"foo/bar/":
/one/two/foo/bar/
Checks that pattern and regex are both well-formed. Raises
ParseException on error.
"""
patt, regex, replacement = _parse_hook(s)
try:
re.compile(regex)
except re.error as e:
raise ParseException("Malformed replacement regex: %s" % str(e))
return patt, regex, replacement
def parse_setheader(s):
"""
Returns a (pattern, header, value) tuple.
The general form for a replacement hook is as follows:
/patt/header/value
The first character specifies the separator. Example:
:~q:foo:bar
If only two clauses are specified, the pattern is set to match
universally (i.e. ".*"). Example:
/foo/bar/
Clauses are parsed from left to right. Extra separators are taken to be
part of the final clause. For instance, the value clause below is
"foo/bar/":
/one/two/foo/bar/
Checks that pattern and regex are both well-formed. Raises
ParseException on error.
"""
return _parse_hook(s)
def get_common_options(args):
stickycookie, stickyauth = None, None
if args.stickycookie_filt:
@@ -116,34 +29,6 @@ def get_common_options(args):
if stream_large_bodies:
stream_large_bodies = human.parse_size(stream_large_bodies)
reps = []
for i in args.replace or []:
try:
p = parse_replace_hook(i)
except ParseException as e:
raise exceptions.OptionsError(e)
reps.append(p)
for i in args.replace_file or []:
try:
patt, rex, path = parse_replace_hook(i)
except ParseException as e:
raise exceptions.OptionsError(e)
try:
v = open(path, "rb").read()
except IOError as e:
raise exceptions.OptionsError(
"Could not read replace file: %s" % path
)
reps.append((patt, rex, v))
setheaders = []
for i in args.setheader or []:
try:
p = parse_setheader(i)
except ParseException as e:
raise exceptions.OptionsError(e)
setheaders.append(p)
if args.streamfile and args.streamfile[0] == args.rfile:
if args.streamfile[1] == "wb":
raise exceptions.OptionsError(
@@ -212,9 +97,9 @@ def get_common_options(args):
args.verbose = 0
return dict(
app=args.app,
app_host=args.app_host,
app_port=args.app_port,
onboarding=args.onboarding,
onboarding_host=args.onboarding_host,
onboarding_port=args.onboarding_port,
anticache=args.anticache,
anticomp=args.anticomp,
@@ -224,8 +109,9 @@ def get_common_options(args):
refresh_server_playback=not args.norefresh,
server_replay_use_headers=args.server_replay_use_headers,
rfile=args.rfile,
replacements=reps,
setheaders=setheaders,
replacements=args.replacements,
replacement_files=args.replacement_files,
setheaders=args.setheaders,
server_replay=args.server_replay,
scripts=args.scripts,
stickycookie=stickycookie,
@@ -251,7 +137,6 @@ def get_common_options(args):
ciphers_client = args.ciphers_client,
ciphers_server = args.ciphers_server,
clientcerts = args.clientcerts,
http2 = args.http2,
ignore_hosts = args.ignore_hosts,
listen_host = args.addr,
listen_port = args.port,
@@ -259,8 +144,12 @@ def get_common_options(args):
mode = mode,
no_upstream_cert = args.no_upstream_cert,
spoof_source_address = args.spoof_source_address,
rawtcp = args.rawtcp,
http2 = args.http2,
http2_priority = args.http2_priority,
websocket = args.websocket,
rawtcp = args.rawtcp,
upstream_server = upstream_server,
upstream_auth = args.upstream_auth,
ssl_version_client = args.ssl_version_client,
@@ -275,13 +164,8 @@ def get_common_options(args):
def basic_options(parser):
parser.add_argument(
'--version',
action='version',
version="%(prog)s" + " " + version.VERSION
)
parser.add_argument(
'--sysinfo',
action='store_true',
dest='sysinfo',
dest='version',
)
parser.add_argument(
'--shortversion',
@@ -453,19 +337,26 @@ def proxy_options(parser):
)
http2 = group.add_mutually_exclusive_group()
http2.add_argument("--http2", action="store_true", dest="http2")
http2.add_argument("--no-http2", action="store_false", dest="http2",
http2.add_argument("--no-http2", action="store_false", dest="http2")
http2.add_argument("--http2", action="store_true", dest="http2",
help="Explicitly enable/disable HTTP/2 support. "
"Disabled by default until major websites implement the spec correctly. "
"Default value will change in a future version."
"HTTP/2 support is enabled by default.",
)
http2_priority = group.add_mutually_exclusive_group()
http2_priority.add_argument("--http2-priority", action="store_true", dest="http2_priority")
http2_priority.add_argument("--no-http2-priority", action="store_false", dest="http2_priority",
help="Explicitly enable/disable PRIORITY forwarding for HTTP/2 connections. "
"PRIORITY forwarding is disabled by default, "
"because some webservers fail at implementing the RFC properly.",
)
websocket = group.add_mutually_exclusive_group()
websocket.add_argument("--no-websocket", action="store_false", dest="websocket",
websocket.add_argument("--no-websocket", action="store_false", dest="websocket")
websocket.add_argument("--websocket", action="store_true", dest="websocket",
help="Explicitly enable/disable WebSocket support. "
"Enabled by default."
"WebSocket support is enabled by default.",
)
websocket.add_argument("--websocket", action="store_true", dest="websocket")
parser.add_argument(
"--upstream-auth",
@@ -574,13 +465,13 @@ def proxy_ssl_options(parser):
def onboarding_app(parser):
group = parser.add_argument_group("Onboarding App")
group.add_argument(
"--noapp",
action="store_false", dest="app",
"--no-onboarding",
action="store_false", dest="onboarding",
help="Disable the mitmproxy onboarding app."
)
group.add_argument(
"--app-host",
action="store", dest="app_host",
"--onboarding-host",
action="store", dest="onboarding_host",
help="""
Domain to serve the onboarding app from. For transparent mode, use
an IP when a DNS entry for the app domain is not present. Default:
@@ -588,9 +479,9 @@ def onboarding_app(parser):
""" % options.APP_HOST
)
group.add_argument(
"--app-port",
"--onboarding-port",
action="store",
dest="app_port",
dest="onboarding_port",
type=int,
metavar="80",
help="Port to serve the onboarding app from."
@@ -682,13 +573,13 @@ def replacements(parser):
)
group.add_argument(
"--replace",
action="append", type=str, dest="replace",
action="append", type=str, dest="replacements",
metavar="PATTERN",
help="Replacement pattern."
)
group.add_argument(
"--replace-from-file",
action="append", type=str, dest="replace_file",
action="append", type=str, dest="replacement_files",
metavar="PATH",
help="""
Replacement pattern, where the replacement clause is a path to a
@@ -708,7 +599,7 @@ def set_headers(parser):
)
group.add_argument(
"--setheader",
action="append", type=str, dest="setheader",
action="append", type=str, dest="setheaders",
metavar="PATTERN",
help="Header set pattern."
)
@@ -768,42 +659,42 @@ def common_options(parser):
def mitmproxy():
# Don't import mitmproxy.tools.console for mitmdump, urwid is not available on all
# platforms.
# Don't import mitmproxy.tools.console for mitmdump, urwid is not available
# on all platforms.
from .console import palettes
parser = argparse.ArgumentParser(usage="%(prog)s [options]")
common_options(parser)
parser.add_argument(
"--palette", type=str,
action="store", dest="palette",
action="store", dest="console_palette",
choices=sorted(palettes.palettes.keys()),
help="Select color palette: " + ", ".join(palettes.palettes.keys())
)
parser.add_argument(
"--palette-transparent",
action="store_true", dest="palette_transparent",
action="store_true", dest="console_palette_transparent",
help="Set transparent background for palette."
)
parser.add_argument(
"-e", "--eventlog",
action="store_true", dest="eventlog",
action="store_true", dest="console_eventlog",
help="Show event log."
)
parser.add_argument(
"--follow",
action="store_true", dest="focus_follow",
action="store_true", dest="console_focus_follow",
help="Focus follows new flows."
)
parser.add_argument(
"--order",
type=str, dest="order",
type=str, dest="console_order",
choices=[o[1] for o in view.orders],
help="Flow sort order."
)
parser.add_argument(
"--no-mouse",
action="store_true", dest="no_mouse",
action="store_true", dest="console_no_mouse",
help="Disable mouse interaction."
)
group = parser.add_argument_group(
@@ -857,24 +748,24 @@ def mitmweb():
group = parser.add_argument_group("Mitmweb")
group.add_argument(
"--no-browser",
action="store_false", dest="open_browser",
action="store_false", dest="web_open_browser",
help="Don't start a browser"
)
group.add_argument(
"--wport",
action="store", type=int, dest="wport",
"--web-port",
action="store", type=int, dest="web_port",
metavar="PORT",
help="Mitmweb port."
)
group.add_argument(
"--wiface",
action="store", dest="wiface",
"--web-iface",
action="store", dest="web_iface",
metavar="IFACE",
help="Mitmweb interface."
)
group.add_argument(
"--wdebug",
action="store_true", dest="wdebug",
"--web-debug",
action="store_true", dest="web_debug",
help="Turn on mitmweb debugging"
)

View File

@@ -1,5 +1,6 @@
import urwid
from mitmproxy import http
from mitmproxy.tools.console import common, searchable
from mitmproxy.utils import human
from mitmproxy.utils import strutils
@@ -12,7 +13,7 @@ def maybe_timestamp(base, attr):
return "active"
def flowdetails(state, flow):
def flowdetails(state, flow: http.HTTPFlow):
text = []
sc = flow.server_conn
@@ -21,7 +22,7 @@ def flowdetails(state, flow):
resp = flow.response
metadata = flow.metadata
if metadata is not None and len(metadata.items()) > 0:
if metadata is not None and len(metadata) > 0:
parts = [[str(k), repr(v)] for k, v in metadata.items()]
text.append(urwid.Text([("head", "Metadata:")]))
text.extend(common.format_keyvals(parts, key="key", val="text", indent=4))
@@ -32,6 +33,8 @@ def flowdetails(state, flow):
["Address", repr(sc.address)],
["Resolved Address", repr(sc.ip_address)],
]
if resp:
parts.append(["HTTP Version", resp.http_version])
if sc.alpn_proto_negotiated:
parts.append(["ALPN", sc.alpn_proto_negotiated])
@@ -91,6 +94,8 @@ def flowdetails(state, flow):
parts = [
["Address", repr(cc.address)],
]
if req:
parts.append(["HTTP Version", req.http_version])
if cc.tls_version:
parts.append(["TLS Version", cc.tls_version])
if cc.sni:

View File

@@ -35,6 +35,7 @@ def _mkhelp():
("W", "stream flows to file"),
("X", "kill and delete flow, even if it's mid-intercept"),
("z", "clear flow list or eventlog"),
("Z", "clear unmarked flows"),
("tab", "tab between eventlog and flow list"),
("enter", "view flow"),
("|", "run script on this flow"),
@@ -337,9 +338,10 @@ class FlowListBox(urwid.ListBox):
)
def new_request(self, url, method):
parts = mitmproxy.net.http.url.parse(str(url))
if not parts:
signals.status_message.send(message="Invalid Url")
try:
parts = mitmproxy.net.http.url.parse(str(url))
except ValueError as e:
signals.status_message.send(message = "Invalid URL: " + str(e))
return
scheme, host, port, path = parts
f = self.master.create_request(method, scheme, host, port, path)
@@ -354,6 +356,8 @@ class FlowListBox(urwid.ListBox):
self.master.view.update(f)
elif key == "z":
self.master.view.clear()
elif key == "Z":
self.master.view.clear_not_marked()
elif key == "e":
self.master.toggle_eventlog()
elif key == "g":
@@ -387,7 +391,7 @@ class FlowListBox(urwid.ListBox):
lookup = dict([(i[0], i[1]) for i in view.orders])
def change_order(k):
self.master.options.order = lookup[k]
self.master.options.console_order = lookup[k]
signals.status_prompt_onekey.send(
prompt = "Order",
@@ -396,10 +400,10 @@ class FlowListBox(urwid.ListBox):
)
elif key == "F":
o = self.master.options
o.focus_follow = not o.focus_follow
o.console_focus_follow = not o.console_focus_follow
elif key == "v":
val = not self.master.options.order_reversed
self.master.options.order_reversed = val
val = not self.master.options.console_order_reversed
self.master.options.console_order_reversed = val
elif key == "W":
if self.master.options.streamfile:
self.master.options.streamfile = None

View File

@@ -65,8 +65,8 @@ class HeaderEditor(base.GridEditor):
class URLEncodedFormEditor(base.GridEditor):
title = "Editing URL-encoded form"
columns = [
col_bytes.Column("Key"),
col_bytes.Column("Value")
col_text.Column("Key"),
col_text.Column("Value")
]

View File

@@ -20,7 +20,6 @@ from mitmproxy import io
from mitmproxy import log
from mitmproxy.addons import view
from mitmproxy.addons import intercept
import mitmproxy.options
from mitmproxy.tools.console import flowlist
from mitmproxy.tools.console import flowview
from mitmproxy.tools.console import grideditor
@@ -33,8 +32,6 @@ from mitmproxy.tools.console import statusbar
from mitmproxy.tools.console import window
from mitmproxy.utils import strutils
from mitmproxy.net import tcp
EVENTLOG_SIZE = 10000
@@ -108,7 +105,7 @@ class ConsoleMaster(master.Master):
self.logbuffer.append(e)
if len(self.logbuffer) > EVENTLOG_SIZE:
self.logbuffer.pop(0)
if self.options.focus_follow:
if self.options.console_focus_follow:
self.logbuffer.set_focus(len(self.logbuffer) - 1)
def sig_call_in(self, sender, seconds, callback, args=()):
@@ -148,11 +145,11 @@ class ConsoleMaster(master.Master):
try:
with self.handlecontext():
sc.run_once(command, [f])
except mitmproxy.exceptions.AddonError as e:
signals.add_log("Script error: %s" % e, "warn")
except ValueError as e:
signals.add_log("Input error: %s" % e, "warn")
def toggle_eventlog(self):
self.options.eventlog = not self.options.eventlog
self.options.console_eventlog = not self.options.console_eventlog
self.view_flowlist()
signals.replace_view_state.send(self)
@@ -232,8 +229,8 @@ class ConsoleMaster(master.Master):
def set_palette(self, options, updated):
self.ui.register_palette(
palettes.palettes[options.palette].palette(
options.palette_transparent
palettes.palettes[options.console_palette].palette(
options.console_palette_transparent
)
)
self.ui.clear()
@@ -255,7 +252,7 @@ class ConsoleMaster(master.Master):
self.loop = urwid.MainLoop(
urwid.SolidFill("x"),
screen = self.ui,
handle_mouse = not self.options.no_mouse,
handle_mouse = not self.options.console_no_mouse,
)
self.ab = statusbar.ActionBar()
@@ -273,14 +270,6 @@ class ConsoleMaster(master.Master):
sys.exit(1)
self.loop.set_alarm_in(0.01, self.ticker)
if self.options.http2 and not tcp.HAS_ALPN: # pragma: no cover
def http2err(*args, **kwargs):
signals.status_message.send(
message = "HTTP/2 disabled - OpenSSL 1.0.2+ required."
" Use --no-http2 to silence this warning.",
expire=5
)
self.loop.set_alarm_in(0.01, http2err)
self.loop.set_alarm_in(
0.0001,
@@ -357,7 +346,7 @@ class ConsoleMaster(master.Master):
if self.ui.started:
self.ui.clear()
if self.options.eventlog:
if self.options.console_eventlog:
body = flowlist.BodyPile(self)
else:
body = flowlist.FlowListBox(self)
@@ -423,7 +412,7 @@ class ConsoleMaster(master.Master):
def websocket_message(self, f):
super().websocket_message(f)
message = f.messages[-1]
signals.add_log(message.info, "info")
signals.add_log(f.message_info(message), "info")
signals.add_log(strutils.bytes_to_escaped_str(message.content), "debug")
@controller.handler

View File

@@ -6,6 +6,9 @@ from mitmproxy.tools.console import grideditor
from mitmproxy.tools.console import select
from mitmproxy.tools.console import signals
from mitmproxy.addons import replace
from mitmproxy.addons import setheaders
footer = [
('heading_key', "enter/space"), ":toggle ",
('heading_key', "C"), ":clear all ",
@@ -75,7 +78,7 @@ class Options(urwid.WidgetWrap):
select.Option(
"Palette",
"P",
checker("palette", master.options),
checker("console_palette", master.options),
self.palette
),
select.Option(
@@ -188,10 +191,16 @@ class Options(urwid.WidgetWrap):
)
def setheaders(self):
data = []
for d in self.master.options.setheaders:
if isinstance(d, str):
data.append(setheaders.parse_setheader(d))
else:
data.append(d)
self.master.view_grideditor(
grideditor.SetHeadersEditor(
self.master,
self.master.options.setheaders,
data,
self.master.options.setter("setheaders")
)
)
@@ -215,10 +224,16 @@ class Options(urwid.WidgetWrap):
)
def replacepatterns(self):
data = []
for d in self.master.options.replacements:
if isinstance(d, str):
data.append(replace.parse_hook(d))
else:
data.append(d)
self.master.view_grideditor(
grideditor.ReplaceEditor(
self.master,
self.master.options.replacements,
data,
self.master.options.setter("replacements")
)
)

View File

@@ -42,7 +42,7 @@ class PalettePicker(urwid.WidgetWrap):
return select.Option(
i,
None,
lambda: self.master.options.palette == name,
lambda: self.master.options.console_palette == name,
lambda: setattr(self.master.options, "palette", name)
)
@@ -58,7 +58,7 @@ class PalettePicker(urwid.WidgetWrap):
select.Option(
"Transparent",
"T",
lambda: master.options.palette_transparent,
lambda: master.options.console_palette_transparent,
master.options.toggler("palette_transparent")
)
]

View File

@@ -204,10 +204,10 @@ class StatusBar(urwid.WidgetWrap):
r.append("[")
r.append(("heading_key", "M"))
r.append(":%s]" % self.master.options.default_contentview)
if self.master.options.order:
if self.master.options.console_order:
r.append("[")
r.append(("heading_key", "o"))
r.append(":%s]" % self.master.options.order)
r.append(":%s]" % self.master.options.console_order)
opts = []
if self.master.options.anticache:
@@ -222,7 +222,7 @@ class StatusBar(urwid.WidgetWrap):
opts.append("killextra")
if self.master.options.no_upstream_cert:
opts.append("no-upstream-cert")
if self.master.options.focus_follow:
if self.master.options.console_focus_follow:
opts.append("following")
if self.master.options.stream_large_bodies:
opts.append(
@@ -258,7 +258,7 @@ class StatusBar(urwid.WidgetWrap):
else:
offset = self.master.view.focus.index + 1
if self.master.options.order_reversed:
if self.master.options.console_order_reversed:
arrow = common.SYMBOL_UP
else:
arrow = common.SYMBOL_DOWN

View File

@@ -6,7 +6,6 @@ from mitmproxy import addons
from mitmproxy import options
from mitmproxy import master
from mitmproxy.addons import dumper, termlog
from mitmproxy.net import tcp
class DumpError(Exception):
@@ -30,7 +29,13 @@ class Options(options.Options):
class DumpMaster(master.Master):
def __init__(self, options, server, with_termlog=True, with_dumper=True):
def __init__(
self,
options: Options,
server,
with_termlog=True,
with_dumper=True,
) -> None:
master.Master.__init__(self, options, server)
self.has_errored = False
if with_termlog:
@@ -38,8 +43,6 @@ class DumpMaster(master.Master):
self.addons.add(*addons.default_addons())
if with_dumper:
self.addons.add(dumper.Dumper())
# This line is just for type hinting
self.options = self.options # type: Options
if not self.options.no_server:
self.add_log(
@@ -47,13 +50,6 @@ class DumpMaster(master.Master):
"info"
)
if self.server and self.options.http2 and not tcp.HAS_ALPN: # pragma: no cover
self.add_log(
"ALPN support missing (OpenSSL 1.0.2+ required)!\n"
"HTTP/2 is disabled. Use --no-http2 to silence this warning.",
"error"
)
if options.rfile:
try:
self.load_flows_file(options.rfile)

View File

@@ -35,9 +35,10 @@ def assert_utf8_env():
def process_options(parser, options, args):
if args.sysinfo:
print(debug.sysinfo())
if args.version:
print(debug.dump_system_info())
sys.exit(0)
debug.register_info_dumpers()
pconf = config.ProxyConfig(options)
if options.no_server:
@@ -69,14 +70,15 @@ def mitmproxy(args=None): # pragma: no cover
console_options.merge(cmdline.get_common_options(args))
console_options.merge(
dict(
palette = args.palette,
palette_transparent = args.palette_transparent,
eventlog = args.eventlog,
focus_follow = args.focus_follow,
intercept = args.intercept,
console_palette = args.console_palette,
console_palette_transparent = args.console_palette_transparent,
console_eventlog = args.console_eventlog,
console_focus_follow = args.console_focus_follow,
console_no_mouse = args.console_no_mouse,
console_order = args.console_order,
filter = args.filter,
no_mouse = args.no_mouse,
order = args.order,
intercept = args.intercept,
)
)
@@ -148,10 +150,10 @@ def mitmweb(args=None): # pragma: no cover
web_options.merge(
dict(
intercept = args.intercept,
open_browser = args.open_browser,
wdebug = args.wdebug,
wiface = args.wiface,
wport = args.wport,
web_open_browser = args.web_open_browser,
web_debug = args.web_debug,
web_iface = args.web_iface,
web_port = args.web_port,
)
)
server = process_options(parser, web_options, args)

View File

@@ -33,6 +33,7 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
"server_conn": flow.server_conn.get_state(),
"type": flow.type,
"modified": flow.modified(),
"marked": flow.marked,
}
# .alpn_proto_negotiated is bytes, we need to decode that.
for conn in "client_conn", "server_conn":
@@ -45,6 +46,12 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
if isinstance(flow, http.HTTPFlow):
if flow.request:
if flow.request.raw_content:
content_length = len(flow.request.raw_content)
content_hash = hashlib.sha256(flow.request.raw_content).hexdigest()
else:
content_length = None
content_hash = None
f["request"] = {
"method": flow.request.method,
"scheme": flow.request.scheme,
@@ -53,24 +60,26 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
"path": flow.request.path,
"http_version": flow.request.http_version,
"headers": tuple(flow.request.headers.items(True)),
"contentLength": len(
flow.request.raw_content) if flow.request.raw_content is not None else None,
"contentHash": hashlib.sha256(
flow.request.raw_content).hexdigest() if flow.request.raw_content is not None else None,
"contentLength": content_length,
"contentHash": content_hash,
"timestamp_start": flow.request.timestamp_start,
"timestamp_end": flow.request.timestamp_end,
"is_replay": flow.request.is_replay,
}
if flow.response:
if flow.response.raw_content:
content_length = len(flow.response.raw_content)
content_hash = hashlib.sha256(flow.response.raw_content).hexdigest()
else:
content_length = None
content_hash = None
f["response"] = {
"http_version": flow.response.http_version,
"status_code": flow.response.status_code,
"reason": flow.response.reason,
"headers": tuple(flow.response.headers.items(True)),
"contentLength": len(
flow.response.raw_content) if flow.response.raw_content is not None else None,
"contentHash": hashlib.sha256(
flow.response.raw_content).hexdigest() if flow.response.raw_content is not None else None,
"contentLength": content_length,
"contentHash": content_hash,
"timestamp_start": flow.response.timestamp_start,
"timestamp_end": flow.response.timestamp_end,
"is_replay": flow.response.is_replay,
@@ -185,7 +194,7 @@ class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler):
@classmethod
def broadcast(cls, **kwargs):
message = json.dumps(kwargs, ensure_ascii=False)
message = json.dumps(kwargs, ensure_ascii=False).encode("utf8", "surrogateescape")
for conn in cls.connections:
try:

View File

@@ -38,7 +38,7 @@ class WebMaster(master.Master):
if with_termlog:
self.addons.add(termlog.TermLog())
self.app = app.Application(
self, self.options.wdebug
self, self.options.web_debug
)
# This line is just for type hinting
self.options = self.options # type: Options
@@ -103,7 +103,7 @@ class WebMaster(master.Master):
iol = tornado.ioloop.IOLoop.instance()
http_server = tornado.httpserver.HTTPServer(self.app)
http_server.listen(self.options.wport, self.options.wiface)
http_server.listen(self.options.web_port, self.options.web_iface)
iol.add_callback(self.start)
tornado.ioloop.PeriodicCallback(lambda: self.tick(timeout=0), 5).start()
@@ -113,13 +113,13 @@ class WebMaster(master.Master):
"info"
)
web_url = "http://{}:{}/".format(self.options.wiface, self.options.wport)
web_url = "http://{}:{}/".format(self.options.web_iface, self.options.web_port)
self.add_log(
"Web server listening at {}".format(web_url),
"info"
)
if self.options.open_browser:
if self.options.web_open_browser:
success = open_browser(web_url)
if not success:
self.add_log(

View File

@@ -564,7 +564,6 @@ footer .label {
.CodeMirror {
border: 1px solid #ccc;
height: auto !important;
max-height: 2048px !important;
}
/* BASICS */

Some files were not shown because too many files have changed in this diff Show More