mirror of
https://github.com/zhigang1992/mitmproxy.git
synced 2026-04-05 22:37:58 +08:00
Compare commits
3 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
be19ac8be6 | ||
|
|
d0411a62ee | ||
|
|
c4e643d3bd |
@@ -28,7 +28,7 @@ install:
|
||||
- "pip install -U tox"
|
||||
|
||||
test_script:
|
||||
- ps: "tox -- --verbose --cov-report=term"
|
||||
- ps: "tox -- --cov mitmproxy --cov pathod -v"
|
||||
- ps: |
|
||||
$Env:VERSION = $(python mitmproxy/version.py)
|
||||
$Env:SKIP_MITMPROXY = "python -c `"print('skip mitmproxy')`""
|
||||
@@ -44,12 +44,12 @@ test_script:
|
||||
if (!(Test-Path "C:\projects\mitmproxy\release\installbuilder-installer.exe")) {
|
||||
"Download InstallBuilder..."
|
||||
(New-Object System.Net.WebClient).DownloadFile(
|
||||
"https://installbuilder.bitrock.com/installbuilder-enterprise-17.1.0-windows-installer.exe",
|
||||
"https://installbuilder.bitrock.com/installbuilder-enterprise-16.11.1-windows-installer.exe",
|
||||
"C:\projects\mitmproxy\release\installbuilder-installer.exe"
|
||||
)
|
||||
}
|
||||
Start-Process "C:\projects\mitmproxy\release\installbuilder-installer.exe" "--mode unattended --unattendedmodeui none" -Wait
|
||||
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 17.1.0\bin\builder-cli.exe' `
|
||||
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 16.11.1\bin\builder-cli.exe' `
|
||||
build `
|
||||
.\release\installbuilder\mitmproxy.xml `
|
||||
windows `
|
||||
|
||||
6
.env
Normal file
6
.env
Normal file
@@ -0,0 +1,6 @@
|
||||
DIR="$( dirname "${BASH_SOURCE[0]}" )"
|
||||
ACTIVATE_DIR="$(if [ -f "$DIR/venv/bin/activate" ]; then echo 'bin'; else echo 'Scripts'; fi;)"
|
||||
if [ -z "$VIRTUAL_ENV" ] && [ -f "$DIR/venv/$ACTIVATE_DIR/activate" ]; then
|
||||
echo "Activating mitmproxy virtualenv..."
|
||||
source "$DIR/venv/$ACTIVATE_DIR/activate"
|
||||
fi
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,2 +1,2 @@
|
||||
mitmproxy/tools/web/static/**/* -diff linguist-vendored
|
||||
mitmproxy/tools/web/static/**/* -diff
|
||||
web/src/js/filt/filt.js -diff
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -19,5 +19,3 @@ bower_components
|
||||
*.map
|
||||
sslkeylogfile.log
|
||||
.tox/
|
||||
.python-version
|
||||
coverage.xml
|
||||
|
||||
10
.travis.yml
10
.travis.yml
@@ -5,8 +5,6 @@ env:
|
||||
global:
|
||||
- CI_DEPS=codecov>=2.0.5
|
||||
- CI_COMMANDS=codecov
|
||||
git:
|
||||
depth: 10000
|
||||
|
||||
matrix:
|
||||
fast_finish: true
|
||||
@@ -43,10 +41,12 @@ matrix:
|
||||
- debian-sid
|
||||
packages:
|
||||
- libssl-dev
|
||||
- python: 3.5
|
||||
env: TOXENV=individual_coverage
|
||||
- python: 3.5
|
||||
env: TOXENV=docs
|
||||
git:
|
||||
depth: 10000
|
||||
allow_failures:
|
||||
- python: pypy
|
||||
|
||||
install:
|
||||
- |
|
||||
@@ -63,7 +63,7 @@ install:
|
||||
- pip install tox
|
||||
|
||||
script:
|
||||
- tox -- --verbose --cov-report=term
|
||||
- tox -- --cov mitmproxy --cov pathod -v
|
||||
- |
|
||||
if [[ $BDIST == "1" ]]
|
||||
then
|
||||
|
||||
39
CHANGELOG
39
CHANGELOG
@@ -1,42 +1,3 @@
|
||||
28 April 2017: mitmproxy 2.0.2
|
||||
|
||||
* Fix mitmweb's Content-Security-Policy to work with Chrome 58+
|
||||
|
||||
* HTTP/2: actually use header normalization from hyper-h2
|
||||
|
||||
|
||||
15 March 2017: mitmproxy 2.0.1
|
||||
|
||||
* bump cryptography dependency
|
||||
|
||||
* bump pyparsing dependency
|
||||
|
||||
* HTTP/2: use header normalization from hyper-h2
|
||||
|
||||
|
||||
21 February 2017: mitmproxy 2.0
|
||||
|
||||
* HTTP/2 is now enabled by default.
|
||||
|
||||
* Image ContentView: Parse images with Kaitai Struct (kaitai.io) instead of Pillow.
|
||||
This simplifies installation, reduces binary size, and allows parsing in pure Python.
|
||||
|
||||
* Web: Add missing flow filters.
|
||||
|
||||
* Add transparent proxy support for OpenBSD.
|
||||
|
||||
* Check the mitmproxy CA for expiration and warn the user to regenerate it if necessary.
|
||||
|
||||
* Testing: Tremendous improvements, enforced 100% coverage for large parts of the
|
||||
codebase, increased overall coverage.
|
||||
|
||||
* Enforce individual coverage: one source file -> one test file with 100% coverage.
|
||||
|
||||
* A myriad of other small improvements throughout the project.
|
||||
|
||||
* Numerous bugfixes.
|
||||
|
||||
|
||||
26 December 2016: mitmproxy 1.0
|
||||
|
||||
* All mitmproxy tools are now Python 3 only! We plan to support Python 3.5 and higher.
|
||||
|
||||
94
README.rst
94
README.rst
@@ -10,8 +10,6 @@ interface.
|
||||
|
||||
``mitmdump`` is the command-line version of mitmproxy. Think tcpdump for HTTP.
|
||||
|
||||
``mitmweb`` is a web-based interface for mitmproxy.
|
||||
|
||||
``pathoc`` and ``pathod`` are perverse HTTP client and server applications
|
||||
designed to let you craft almost any conceivable HTTP request, including ones
|
||||
that creatively violate the standards.
|
||||
@@ -37,7 +35,7 @@ each other solve problems, and come up with new ideas for the project.
|
||||
|mitmproxy_discourse|
|
||||
|
||||
|
||||
Join our developer chat on Slack if you would like to contribute to mitmproxy itself.
|
||||
Join our developer chat on Slack if you would like to hack on mitmproxy itself.
|
||||
|
||||
|slack|
|
||||
|
||||
@@ -48,64 +46,72 @@ Installation
|
||||
The installation instructions are `here <http://docs.mitmproxy.org/en/stable/install.html>`__.
|
||||
If you want to contribute changes, keep on reading.
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
As an open source project, mitmproxy welcomes contributions of all forms. If you would like to bring the project forward,
|
||||
please consider contributing in the following areas:
|
||||
Hacking
|
||||
-------
|
||||
|
||||
- **Maintenance:** We are *incredibly* thankful for individuals who are stepping up and helping with maintenance. This includes (but is not limited to) triaging issues, reviewing pull requests and picking up stale ones, helping out other users in our forums_, creating minimal, complete and verifiable examples or test cases for existing bug reports, updating documentation, or fixing minor bugs that have recently been reported.
|
||||
- **Code Contributions:** We actively mark issues that we consider are `good first contributions`_. If you intend to work on a larger contribution to the project, please come talk to us first.
|
||||
|
||||
Development Setup
|
||||
-----------------
|
||||
|
||||
To get started hacking on mitmproxy, please follow the `advanced installation`_ steps to install mitmproxy from source, but stop right before running ``pip3 install mitmproxy``. Instead, do the following:
|
||||
To get started hacking on mitmproxy, make sure you have Python_ 3.5.x or above with
|
||||
virtualenv_ installed (you can find installation instructions for virtualenv
|
||||
`here <http://virtualenv.readthedocs.org/en/latest/>`__). Then do the following:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
git clone https://github.com/mitmproxy/mitmproxy.git
|
||||
cd mitmproxy
|
||||
./dev.sh # "powershell .\dev.ps1" on Windows
|
||||
./dev.sh # powershell .\dev.ps1 on Windows
|
||||
|
||||
|
||||
The *dev* script will create a `virtualenv`_ environment in a directory called "venv"
|
||||
and install all mandatory and optional dependencies into it. The primary
|
||||
mitmproxy components - mitmproxy and pathod - are installed as
|
||||
The *dev* script will create a virtualenv environment in a directory called
|
||||
"venv", and install all mandatory and optional dependencies into it. The
|
||||
primary mitmproxy components - mitmproxy and pathod - are installed as
|
||||
"editable", so any changes to the source in the repository will be reflected
|
||||
live in the virtualenv.
|
||||
|
||||
The main executables for the project - ``mitmdump``, ``mitmproxy``,
|
||||
To confirm that you're up and running, activate the virtualenv, and run the
|
||||
mitmproxy test suite:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
. venv/bin/activate # venv\Scripts\activate on Windows
|
||||
py.test
|
||||
|
||||
Note that the main executables for the project - ``mitmdump``, ``mitmproxy``,
|
||||
``mitmweb``, ``pathod``, and ``pathoc`` - are all created within the
|
||||
virtualenv. After activating the virtualenv, they will be on your $PATH, and
|
||||
you can run them like any other command:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
. venv/bin/activate # "venv\Scripts\activate" on Windows
|
||||
mitmdump --version
|
||||
|
||||
For convenience, the project includes an autoenv_ file (`.env`_) that
|
||||
auto-activates the virtualenv when you cd into the mitmproxy directory.
|
||||
|
||||
|
||||
Testing
|
||||
-------
|
||||
|
||||
If you've followed the procedure above, you already have all the development
|
||||
requirements installed, and you can run the full test suite (including tests for code style and documentation) with tox_:
|
||||
requirements installed, and you can simply run the test suite:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
tox
|
||||
|
||||
For speedier testing, we recommend you run `pytest`_ directly on individual test files or folders:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
cd test/mitmproxy/addons
|
||||
pytest --cov mitmproxy.addons.anticache --looponfail test_anticache.py
|
||||
|
||||
As pytest does not check the code style, you probably want to run ``tox -e lint`` before committing your changes.
|
||||
py.test
|
||||
|
||||
Please ensure that all patches are accompanied by matching changes in the test
|
||||
suite. The project tries to maintain 100% test coverage and enforces this strictly for some parts of the codebase.
|
||||
suite. The project tries to maintain 100% test coverage.
|
||||
|
||||
You can also use `tox` to run the full suite of tests, including a quick test
|
||||
to check documentation and code linting.
|
||||
|
||||
The following tox environments are relevant for local testing:
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
tox -e py35 # runs all tests with Python 3.5
|
||||
tox -e docs # runs a does-it-compile check on the documentation
|
||||
tox -e lint # runs the linter for coding style checks
|
||||
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
@@ -124,8 +130,8 @@ installation, you can render the documentation like this:
|
||||
The last command invokes `sphinx-autobuild`_, which watches the Sphinx directory and rebuilds
|
||||
the documentation when a change is detected.
|
||||
|
||||
Code Style
|
||||
----------
|
||||
Style
|
||||
-----
|
||||
|
||||
Keeping to a consistent code style throughout the project makes it easier to
|
||||
contribute and collaborate. Please stick to the guidelines in
|
||||
@@ -145,7 +151,7 @@ with the following command:
|
||||
:target: https://mitmproxy.org/
|
||||
:alt: mitmproxy.org
|
||||
|
||||
.. |mitmproxy_docs| image:: https://shields.mitmproxy.org/api/docs-latest-brightgreen.svg
|
||||
.. |mitmproxy_docs| image:: https://readthedocs.org/projects/mitmproxy/badge/
|
||||
:target: http://docs.mitmproxy.org/en/latest/
|
||||
:alt: mitmproxy documentation
|
||||
|
||||
@@ -157,15 +163,15 @@ with the following command:
|
||||
:target: http://slack.mitmproxy.org/
|
||||
:alt: Slack Developer Chat
|
||||
|
||||
.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=travis%20ci
|
||||
.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=Travis%20build
|
||||
:target: https://travis-ci.org/mitmproxy/mitmproxy
|
||||
:alt: Travis Build Status
|
||||
|
||||
.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=appveyor%20ci
|
||||
.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=Appveyor%20build
|
||||
:target: https://ci.appveyor.com/project/mhils/mitmproxy
|
||||
:alt: Appveyor Build Status
|
||||
|
||||
.. |coverage| image:: https://shields.mitmproxy.org/codecov/c/github/mitmproxy/mitmproxy/master.svg?label=codecov
|
||||
.. |coverage| image:: https://codecov.io/gh/mitmproxy/mitmproxy/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/mitmproxy/mitmproxy
|
||||
:alt: Coverage Status
|
||||
|
||||
@@ -177,13 +183,11 @@ with the following command:
|
||||
:target: https://pypi.python.org/pypi/mitmproxy
|
||||
:alt: Supported Python versions
|
||||
|
||||
.. _`advanced installation`: http://docs.mitmproxy.org/en/latest/install.html#advanced-installation
|
||||
.. _virtualenv: https://virtualenv.pypa.io/
|
||||
.. _`pytest`: http://pytest.org/
|
||||
.. _tox: https://tox.readthedocs.io/
|
||||
.. _Python: https://www.python.org/
|
||||
.. _virtualenv: http://virtualenv.readthedocs.org/en/latest/
|
||||
.. _autoenv: https://github.com/kennethreitz/autoenv
|
||||
.. _.env: https://github.com/mitmproxy/mitmproxy/blob/master/.env
|
||||
.. _Sphinx: http://sphinx-doc.org/
|
||||
.. _sphinx-autobuild: https://pypi.python.org/pypi/sphinx-autobuild
|
||||
.. _PEP8: https://www.python.org/dev/peps/pep-0008
|
||||
.. _`Google Style Guide`: https://google.github.io/styleguide/pyguide.html
|
||||
.. _forums: https://discourse.mitmproxy.org/
|
||||
.. _`good first contributions`: https://github.com/mitmproxy/mitmproxy/issues?q=is%3Aissue+is%3Aopen+label%3Agood-first-contribution
|
||||
.. _Google Style Guide: https://google.github.io/styleguide/pyguide.html
|
||||
|
||||
7
dev.ps1
7
dev.ps1
@@ -1,19 +1,20 @@
|
||||
$ErrorActionPreference = "Stop"
|
||||
$VENV = ".\venv"
|
||||
|
||||
$pyver = python --version
|
||||
if($pyver -notmatch "3\.[5-9]") {
|
||||
Write-Warning "Unexpected Python version, expected Python 3.5 or above: $pyver"
|
||||
}
|
||||
|
||||
python -m venv .\venv --copies
|
||||
& .\venv\Scripts\activate.ps1
|
||||
python -m venv $VENV --copies
|
||||
& $VENV\Scripts\activate.ps1
|
||||
|
||||
python -m pip install --disable-pip-version-check -U pip
|
||||
cmd /c "pip install -r requirements.txt 2>&1"
|
||||
|
||||
echo @"
|
||||
|
||||
* Created virtualenv environment in .\venv.
|
||||
* Created virtualenv environment in $VENV.
|
||||
* Installed all dependencies into the virtualenv.
|
||||
* Activated virtualenv environment.
|
||||
|
||||
|
||||
18
dev.sh
18
dev.sh
@@ -2,14 +2,16 @@
|
||||
set -e
|
||||
set -x
|
||||
|
||||
echo "Creating dev environment in ./venv..."
|
||||
PYVERSION=${1:-3.5}
|
||||
VENV="venv$PYVERSION"
|
||||
|
||||
python3 -m venv venv
|
||||
. venv/bin/activate
|
||||
pip3 install -U pip setuptools
|
||||
pip3 install -r requirements.txt
|
||||
echo "Creating dev environment in $VENV using Python $PYVERSION"
|
||||
|
||||
python$PYVERSION -m venv "$VENV"
|
||||
. "$VENV/bin/activate"
|
||||
pip$PYVERSION install -U pip setuptools
|
||||
pip$PYVERSION install -r requirements.txt
|
||||
|
||||
echo ""
|
||||
echo " * Created virtualenv environment in ./venv."
|
||||
echo " * Installed all dependencies into the virtualenv."
|
||||
echo " * You can now activate the $(python3 --version) virtualenv with this command: \`. venv/bin/activate\`"
|
||||
echo "* Virtualenv created in $VENV and all dependencies installed."
|
||||
echo "* You can now activate the $(python --version) virtualenv with this command: \`. $VENV/bin/activate\`"
|
||||
|
||||
@@ -40,9 +40,7 @@ start of mitmproxy.
|
||||
iOS
|
||||
^^^
|
||||
|
||||
See http://jasdev.me/intercepting-ios-traffic
|
||||
|
||||
and http://web.archive.org/web/20150920082614/http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377
|
||||
http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377
|
||||
|
||||
iOS Simulator
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
14
docs/dev/architecture.rst
Normal file
14
docs/dev/architecture.rst
Normal file
@@ -0,0 +1,14 @@
|
||||
.. _architecture:
|
||||
|
||||
Architecture
|
||||
============
|
||||
|
||||
To give you a better understanding of how mitmproxy works, mitmproxy's
|
||||
high-level architecture is detailed in the following graphic:
|
||||
|
||||
.. image:: ../schematics/architecture.png
|
||||
|
||||
:download:`architecture.pdf <../schematics/architecture.pdf>`
|
||||
|
||||
Please don't refrain from asking any further
|
||||
questions on the mailing list, the Slack channel or the GitHub issue tracker.
|
||||
@@ -1,11 +0,0 @@
|
||||
.. _contributing:
|
||||
|
||||
Contributing
|
||||
============
|
||||
|
||||
As an open source project, **mitmproxy** welcomes contributions of all forms.
|
||||
|
||||
Please head over to the README_ to get started! 😃
|
||||
|
||||
|
||||
.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst
|
||||
47
docs/dev/testing.rst
Normal file
47
docs/dev/testing.rst
Normal file
@@ -0,0 +1,47 @@
|
||||
.. _testing:
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
All the mitmproxy projects strive to maintain 100% code coverage. In general,
|
||||
patches and pull requests will be declined unless they're accompanied by a
|
||||
suitable extension to the test suite.
|
||||
|
||||
Our tests are written for the `py.test`_ or nose_ test frameworks.
|
||||
At the point where you send your pull request, a command like this:
|
||||
|
||||
>>> py.test --cov mitmproxy
|
||||
|
||||
Should give output something like this:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
> ---------- coverage: platform darwin, python 2.7.2-final-0 --
|
||||
> Name Stmts Miss Cover Missing
|
||||
> ----------------------------------------------------
|
||||
> mitmproxy/__init__ 0 0 100%
|
||||
> mitmproxy/app 4 0 100%
|
||||
> mitmproxy/cmdline 100 0 100%
|
||||
> mitmproxy/controller 69 0 100%
|
||||
> mitmproxy/dump 150 0 100%
|
||||
> mitmproxy/encoding 39 0 100%
|
||||
> mitmproxy/flowfilter 201 0 100%
|
||||
> mitmproxy/flow 891 0 100%
|
||||
> mitmproxy/proxy 427 0 100%
|
||||
> mitmproxy/script 27 0 100%
|
||||
> mitmproxy/utils 133 0 100%
|
||||
> mitmproxy/version 4 0 100%
|
||||
> ----------------------------------------------------
|
||||
> TOTAL 2045 0 100%
|
||||
> ----------------------------------------------------
|
||||
> Ran 251 tests in 11.864s
|
||||
|
||||
|
||||
There are exceptions to the coverage requirement - for instance, much of the
|
||||
console interface code can't sensibly be unit tested. These portions are
|
||||
excluded from coverage analysis either in the **.coveragerc** file, or using
|
||||
**#pragma no-cover** directives. To keep our coverage analysis relevant, we use
|
||||
these measures as sparingly as possible.
|
||||
|
||||
.. _nose: https://nose.readthedocs.org/en/latest/
|
||||
.. _py.test: https://pytest.org/
|
||||
@@ -33,19 +33,6 @@ updated in a similar way.
|
||||
You can turn off response refreshing using the ``--norefresh`` argument, or using
|
||||
the :kbd:`o` options shortcut within :program:`mitmproxy`.
|
||||
|
||||
|
||||
Replaying a session recorded in Reverse-proxy Mode
|
||||
--------------------------------------------------
|
||||
|
||||
If you have captured the session in reverse proxy mode, in order to replay it you
|
||||
still have to specify the server URL, otherwise you may get the error:
|
||||
'HTTP protocol error in client request: Invalid HTTP request form (expected authority or absolute...)'.
|
||||
|
||||
During replay, when the client's requests match previously recorded requests, then the
|
||||
respective recorded responses are simply replayed by mitmproxy.
|
||||
Otherwise, the unmatched requests is forwarded to the upstream server.
|
||||
If forwarding is not desired, you can use the --kill (-k) switch to prevent that.
|
||||
|
||||
================== ===========
|
||||
command-line ``-S path``
|
||||
mitmproxy shortcut :kbd:`R` then :kbd:`s`
|
||||
|
||||
@@ -47,7 +47,6 @@
|
||||
transparent
|
||||
transparent/linux
|
||||
transparent/osx
|
||||
transparent/openbsd
|
||||
|
||||
.. toctree::
|
||||
:hidden:
|
||||
@@ -79,9 +78,10 @@
|
||||
|
||||
.. toctree::
|
||||
:hidden:
|
||||
:caption: Development
|
||||
:caption: Hacking
|
||||
|
||||
dev/contributing
|
||||
dev/architecture
|
||||
dev/testing
|
||||
dev/sslkeylogfile
|
||||
|
||||
.. Indices and tables
|
||||
|
||||
@@ -20,7 +20,7 @@ You can use Homebrew to install everything:
|
||||
|
||||
brew install mitmproxy
|
||||
|
||||
Or you can download the pre-built binary packages from our `releases`_.
|
||||
Or you can download the pre-built binary packages from `mitmproxy.org`_.
|
||||
|
||||
|
||||
.. _install-windows:
|
||||
@@ -35,7 +35,7 @@ Both executables are added to your PATH and can be invoked from the command
|
||||
line.
|
||||
|
||||
.. note::
|
||||
Mitmproxy's console interface is not supported on Windows, but you can use
|
||||
mitmproxy's console interface is not supported on Windows, but you can use
|
||||
mitmweb (the web-based interface) and mitmdump.
|
||||
|
||||
.. _install-linux:
|
||||
@@ -44,7 +44,7 @@ Installation on Linux
|
||||
---------------------
|
||||
|
||||
The recommended way to run mitmproxy on Linux is to use the pre-built binaries
|
||||
provided at `releases`_.
|
||||
provided at `mitmproxy.org`_.
|
||||
|
||||
Our pre-built binaries provide you with the latest version of mitmproxy, a
|
||||
self-contained Python 3.5 environment and a recent version of OpenSSL that
|
||||
@@ -85,7 +85,7 @@ libraries. This was tested on a fully patched installation of Ubuntu 16.04.
|
||||
|
||||
.. code:: bash
|
||||
|
||||
sudo apt-get install python3-dev python3-pip libffi-dev libssl-dev
|
||||
sudo apt-get install python3-pip python3-dev libffi-dev libssl-dev libtiff5-dev libjpeg8-dev zlib1g-dev libwebp-dev
|
||||
sudo pip3 install mitmproxy # or pip3 install --user mitmproxy
|
||||
|
||||
On older Ubuntu versions, e.g., **12.04** and **14.04**, you may need to install
|
||||
@@ -104,7 +104,7 @@ libraries. This was tested on a fully patched installation of Fedora 24.
|
||||
|
||||
.. code:: bash
|
||||
|
||||
sudo dnf install make gcc redhat-rpm-config python3-devel python3-pip libffi-devel openssl-devel
|
||||
sudo dnf install make gcc redhat-rpm-config python3-pip python3-devel libffi-devel openssl-devel libtiff-devel libjpeg-devel zlib-devel libwebp-devel openjpeg2-devel
|
||||
sudo pip3 install mitmproxy # or pip3 install --user mitmproxy
|
||||
|
||||
Make sure to have an up-to-date version of pip by running ``pip3 install -U pip``.
|
||||
@@ -117,13 +117,13 @@ Make sure to have an up-to-date version of pip by running ``pip3 install -U pip`
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. note::
|
||||
Mitmproxy's console interface is not supported on Windows, but you can use
|
||||
mitmproxy's console interface is not supported on Windows, but you can use
|
||||
mitmweb (the web-based interface) and mitmdump.
|
||||
|
||||
First, install the latest version of Python 3.5 or later from the `Python
|
||||
website`_. During installation, make sure to select `Add Python to PATH`.
|
||||
|
||||
Mitmproxy has no other dependencies on Windows. You can now install mitmproxy by running
|
||||
Now, you can install mitmproxy by running
|
||||
|
||||
.. code:: powershell
|
||||
|
||||
@@ -139,12 +139,11 @@ Latest Development Version
|
||||
If you would like to install mitmproxy directly from the master branch on GitHub
|
||||
or would like to get set up to contribute to the project, install the
|
||||
dependencies as you would for a regular installation from source. Then see the
|
||||
project's README_ on GitHub. You can check your system information
|
||||
by running: ``mitmproxy --version``
|
||||
Hacking_ section of the README on GitHub. You can check your system information
|
||||
by running: ``mitmproxy --sysinfo``
|
||||
|
||||
|
||||
.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst
|
||||
.. _releases: https://github.com/mitmproxy/mitmproxy/releases
|
||||
.. _Hacking: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst#hacking
|
||||
.. _mitmproxy.org: https://mitmproxy.org/
|
||||
.. _`Python website`: https://www.python.org/downloads/windows/
|
||||
.. _pip: https://pip.pypa.io/en/latest/installing.html
|
||||
|
||||
BIN
docs/schematics/architecture.pdf
Normal file
BIN
docs/schematics/architecture.pdf
Normal file
Binary file not shown.
BIN
docs/schematics/architecture.png
Normal file
BIN
docs/schematics/architecture.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 85 KiB |
BIN
docs/schematics/architecture.vsdx
Normal file
BIN
docs/schematics/architecture.vsdx
Normal file
Binary file not shown.
@@ -27,7 +27,7 @@ Fully transparent mode
|
||||
By default mitmproxy will use its own local ip address for its server-side connections.
|
||||
In case this isn't desired, the --spoof-source-address argument can be used to
|
||||
use the client's ip address for server-side connections. The following config is
|
||||
required for this mode to work::
|
||||
required for this mode to work:
|
||||
|
||||
CLIENT_NET=192.168.1.0/24
|
||||
TABLE_ID=100
|
||||
@@ -42,9 +42,9 @@ required for this mode to work::
|
||||
|
||||
This mode does require root privileges though. There's a wrapper in the examples directory
|
||||
called 'mitmproxy_shim.c', which will enable you to use this mode with dropped priviliges.
|
||||
It can be used as follows::
|
||||
It can be used as follows:
|
||||
|
||||
gcc examples/complex/full_transparency_shim.c -o mitmproxy_shim -lcap
|
||||
gcc examples/mitmproxy_shim.c -o mitmproxy_shim -lcap
|
||||
sudo chown root:root mitmproxy_shim
|
||||
sudo chmod u+s mitmproxy_shim
|
||||
./mitmproxy_shim $(which mitmproxy) -T --spoof-source-address
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
.. _openbsd:
|
||||
|
||||
OpenBSD
|
||||
=======
|
||||
|
||||
1. :ref:`Install the mitmproxy certificate on the test device <certinstall>`
|
||||
|
||||
2. Enable IP forwarding:
|
||||
|
||||
>>> sudo sysctl -w net.inet.ip.forwarding=1
|
||||
|
||||
3. Place the following two lines in **/etc/pf.conf**:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
mitm_if = "re2"
|
||||
pass in quick proto tcp from $mitm_if to port { 80, 443 } divert-to 127.0.0.1 port 8080
|
||||
|
||||
These rules tell pf to divert all traffic from ``$mitm_if`` destined for
|
||||
port 80 or 443 to the local mitmproxy instance running on port 8080. You
|
||||
should replace ``$mitm_if`` value with the interface on which your test
|
||||
device will appear.
|
||||
|
||||
4. Configure pf with the rules:
|
||||
|
||||
>>> doas pfctl -f /etc/pf.conf
|
||||
|
||||
5. And now enable it:
|
||||
|
||||
>>> doas pfctl -e
|
||||
|
||||
6. Fire up mitmproxy. You probably want a command like this:
|
||||
|
||||
>>> mitmproxy -T --host
|
||||
|
||||
The ``-T`` flag turns on transparent mode, and the ``--host``
|
||||
argument tells mitmproxy to use the value of the Host header for URL display.
|
||||
|
||||
7. Finally, configure your test device to use the host on which mitmproxy is
|
||||
running as the default gateway.
|
||||
|
||||
.. note::
|
||||
|
||||
Note that the **divert-to** rules in the pf.conf given above only apply to
|
||||
inbound traffic. **This means that they will NOT redirect traffic coming
|
||||
from the box running pf itself.** We can't distinguish between an outbound
|
||||
connection from a non-mitmproxy app, and an outbound connection from
|
||||
mitmproxy itself - if you want to intercept your traffic, you should use an
|
||||
external host to run mitmproxy. Nonetheless, pf is flexible to cater for a
|
||||
range of creative possibilities, like intercepting traffic emanating from
|
||||
VMs. See the **pf.conf** man page for more.
|
||||
|
||||
.. _pf: http://man.openbsd.org/OpenBSD-current/man5/pf.conf.5
|
||||
@@ -63,7 +63,7 @@ Note that this means we don't support transparent mode for earlier versions of O
|
||||
running pf itself.** We can't distinguish between an outbound connection from a
|
||||
non-mitmproxy app, and an outbound connection from mitmproxy itself - if you
|
||||
want to intercept your OSX traffic, you should use an external host to run
|
||||
mitmproxy. Nonetheless, pf is flexible to cater for a range of creative
|
||||
mitmproxy. None the less, pf is flexible to cater for a range of creative
|
||||
possibilities, like intercepting traffic emanating from VMs. See the
|
||||
**pf.conf** man page for more.
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
| nonblocking.py | Demonstrate parallel processing with a blocking script |
|
||||
| remote_debug.py | This script enables remote debugging of the mitmproxy _UI_ with PyCharm. |
|
||||
| sslstrip.py | sslstrip-like funtionality implemented with mitmproxy |
|
||||
| stream.py | Enable streaming for all responses. |
|
||||
| stream | Enable streaming for all responses. |
|
||||
| stream_modify.py | Modify a streamed response body. |
|
||||
| tcp_message.py | Modify a raw TCP connection |
|
||||
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |
|
||||
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |
|
||||
@@ -1,5 +1,5 @@
|
||||
"""
|
||||
This script makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
|
||||
This inline scripts makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
|
||||
connections to mitmproxy. The way this works is that we rely on either the TLS Server Name Indication (SNI) or the
|
||||
Host header of the HTTP request.
|
||||
Of course, this is not foolproof - if an HTTPS connection comes without SNI, we don't
|
||||
@@ -28,35 +28,22 @@ import re
|
||||
parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
|
||||
|
||||
|
||||
class Rerouter:
|
||||
def requestheaders(self, flow):
|
||||
"""
|
||||
The original host header is retrieved early
|
||||
before flow.request is replaced by mitmproxy new outgoing request
|
||||
"""
|
||||
flow.metadata["original_host"] = flow.request.host_header
|
||||
def request(flow):
|
||||
if flow.client_conn.ssl_established:
|
||||
flow.request.scheme = "https"
|
||||
sni = flow.client_conn.connection.get_servername()
|
||||
port = 443
|
||||
else:
|
||||
flow.request.scheme = "http"
|
||||
sni = None
|
||||
port = 80
|
||||
|
||||
def request(self, flow):
|
||||
if flow.client_conn.ssl_established:
|
||||
flow.request.scheme = "https"
|
||||
sni = flow.client_conn.connection.get_servername()
|
||||
port = 443
|
||||
else:
|
||||
flow.request.scheme = "http"
|
||||
sni = None
|
||||
port = 80
|
||||
host_header = flow.request.pretty_host
|
||||
m = parse_host_header.match(host_header)
|
||||
if m:
|
||||
host_header = m.group("host").strip("[]")
|
||||
if m.group("port"):
|
||||
port = int(m.group("port"))
|
||||
|
||||
host_header = flow.metadata["original_host"]
|
||||
m = parse_host_header.match(host_header)
|
||||
if m:
|
||||
host_header = m.group("host").strip("[]")
|
||||
if m.group("port"):
|
||||
port = int(m.group("port"))
|
||||
|
||||
flow.request.host_header = host_header
|
||||
flow.request.host = sni or host_header
|
||||
flow.request.port = port
|
||||
|
||||
|
||||
def start():
|
||||
return Rerouter()
|
||||
flow.request.host = sni or host_header
|
||||
flow.request.port = port
|
||||
|
||||
@@ -7,7 +7,6 @@ import json
|
||||
import sys
|
||||
import base64
|
||||
import zlib
|
||||
import os
|
||||
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
@@ -167,7 +166,7 @@ def done():
|
||||
if dump_file.endswith('.zhar'):
|
||||
raw = zlib.compress(raw, 9)
|
||||
|
||||
with open(os.path.expanduser(dump_file), "wb") as f:
|
||||
with open(dump_file, "wb") as f:
|
||||
f.write(raw)
|
||||
|
||||
mitmproxy.ctx.log("HAR dump finished (wrote %s bytes to file)" % len(json_dump))
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
| filter_flows.py | This script demonstrates how to use mitmproxy's filter pattern in scripts. |
|
||||
| io_read_dumpfile.py | Read a dumpfile generated by mitmproxy. |
|
||||
| io_write_dumpfile.py | Only write selected flows into a mitmproxy dumpfile. |
|
||||
| log_events.py | Use mitmproxy's logging API. |
|
||||
| logging.py | Use mitmproxy's logging API. |
|
||||
| modify_body_inject_iframe.py | Inject configurable iframe into pages. |
|
||||
| modify_form.py | Modify HTTP form submissions. |
|
||||
| modify_querystring.py | Modify HTTP query strings. |
|
||||
@@ -15,4 +15,4 @@
|
||||
| script_arguments.py | Add arguments to a script. |
|
||||
| send_reply_from_proxy.py | Send a HTTP response directly from the proxy. |
|
||||
| upsidedownternet.py | Turn all images upside down. |
|
||||
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |
|
||||
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |
|
||||
@@ -10,7 +10,7 @@ class ViewSwapCase(contentviews.View):
|
||||
|
||||
# We don't have a good solution for the keyboard shortcut yet -
|
||||
# you manually need to find a free letter. Contributions welcome :)
|
||||
prompt = ("swap case text", "z")
|
||||
prompt = ("swap case text", "p")
|
||||
content_types = ["text/plain"]
|
||||
|
||||
def __call__(self, data: bytes, **metadata):
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
|
||||
|
||||
<!--
|
||||
Cut and paste the output of "mitmproxy --version".
|
||||
Cut and paste the output of "mitmdump --sysinfo".
|
||||
|
||||
If you're using an older version if mitmproxy, please specify the version
|
||||
and OS.
|
||||
|
||||
@@ -1,40 +1,35 @@
|
||||
from mitmproxy.addons import anticache
|
||||
from mitmproxy.addons import anticomp
|
||||
from mitmproxy.addons import check_alpn
|
||||
from mitmproxy.addons import check_ca
|
||||
from mitmproxy.addons import clientplayback
|
||||
from mitmproxy.addons import disable_h2c_upgrade
|
||||
from mitmproxy.addons import streamfile
|
||||
from mitmproxy.addons import onboarding
|
||||
from mitmproxy.addons import proxyauth
|
||||
from mitmproxy.addons import replace
|
||||
from mitmproxy.addons import script
|
||||
from mitmproxy.addons import serverplayback
|
||||
from mitmproxy.addons import setheaders
|
||||
from mitmproxy.addons import serverplayback
|
||||
from mitmproxy.addons import stickyauth
|
||||
from mitmproxy.addons import stickycookie
|
||||
from mitmproxy.addons import streambodies
|
||||
from mitmproxy.addons import streamfile
|
||||
from mitmproxy.addons import upstream_auth
|
||||
from mitmproxy.addons import disable_h2c_upgrade
|
||||
|
||||
|
||||
def default_addons():
|
||||
return [
|
||||
anticache.AntiCache(),
|
||||
anticomp.AntiComp(),
|
||||
check_alpn.CheckALPN(),
|
||||
check_ca.CheckCA(),
|
||||
clientplayback.ClientPlayback(),
|
||||
disable_h2c_upgrade.DisableH2CleartextUpgrade(),
|
||||
onboarding.Onboarding(),
|
||||
proxyauth.ProxyAuth(),
|
||||
replace.Replace(),
|
||||
replace.ReplaceFile(),
|
||||
script.ScriptLoader(),
|
||||
serverplayback.ServerPlayback(),
|
||||
setheaders.SetHeaders(),
|
||||
anticache.AntiCache(),
|
||||
anticomp.AntiComp(),
|
||||
stickyauth.StickyAuth(),
|
||||
stickycookie.StickyCookie(),
|
||||
streambodies.StreamBodies(),
|
||||
script.ScriptLoader(),
|
||||
streamfile.StreamFile(),
|
||||
streambodies.StreamBodies(),
|
||||
replace.Replace(),
|
||||
setheaders.SetHeaders(),
|
||||
serverplayback.ServerPlayback(),
|
||||
clientplayback.ClientPlayback(),
|
||||
upstream_auth.UpstreamAuth(),
|
||||
disable_h2c_upgrade.DisableH2CleartextUpgrade(),
|
||||
]
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import mitmproxy
|
||||
from mitmproxy.net import tcp
|
||||
|
||||
|
||||
class CheckALPN:
|
||||
def __init__(self):
|
||||
self.failed = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.failed = mitmproxy.ctx.master.options.http2 and not tcp.HAS_ALPN
|
||||
if self.failed:
|
||||
mitmproxy.ctx.master.add_log(
|
||||
"HTTP/2 is disabled because ALPN support missing!\n"
|
||||
"OpenSSL 1.0.2+ required to support HTTP/2 connections.\n"
|
||||
"Use --no-http2 to silence this warning.",
|
||||
"warn",
|
||||
)
|
||||
@@ -1,24 +0,0 @@
|
||||
import mitmproxy
|
||||
|
||||
|
||||
class CheckCA:
|
||||
def __init__(self):
|
||||
self.failed = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
has_ca = (
|
||||
mitmproxy.ctx.master.server and
|
||||
mitmproxy.ctx.master.server.config and
|
||||
mitmproxy.ctx.master.server.config.certstore and
|
||||
mitmproxy.ctx.master.server.config.certstore.default_ca
|
||||
)
|
||||
if has_ca:
|
||||
self.failed = mitmproxy.ctx.master.server.config.certstore.default_ca.has_expired()
|
||||
if self.failed:
|
||||
mitmproxy.ctx.master.add_log(
|
||||
"The mitmproxy certificate authority has expired!\n"
|
||||
"Please delete all CA-related files in your ~/.mitmproxy folder.\n"
|
||||
"The CA will be regenerated automatically after restarting mitmproxy.\n"
|
||||
"Then make sure all your clients have the new CA installed.",
|
||||
"warn",
|
||||
)
|
||||
@@ -2,7 +2,6 @@ import itertools
|
||||
import sys
|
||||
|
||||
import click
|
||||
import shutil
|
||||
|
||||
import typing # noqa
|
||||
|
||||
@@ -125,9 +124,6 @@ class Dumper:
|
||||
url = flow.request.pretty_url
|
||||
else:
|
||||
url = flow.request.url
|
||||
terminalWidthLimit = max(shutil.get_terminal_size()[0] - 25, 50)
|
||||
if self.flow_detail < 1 and len(url) > terminalWidthLimit:
|
||||
url = url[:terminalWidthLimit] + "…"
|
||||
url = click.style(strutils.escape_control_characters(url), bold=True)
|
||||
|
||||
http_version = ""
|
||||
@@ -238,7 +234,7 @@ class Dumper:
|
||||
def websocket_message(self, f):
|
||||
if self.match(f):
|
||||
message = f.messages[-1]
|
||||
self.echo(f.message_info(message))
|
||||
self.echo(message.info)
|
||||
if self.flow_detail >= 3:
|
||||
self._echo_message(message)
|
||||
|
||||
|
||||
@@ -8,9 +8,9 @@ class Onboarding(wsgiapp.WSGIApp):
|
||||
self.enabled = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.host = options.onboarding_host
|
||||
self.port = options.onboarding_port
|
||||
self.enabled = options.onboarding
|
||||
self.host = options.app_host
|
||||
self.port = options.app_port
|
||||
self.enabled = options.app
|
||||
|
||||
def request(self, f):
|
||||
if self.enabled:
|
||||
|
||||
@@ -1,43 +1,35 @@
|
||||
import binascii
|
||||
import weakref
|
||||
from typing import Optional
|
||||
from typing import Set # noqa
|
||||
from typing import Tuple
|
||||
|
||||
import passlib.apache
|
||||
|
||||
import mitmproxy.net.http
|
||||
from mitmproxy import connections # noqa
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import http
|
||||
from mitmproxy.net.http import status_codes
|
||||
import mitmproxy.net.http
|
||||
|
||||
|
||||
REALM = "mitmproxy"
|
||||
|
||||
|
||||
def mkauth(username: str, password: str, scheme: str = "basic") -> str:
|
||||
"""
|
||||
Craft a basic auth string
|
||||
"""
|
||||
def mkauth(username, password, scheme="basic"):
|
||||
v = binascii.b2a_base64(
|
||||
(username + ":" + password).encode("utf8")
|
||||
).decode("ascii")
|
||||
return scheme + " " + v
|
||||
|
||||
|
||||
def parse_http_basic_auth(s: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Parse a basic auth header.
|
||||
Raises a ValueError if the input is invalid.
|
||||
"""
|
||||
scheme, authinfo = s.split()
|
||||
if scheme.lower() != "basic":
|
||||
raise ValueError("Unknown scheme")
|
||||
def parse_http_basic_auth(s):
|
||||
words = s.split()
|
||||
if len(words) != 2:
|
||||
return None
|
||||
scheme = words[0]
|
||||
try:
|
||||
user, password = binascii.a2b_base64(authinfo.encode()).decode("utf8", "replace").split(":")
|
||||
except binascii.Error as e:
|
||||
raise ValueError(str(e))
|
||||
return scheme, user, password
|
||||
user = binascii.a2b_base64(words[1]).decode("utf8", "replace")
|
||||
except binascii.Error:
|
||||
return None
|
||||
parts = user.split(':')
|
||||
if len(parts) != 2:
|
||||
return None
|
||||
return scheme, parts[0], parts[1]
|
||||
|
||||
|
||||
class ProxyAuth:
|
||||
@@ -45,72 +37,67 @@ class ProxyAuth:
|
||||
self.nonanonymous = False
|
||||
self.htpasswd = None
|
||||
self.singleuser = None
|
||||
self.mode = None
|
||||
self.authenticated = weakref.WeakSet() # type: Set[connections.ClientConnection]
|
||||
"""Contains all connections that are permanently authenticated after an HTTP CONNECT"""
|
||||
|
||||
def enabled(self) -> bool:
|
||||
def enabled(self):
|
||||
return any([self.nonanonymous, self.htpasswd, self.singleuser])
|
||||
|
||||
def is_proxy_auth(self) -> bool:
|
||||
"""
|
||||
Returns:
|
||||
- True, if authentication is done as if mitmproxy is a proxy
|
||||
- False, if authentication is done as if mitmproxy is a HTTP server
|
||||
"""
|
||||
return self.mode in ("regular", "upstream")
|
||||
|
||||
def which_auth_header(self) -> str:
|
||||
if self.is_proxy_auth():
|
||||
def which_auth_header(self, f):
|
||||
if f.mode == "regular":
|
||||
return 'Proxy-Authorization'
|
||||
else:
|
||||
return 'Authorization'
|
||||
|
||||
def auth_required_response(self) -> http.HTTPResponse:
|
||||
if self.is_proxy_auth():
|
||||
def auth_required_response(self, f):
|
||||
if f.mode == "regular":
|
||||
hdrname = 'Proxy-Authenticate'
|
||||
else:
|
||||
hdrname = 'WWW-Authenticate'
|
||||
|
||||
headers = mitmproxy.net.http.Headers()
|
||||
headers[hdrname] = 'Basic realm="%s"' % REALM
|
||||
|
||||
if f.mode == "transparent":
|
||||
return http.make_error_response(
|
||||
status_codes.PROXY_AUTH_REQUIRED,
|
||||
headers=mitmproxy.net.http.Headers(Proxy_Authenticate='Basic realm="{}"'.format(REALM)),
|
||||
401,
|
||||
"Authentication Required",
|
||||
headers
|
||||
)
|
||||
else:
|
||||
return http.make_error_response(
|
||||
status_codes.UNAUTHORIZED,
|
||||
headers=mitmproxy.net.http.Headers(WWW_Authenticate='Basic realm="{}"'.format(REALM)),
|
||||
407,
|
||||
"Proxy Authentication Required",
|
||||
headers,
|
||||
)
|
||||
|
||||
def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]:
|
||||
"""
|
||||
Check if a request is correctly authenticated.
|
||||
Returns:
|
||||
- a (username, password) tuple if successful,
|
||||
- None, otherwise.
|
||||
"""
|
||||
auth_value = f.request.headers.get(self.which_auth_header(), "")
|
||||
try:
|
||||
scheme, username, password = parse_http_basic_auth(auth_value)
|
||||
except ValueError:
|
||||
return None
|
||||
def check(self, f):
|
||||
auth_value = f.request.headers.get(self.which_auth_header(f), None)
|
||||
if not auth_value:
|
||||
return False
|
||||
parts = parse_http_basic_auth(auth_value)
|
||||
if not parts:
|
||||
return False
|
||||
scheme, username, password = parts
|
||||
if scheme.lower() != 'basic':
|
||||
return False
|
||||
|
||||
if self.nonanonymous:
|
||||
return username, password
|
||||
pass
|
||||
elif self.singleuser:
|
||||
if self.singleuser == [username, password]:
|
||||
return username, password
|
||||
if [username, password] != self.singleuser:
|
||||
return False
|
||||
elif self.htpasswd:
|
||||
if self.htpasswd.check_password(username, password):
|
||||
return username, password
|
||||
|
||||
return None
|
||||
|
||||
def authenticate(self, f: http.HTTPFlow) -> bool:
|
||||
valid_credentials = self.check(f)
|
||||
if valid_credentials:
|
||||
f.metadata["proxyauth"] = valid_credentials
|
||||
del f.request.headers[self.which_auth_header()]
|
||||
return True
|
||||
if not self.htpasswd.check_password(username, password):
|
||||
return False
|
||||
else:
|
||||
f.response = self.auth_required_response()
|
||||
return False
|
||||
raise NotImplementedError("Should never happen.")
|
||||
|
||||
return True
|
||||
|
||||
def authenticate(self, f):
|
||||
if self.check(f):
|
||||
del f.request.headers[self.which_auth_header(f)]
|
||||
else:
|
||||
f.response = self.auth_required_response(f)
|
||||
|
||||
# Handlers
|
||||
def configure(self, options, updated):
|
||||
@@ -138,28 +125,24 @@ class ProxyAuth:
|
||||
)
|
||||
else:
|
||||
self.htpasswd = None
|
||||
if "mode" in updated:
|
||||
self.mode = options.mode
|
||||
if self.enabled():
|
||||
if options.mode == "transparent":
|
||||
raise exceptions.OptionsError(
|
||||
"Proxy Authentication not supported in transparent mode."
|
||||
)
|
||||
if options.mode == "socks5":
|
||||
elif options.mode == "socks5":
|
||||
raise exceptions.OptionsError(
|
||||
"Proxy Authentication not supported in SOCKS mode. "
|
||||
"https://github.com/mitmproxy/mitmproxy/issues/738"
|
||||
)
|
||||
# TODO: check for multiple auth options
|
||||
# TODO: check for multiple auth options
|
||||
|
||||
def http_connect(self, f: http.HTTPFlow) -> None:
|
||||
if self.enabled():
|
||||
if self.authenticate(f):
|
||||
self.authenticated.add(f.client_conn)
|
||||
|
||||
def requestheaders(self, f: http.HTTPFlow) -> None:
|
||||
if self.enabled():
|
||||
# Is this connection authenticated by a previous HTTP CONNECT?
|
||||
if f.client_conn in self.authenticated:
|
||||
return
|
||||
def http_connect(self, f):
|
||||
if self.enabled() and f.mode == "regular":
|
||||
self.authenticate(f)
|
||||
|
||||
def requestheaders(self, f):
|
||||
if self.enabled():
|
||||
# Are we already authenticated in CONNECT?
|
||||
if not (f.mode == "regular" and f.server_conn.via):
|
||||
self.authenticate(f)
|
||||
|
||||
@@ -2,47 +2,9 @@ import re
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
def parse_hook(s):
|
||||
"""
|
||||
Returns a (pattern, regex, replacement) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the replacement clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
"""
|
||||
sep, rem = s[0], s[1:]
|
||||
parts = rem.split(sep, 2)
|
||||
if len(parts) == 2:
|
||||
patt = ".*"
|
||||
a, b = parts
|
||||
elif len(parts) == 3:
|
||||
patt, a, b = parts
|
||||
else:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid replacement specifier: %s" % s
|
||||
)
|
||||
return patt, a, b
|
||||
|
||||
|
||||
class _ReplaceBase:
|
||||
class Replace:
|
||||
def __init__(self):
|
||||
self.lst = []
|
||||
|
||||
@@ -54,14 +16,9 @@ class _ReplaceBase:
|
||||
rex: a regular expression, as bytes.
|
||||
s: the replacement string, as bytes
|
||||
"""
|
||||
if self.optionName in updated:
|
||||
if "replacements" in updated:
|
||||
lst = []
|
||||
for rep in getattr(options, self.optionName):
|
||||
if isinstance(rep, str):
|
||||
fpatt, rex, s = parse_hook(rep)
|
||||
else:
|
||||
fpatt, rex, s = rep
|
||||
|
||||
for fpatt, rex, s in options.replacements:
|
||||
flt = flowfilter.parse(fpatt)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
@@ -80,9 +37,9 @@ class _ReplaceBase:
|
||||
for rex, s, flt in self.lst:
|
||||
if flt(f):
|
||||
if f.response:
|
||||
self.replace(f.response, rex, s)
|
||||
f.response.replace(rex, s, flags=re.DOTALL)
|
||||
else:
|
||||
self.replace(f.request, rex, s)
|
||||
f.request.replace(rex, s, flags=re.DOTALL)
|
||||
|
||||
def request(self, flow):
|
||||
if not flow.reply.has_message:
|
||||
@@ -91,22 +48,3 @@ class _ReplaceBase:
|
||||
def response(self, flow):
|
||||
if not flow.reply.has_message:
|
||||
self.execute(flow)
|
||||
|
||||
|
||||
class Replace(_ReplaceBase):
|
||||
optionName = "replacements"
|
||||
|
||||
def replace(self, obj, rex, s):
|
||||
obj.replace(rex, s, flags=re.DOTALL)
|
||||
|
||||
|
||||
class ReplaceFile(_ReplaceBase):
|
||||
optionName = "replacement_files"
|
||||
|
||||
def replace(self, obj, rex, s):
|
||||
try:
|
||||
v = open(s, "rb").read()
|
||||
except IOError as e:
|
||||
ctx.log.warn("Could not read replacement file: %s" % s)
|
||||
return
|
||||
obj.replace(rex, v, flags=re.DOTALL)
|
||||
|
||||
@@ -8,7 +8,7 @@ import types
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import events
|
||||
|
||||
|
||||
import watchdog.events
|
||||
@@ -20,7 +20,7 @@ def parse_command(command):
|
||||
Returns a (path, args) tuple.
|
||||
"""
|
||||
if not command or not command.strip():
|
||||
raise ValueError("Empty script command.")
|
||||
raise exceptions.OptionsError("Empty script command.")
|
||||
# Windows: escape all backslashes in the path.
|
||||
if os.name == "nt": # pragma: no cover
|
||||
backslashes = shlex.split(command, posix=False)[0].count("\\")
|
||||
@@ -28,13 +28,13 @@ def parse_command(command):
|
||||
args = shlex.split(command) # pragma: no cover
|
||||
args[0] = os.path.expanduser(args[0])
|
||||
if not os.path.exists(args[0]):
|
||||
raise ValueError(
|
||||
raise exceptions.OptionsError(
|
||||
("Script file not found: %s.\r\n"
|
||||
"If your script path contains spaces, "
|
||||
"make sure to wrap it in additional quotes, e.g. -s \"'./foo bar/baz.py' --args\".") %
|
||||
args[0])
|
||||
elif os.path.isdir(args[0]):
|
||||
raise ValueError("Not a file: %s" % args[0])
|
||||
raise exceptions.OptionsError("Not a file: %s" % args[0])
|
||||
return args[0], args[1:]
|
||||
|
||||
|
||||
@@ -110,16 +110,11 @@ class ReloadHandler(watchdog.events.FileSystemEventHandler):
|
||||
self.callback = callback
|
||||
|
||||
def filter(self, event):
|
||||
"""
|
||||
Returns True only when .py file is changed
|
||||
"""
|
||||
if event.is_directory:
|
||||
return False
|
||||
if os.path.basename(event.src_path).startswith("."):
|
||||
return False
|
||||
if event.src_path.endswith(".py"):
|
||||
return True
|
||||
return False
|
||||
return True
|
||||
|
||||
def on_modified(self, event):
|
||||
if self.filter(event):
|
||||
@@ -146,7 +141,7 @@ class Script:
|
||||
self.last_options = None
|
||||
self.should_reload = threading.Event()
|
||||
|
||||
for i in eventsequence.Events:
|
||||
for i in events.Events:
|
||||
if not hasattr(self, i):
|
||||
def mkprox():
|
||||
evt = i
|
||||
@@ -210,13 +205,10 @@ class ScriptLoader:
|
||||
An addon that manages loading scripts from options.
|
||||
"""
|
||||
def run_once(self, command, flows):
|
||||
try:
|
||||
sc = Script(command)
|
||||
except ValueError as e:
|
||||
raise ValueError(str(e))
|
||||
sc = Script(command)
|
||||
sc.load_script()
|
||||
for f in flows:
|
||||
for evt, o in eventsequence.iterate(f):
|
||||
for evt, o in events.event_sequence(f):
|
||||
sc.run(evt, o)
|
||||
sc.done()
|
||||
return sc
|
||||
@@ -254,10 +246,7 @@ class ScriptLoader:
|
||||
ordered.append(current[s])
|
||||
else:
|
||||
ctx.log.info("Loading script: %s" % s)
|
||||
try:
|
||||
sc = Script(s)
|
||||
except ValueError as e:
|
||||
raise exceptions.OptionsError(str(e))
|
||||
sc = Script(s)
|
||||
ordered.append(sc)
|
||||
newscripts.append(sc)
|
||||
|
||||
|
||||
@@ -2,43 +2,6 @@ from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
|
||||
|
||||
def parse_setheader(s):
|
||||
"""
|
||||
Returns a (pattern, regex, replacement) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the replacement clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
"""
|
||||
sep, rem = s[0], s[1:]
|
||||
parts = rem.split(sep, 2)
|
||||
if len(parts) == 2:
|
||||
patt = ".*"
|
||||
a, b = parts
|
||||
elif len(parts) == 3:
|
||||
patt, a, b = parts
|
||||
else:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid replacement specifier: %s" % s
|
||||
)
|
||||
return patt, a, b
|
||||
|
||||
|
||||
class SetHeaders:
|
||||
def __init__(self):
|
||||
self.lst = []
|
||||
@@ -53,12 +16,7 @@ class SetHeaders:
|
||||
"""
|
||||
if "setheaders" in updated:
|
||||
self.lst = []
|
||||
for shead in options.setheaders:
|
||||
if isinstance(shead, str):
|
||||
fpatt, header, value = parse_setheader(shead)
|
||||
else:
|
||||
fpatt, header, value = shead
|
||||
|
||||
for fpatt, header, value in options.setheaders:
|
||||
flt = flowfilter.parse(fpatt)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
|
||||
@@ -5,7 +5,7 @@ from mitmproxy import log
|
||||
|
||||
|
||||
class TermLog:
|
||||
def __init__(self, outfile=None):
|
||||
def __init__(self, outfile=sys.stdout):
|
||||
self.options = None
|
||||
self.outfile = outfile
|
||||
|
||||
@@ -13,15 +13,10 @@ class TermLog:
|
||||
self.options = options
|
||||
|
||||
def log(self, e):
|
||||
if log.log_tier(e.level) == log.log_tier("error"):
|
||||
outfile = self.outfile or sys.stderr
|
||||
else:
|
||||
outfile = self.outfile or sys.stdout
|
||||
|
||||
if self.options.verbosity >= log.log_tier(e.level):
|
||||
click.secho(
|
||||
e.msg,
|
||||
file=outfile,
|
||||
file=self.outfile,
|
||||
fg=dict(error="red", warn="yellow").get(e.level),
|
||||
dim=(e.level == "debug"),
|
||||
err=(e.level == "error")
|
||||
|
||||
@@ -102,7 +102,7 @@ orders = [
|
||||
class View(collections.Sequence):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._store = collections.OrderedDict()
|
||||
self._store = {}
|
||||
self.filter = matchall
|
||||
# Should we show only marked flows?
|
||||
self.show_marked = False
|
||||
@@ -230,17 +230,6 @@ class View(collections.Sequence):
|
||||
self.sig_view_refresh.send(self)
|
||||
self.sig_store_refresh.send(self)
|
||||
|
||||
def clear_not_marked(self):
|
||||
"""
|
||||
Clears only the unmarked flows.
|
||||
"""
|
||||
for flow in self._store.copy().values():
|
||||
if not flow.marked:
|
||||
self._store.pop(flow.id)
|
||||
|
||||
self._refilter()
|
||||
self.sig_store_refresh.send(self)
|
||||
|
||||
def add(self, f: mitmproxy.flow.Flow) -> bool:
|
||||
"""
|
||||
Adds a flow to the state. If the flow already exists, it is
|
||||
@@ -309,19 +298,19 @@ class View(collections.Sequence):
|
||||
"Invalid interception filter: %s" % opts.filter
|
||||
)
|
||||
self.set_filter(filt)
|
||||
if "console_order" in updated:
|
||||
if opts.console_order is None:
|
||||
if "order" in updated:
|
||||
if opts.order is None:
|
||||
self.set_order(self.default_order)
|
||||
else:
|
||||
if opts.console_order not in self.orders:
|
||||
if opts.order not in self.orders:
|
||||
raise exceptions.OptionsError(
|
||||
"Unknown flow order: %s" % opts.console_order
|
||||
"Unknown flow order: %s" % opts.order
|
||||
)
|
||||
self.set_order(self.orders[opts.console_order])
|
||||
if "console_order_reversed" in updated:
|
||||
self.set_reversed(opts.console_order_reversed)
|
||||
if "console_focus_follow" in updated:
|
||||
self.focus_follow = opts.console_focus_follow
|
||||
self.set_order(self.orders[opts.order])
|
||||
if "order_reversed" in updated:
|
||||
self.set_reversed(opts.order_reversed)
|
||||
if "focus_follow" in updated:
|
||||
self.focus_follow = opts.focus_follow
|
||||
|
||||
def request(self, f):
|
||||
self.add(f)
|
||||
|
||||
@@ -3,8 +3,8 @@ import ssl
|
||||
import time
|
||||
import datetime
|
||||
import ipaddress
|
||||
import sys
|
||||
|
||||
import sys
|
||||
from pyasn1.type import univ, constraint, char, namedtype, tag
|
||||
from pyasn1.codec.der.decoder import decode
|
||||
from pyasn1.error import PyAsn1Error
|
||||
@@ -13,8 +13,8 @@ import OpenSSL
|
||||
from mitmproxy.types import serializable
|
||||
|
||||
# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
|
||||
DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3
|
||||
|
||||
DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3
|
||||
# Generated with "openssl dhparam". It's too slow to generate this on startup.
|
||||
DEFAULT_DHPARAM = b"""
|
||||
-----BEGIN DH PARAMETERS-----
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import time
|
||||
|
||||
import copy
|
||||
import os
|
||||
|
||||
from mitmproxy import stateobject
|
||||
@@ -81,6 +82,9 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
tls_version=str,
|
||||
)
|
||||
|
||||
def copy(self):
|
||||
return copy.copy(self)
|
||||
|
||||
def send(self, message):
|
||||
if isinstance(message, list):
|
||||
message = b''.join(message)
|
||||
@@ -218,6 +222,9 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
via=None
|
||||
))
|
||||
|
||||
def copy(self):
|
||||
return copy.copy(self)
|
||||
|
||||
def connect(self):
|
||||
self.timestamp_start = time.time()
|
||||
tcp.TCPClient.connect(self)
|
||||
|
||||
@@ -159,7 +159,6 @@ def get_content_view(viewmode: View, data: bytes, **metadata):
|
||||
return desc, safe_to_print(content), error
|
||||
|
||||
|
||||
# The order in which ContentViews are added is important!
|
||||
add(auto.ViewAuto())
|
||||
add(raw.ViewRaw())
|
||||
add(hex.ViewHex())
|
||||
@@ -173,7 +172,9 @@ add(urlencoded.ViewURLEncoded())
|
||||
add(multipart.ViewMultipart())
|
||||
add(image.ViewImage())
|
||||
add(query.ViewQuery())
|
||||
add(protobuf.ViewProtobuf())
|
||||
|
||||
if protobuf.ViewProtobuf.is_available():
|
||||
add(protobuf.ViewProtobuf())
|
||||
|
||||
__all__ = [
|
||||
"View", "VIEW_CUTOFF", "KEY_MAX", "format_text", "format_dict",
|
||||
|
||||
@@ -18,8 +18,6 @@ class ViewAuto(base.View):
|
||||
return contentviews.content_types_map[ct][0](data, **metadata)
|
||||
elif strutils.is_xml(data):
|
||||
return contentviews.get("XML/HTML")(data, **metadata)
|
||||
elif ct.startswith("image/"):
|
||||
return contentviews.get("Image")(data, **metadata)
|
||||
if metadata.get("query"):
|
||||
return contentviews.get("Query")(data, **metadata)
|
||||
if data and strutils.is_mostly_bin(data):
|
||||
|
||||
45
mitmproxy/contentviews/image.py
Normal file
45
mitmproxy/contentviews/image.py
Normal file
@@ -0,0 +1,45 @@
|
||||
import io
|
||||
|
||||
from PIL import ExifTags
|
||||
from PIL import Image
|
||||
|
||||
from mitmproxy.types import multidict
|
||||
from . import base
|
||||
|
||||
|
||||
class ViewImage(base.View):
|
||||
name = "Image"
|
||||
prompt = ("image", "i")
|
||||
content_types = [
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/gif",
|
||||
"image/vnd.microsoft.icon",
|
||||
"image/x-icon",
|
||||
]
|
||||
|
||||
def __call__(self, data, **metadata):
|
||||
try:
|
||||
img = Image.open(io.BytesIO(data))
|
||||
except IOError:
|
||||
return None
|
||||
parts = [
|
||||
("Format", str(img.format_description)),
|
||||
("Size", "%s x %s px" % img.size),
|
||||
("Mode", str(img.mode)),
|
||||
]
|
||||
for i in sorted(img.info.keys()):
|
||||
if i != "exif":
|
||||
parts.append(
|
||||
(str(i), str(img.info[i]))
|
||||
)
|
||||
if hasattr(img, "_getexif"):
|
||||
ex = img._getexif()
|
||||
if ex:
|
||||
for i in sorted(ex.keys()):
|
||||
tag = ExifTags.TAGS.get(i, i)
|
||||
parts.append(
|
||||
(str(tag), str(ex[i]))
|
||||
)
|
||||
fmt = base.format_dict(multidict.MultiDict(parts))
|
||||
return "%s image" % img.format, fmt
|
||||
@@ -1,3 +0,0 @@
|
||||
from .view import ViewImage
|
||||
|
||||
__all__ = ["ViewImage"]
|
||||
@@ -1,80 +0,0 @@
|
||||
import io
|
||||
import typing
|
||||
|
||||
from kaitaistruct import KaitaiStream
|
||||
|
||||
from mitmproxy.contrib.kaitaistruct import png
|
||||
from mitmproxy.contrib.kaitaistruct import gif
|
||||
from mitmproxy.contrib.kaitaistruct import jpeg
|
||||
|
||||
Metadata = typing.List[typing.Tuple[str, str]]
|
||||
|
||||
|
||||
def parse_png(data: bytes) -> Metadata:
|
||||
img = png.Png(KaitaiStream(io.BytesIO(data)))
|
||||
parts = [
|
||||
('Format', 'Portable network graphics'),
|
||||
('Size', "{0} x {1} px".format(img.ihdr.width, img.ihdr.height))
|
||||
]
|
||||
for chunk in img.chunks:
|
||||
if chunk.type == 'gAMA':
|
||||
parts.append(('gamma', str(chunk.body.gamma_int / 100000)))
|
||||
elif chunk.type == 'pHYs':
|
||||
aspectx = chunk.body.pixels_per_unit_x
|
||||
aspecty = chunk.body.pixels_per_unit_y
|
||||
parts.append(('aspect', "{0} x {1}".format(aspectx, aspecty)))
|
||||
elif chunk.type == 'tEXt':
|
||||
parts.append((chunk.body.keyword, chunk.body.text))
|
||||
elif chunk.type == 'iTXt':
|
||||
parts.append((chunk.body.keyword, chunk.body.text))
|
||||
elif chunk.type == 'zTXt':
|
||||
parts.append((chunk.body.keyword, chunk.body.text_datastream.decode('iso8859-1')))
|
||||
return parts
|
||||
|
||||
|
||||
def parse_gif(data: bytes) -> Metadata:
|
||||
img = gif.Gif(KaitaiStream(io.BytesIO(data)))
|
||||
descriptor = img.logical_screen_descriptor
|
||||
parts = [
|
||||
('Format', 'Compuserve GIF'),
|
||||
('Version', "GIF{}".format(img.header.version.decode('ASCII'))),
|
||||
('Size', "{} x {} px".format(descriptor.screen_width, descriptor.screen_height)),
|
||||
('background', str(descriptor.bg_color_index))
|
||||
]
|
||||
ext_blocks = []
|
||||
for block in img.blocks:
|
||||
if block.block_type.name == 'extension':
|
||||
ext_blocks.append(block)
|
||||
comment_blocks = []
|
||||
for block in ext_blocks:
|
||||
if block.body.label._name_ == 'comment':
|
||||
comment_blocks.append(block)
|
||||
for block in comment_blocks:
|
||||
entries = block.body.body.entries
|
||||
for entry in entries:
|
||||
comment = entry.bytes
|
||||
if comment is not b'':
|
||||
parts.append(('comment', str(comment)))
|
||||
return parts
|
||||
|
||||
|
||||
def parse_jpeg(data: bytes) -> Metadata:
|
||||
img = jpeg.Jpeg(KaitaiStream(io.BytesIO(data)))
|
||||
parts = [
|
||||
('Format', 'JPEG (ISO 10918)')
|
||||
]
|
||||
for segment in img.segments:
|
||||
if segment.marker._name_ == 'sof0':
|
||||
parts.append(('Size', "{0} x {1} px".format(segment.data.image_width, segment.data.image_height)))
|
||||
if segment.marker._name_ == 'app0':
|
||||
parts.append(('jfif_version', "({0}, {1})".format(segment.data.version_major, segment.data.version_minor)))
|
||||
parts.append(('jfif_density', "({0}, {1})".format(segment.data.density_x, segment.data.density_y)))
|
||||
parts.append(('jfif_unit', str(segment.data.density_units._value_)))
|
||||
if segment.marker._name_ == 'com':
|
||||
parts.append(('comment', str(segment.data)))
|
||||
if segment.marker._name_ == 'app1':
|
||||
if hasattr(segment.data, 'body'):
|
||||
for field in segment.data.body.data.body.ifd0.fields:
|
||||
if field.data is not None:
|
||||
parts.append((field.tag._name_, field.data.decode('UTF-8').strip('\x00')))
|
||||
return parts
|
||||
@@ -1,38 +0,0 @@
|
||||
import imghdr
|
||||
|
||||
from mitmproxy.contentviews import base
|
||||
from mitmproxy.types import multidict
|
||||
from . import image_parser
|
||||
|
||||
|
||||
class ViewImage(base.View):
|
||||
name = "Image"
|
||||
prompt = ("image", "i")
|
||||
|
||||
# there is also a fallback in the auto view for image/*.
|
||||
content_types = [
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/gif",
|
||||
"image/vnd.microsoft.icon",
|
||||
"image/x-icon",
|
||||
"image/webp",
|
||||
]
|
||||
|
||||
def __call__(self, data, **metadata):
|
||||
image_type = imghdr.what('', h=data)
|
||||
if image_type == 'png':
|
||||
image_metadata = image_parser.parse_png(data)
|
||||
elif image_type == 'gif':
|
||||
image_metadata = image_parser.parse_gif(data)
|
||||
elif image_type == 'jpeg':
|
||||
image_metadata = image_parser.parse_jpeg(data)
|
||||
else:
|
||||
image_metadata = [
|
||||
("Image Format", image_type or "unknown")
|
||||
]
|
||||
if image_type:
|
||||
view_name = "{} Image".format(image_type.upper())
|
||||
else:
|
||||
view_name = "Unknown Image"
|
||||
return view_name, base.format_dict(multidict.MultiDict(image_metadata))
|
||||
@@ -15,28 +15,31 @@ class ViewProtobuf(base.View):
|
||||
"application/x-protobuffer",
|
||||
]
|
||||
|
||||
def is_available(self):
|
||||
@staticmethod
|
||||
def is_available():
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
["protoc", "--version"],
|
||||
stdout=subprocess.PIPE
|
||||
)
|
||||
out, _ = p.communicate()
|
||||
return out.startswith(b"libprotoc")
|
||||
return out.startswith("libprotoc")
|
||||
except:
|
||||
return False
|
||||
|
||||
def __call__(self, data, **metadata):
|
||||
if not self.is_available():
|
||||
raise NotImplementedError("protoc not found. Please make sure 'protoc' is available in $PATH.")
|
||||
|
||||
def decode_protobuf(self, content):
|
||||
# if Popen raises OSError, it will be caught in
|
||||
# get_content_view and fall back to Raw
|
||||
p = subprocess.Popen(['protoc', '--decode_raw'],
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE)
|
||||
decoded, _ = p.communicate(input=data)
|
||||
if not decoded:
|
||||
raise ValueError("Failed to parse input.")
|
||||
out, err = p.communicate(input=content)
|
||||
if out:
|
||||
return out
|
||||
else:
|
||||
return err
|
||||
|
||||
def __call__(self, data, **metadata):
|
||||
decoded = self.decode_protobuf(data)
|
||||
return "Protobuf", base.format_text(decoded)
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was exif.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
from .exif_le import ExifLe
|
||||
from .exif_be import ExifBe
|
||||
class Exif(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.endianness = self._io.read_u2le()
|
||||
_on = self.endianness
|
||||
if _on == 18761:
|
||||
self.body = ExifLe(self._io)
|
||||
elif _on == 19789:
|
||||
self.body = ExifBe(self._io)
|
||||
@@ -1,571 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was exif_be.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif_be.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
class ExifBe(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.version = self._io.read_u2be()
|
||||
self.ifd0_ofs = self._io.read_u4be()
|
||||
|
||||
class Ifd(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.num_fields = self._io.read_u2be()
|
||||
self.fields = [None] * (self.num_fields)
|
||||
for i in range(self.num_fields):
|
||||
self.fields[i] = self._root.IfdField(self._io, self, self._root)
|
||||
|
||||
self.next_ifd_ofs = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def next_ifd(self):
|
||||
if hasattr(self, '_m_next_ifd'):
|
||||
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
|
||||
|
||||
if self.next_ifd_ofs != 0:
|
||||
_pos = self._io.pos()
|
||||
self._io.seek(self.next_ifd_ofs)
|
||||
self._m_next_ifd = self._root.Ifd(self._io, self, self._root)
|
||||
self._io.seek(_pos)
|
||||
|
||||
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
|
||||
|
||||
|
||||
class IfdField(KaitaiStruct):
|
||||
|
||||
class FieldTypeEnum(Enum):
|
||||
byte = 1
|
||||
ascii_string = 2
|
||||
word = 3
|
||||
dword = 4
|
||||
rational = 5
|
||||
|
||||
class TagEnum(Enum):
|
||||
image_width = 256
|
||||
image_height = 257
|
||||
bits_per_sample = 258
|
||||
compression = 259
|
||||
photometric_interpretation = 262
|
||||
thresholding = 263
|
||||
cell_width = 264
|
||||
cell_length = 265
|
||||
fill_order = 266
|
||||
document_name = 269
|
||||
image_description = 270
|
||||
make = 271
|
||||
model = 272
|
||||
strip_offsets = 273
|
||||
orientation = 274
|
||||
samples_per_pixel = 277
|
||||
rows_per_strip = 278
|
||||
strip_byte_counts = 279
|
||||
min_sample_value = 280
|
||||
max_sample_value = 281
|
||||
x_resolution = 282
|
||||
y_resolution = 283
|
||||
planar_configuration = 284
|
||||
page_name = 285
|
||||
x_position = 286
|
||||
y_position = 287
|
||||
free_offsets = 288
|
||||
free_byte_counts = 289
|
||||
gray_response_unit = 290
|
||||
gray_response_curve = 291
|
||||
t4_options = 292
|
||||
t6_options = 293
|
||||
resolution_unit = 296
|
||||
page_number = 297
|
||||
color_response_unit = 300
|
||||
transfer_function = 301
|
||||
software = 305
|
||||
modify_date = 306
|
||||
artist = 315
|
||||
host_computer = 316
|
||||
predictor = 317
|
||||
white_point = 318
|
||||
primary_chromaticities = 319
|
||||
color_map = 320
|
||||
halftone_hints = 321
|
||||
tile_width = 322
|
||||
tile_length = 323
|
||||
tile_offsets = 324
|
||||
tile_byte_counts = 325
|
||||
bad_fax_lines = 326
|
||||
clean_fax_data = 327
|
||||
consecutive_bad_fax_lines = 328
|
||||
sub_ifd = 330
|
||||
ink_set = 332
|
||||
ink_names = 333
|
||||
numberof_inks = 334
|
||||
dot_range = 336
|
||||
target_printer = 337
|
||||
extra_samples = 338
|
||||
sample_format = 339
|
||||
s_min_sample_value = 340
|
||||
s_max_sample_value = 341
|
||||
transfer_range = 342
|
||||
clip_path = 343
|
||||
x_clip_path_units = 344
|
||||
y_clip_path_units = 345
|
||||
indexed = 346
|
||||
jpeg_tables = 347
|
||||
opi_proxy = 351
|
||||
global_parameters_ifd = 400
|
||||
profile_type = 401
|
||||
fax_profile = 402
|
||||
coding_methods = 403
|
||||
version_year = 404
|
||||
mode_number = 405
|
||||
decode = 433
|
||||
default_image_color = 434
|
||||
t82_options = 435
|
||||
jpeg_tables2 = 437
|
||||
jpeg_proc = 512
|
||||
thumbnail_offset = 513
|
||||
thumbnail_length = 514
|
||||
jpeg_restart_interval = 515
|
||||
jpeg_lossless_predictors = 517
|
||||
jpeg_point_transforms = 518
|
||||
jpegq_tables = 519
|
||||
jpegdc_tables = 520
|
||||
jpegac_tables = 521
|
||||
y_cb_cr_coefficients = 529
|
||||
y_cb_cr_sub_sampling = 530
|
||||
y_cb_cr_positioning = 531
|
||||
reference_black_white = 532
|
||||
strip_row_counts = 559
|
||||
application_notes = 700
|
||||
uspto_miscellaneous = 999
|
||||
related_image_file_format = 4096
|
||||
related_image_width = 4097
|
||||
related_image_height = 4098
|
||||
rating = 18246
|
||||
xp_dip_xml = 18247
|
||||
stitch_info = 18248
|
||||
rating_percent = 18249
|
||||
sony_raw_file_type = 28672
|
||||
light_falloff_params = 28722
|
||||
chromatic_aberration_corr_params = 28725
|
||||
distortion_corr_params = 28727
|
||||
image_id = 32781
|
||||
wang_tag1 = 32931
|
||||
wang_annotation = 32932
|
||||
wang_tag3 = 32933
|
||||
wang_tag4 = 32934
|
||||
image_reference_points = 32953
|
||||
region_xform_tack_point = 32954
|
||||
warp_quadrilateral = 32955
|
||||
affine_transform_mat = 32956
|
||||
matteing = 32995
|
||||
data_type = 32996
|
||||
image_depth = 32997
|
||||
tile_depth = 32998
|
||||
image_full_width = 33300
|
||||
image_full_height = 33301
|
||||
texture_format = 33302
|
||||
wrap_modes = 33303
|
||||
fov_cot = 33304
|
||||
matrix_world_to_screen = 33305
|
||||
matrix_world_to_camera = 33306
|
||||
model2 = 33405
|
||||
cfa_repeat_pattern_dim = 33421
|
||||
cfa_pattern2 = 33422
|
||||
battery_level = 33423
|
||||
kodak_ifd = 33424
|
||||
copyright = 33432
|
||||
exposure_time = 33434
|
||||
f_number = 33437
|
||||
md_file_tag = 33445
|
||||
md_scale_pixel = 33446
|
||||
md_color_table = 33447
|
||||
md_lab_name = 33448
|
||||
md_sample_info = 33449
|
||||
md_prep_date = 33450
|
||||
md_prep_time = 33451
|
||||
md_file_units = 33452
|
||||
pixel_scale = 33550
|
||||
advent_scale = 33589
|
||||
advent_revision = 33590
|
||||
uic1_tag = 33628
|
||||
uic2_tag = 33629
|
||||
uic3_tag = 33630
|
||||
uic4_tag = 33631
|
||||
iptc_naa = 33723
|
||||
intergraph_packet_data = 33918
|
||||
intergraph_flag_registers = 33919
|
||||
intergraph_matrix = 33920
|
||||
ingr_reserved = 33921
|
||||
model_tie_point = 33922
|
||||
site = 34016
|
||||
color_sequence = 34017
|
||||
it8_header = 34018
|
||||
raster_padding = 34019
|
||||
bits_per_run_length = 34020
|
||||
bits_per_extended_run_length = 34021
|
||||
color_table = 34022
|
||||
image_color_indicator = 34023
|
||||
background_color_indicator = 34024
|
||||
image_color_value = 34025
|
||||
background_color_value = 34026
|
||||
pixel_intensity_range = 34027
|
||||
transparency_indicator = 34028
|
||||
color_characterization = 34029
|
||||
hc_usage = 34030
|
||||
trap_indicator = 34031
|
||||
cmyk_equivalent = 34032
|
||||
sem_info = 34118
|
||||
afcp_iptc = 34152
|
||||
pixel_magic_jbig_options = 34232
|
||||
jpl_carto_ifd = 34263
|
||||
model_transform = 34264
|
||||
wb_grgb_levels = 34306
|
||||
leaf_data = 34310
|
||||
photoshop_settings = 34377
|
||||
exif_offset = 34665
|
||||
icc_profile = 34675
|
||||
tiff_fx_extensions = 34687
|
||||
multi_profiles = 34688
|
||||
shared_data = 34689
|
||||
t88_options = 34690
|
||||
image_layer = 34732
|
||||
geo_tiff_directory = 34735
|
||||
geo_tiff_double_params = 34736
|
||||
geo_tiff_ascii_params = 34737
|
||||
jbig_options = 34750
|
||||
exposure_program = 34850
|
||||
spectral_sensitivity = 34852
|
||||
gps_info = 34853
|
||||
iso = 34855
|
||||
opto_electric_conv_factor = 34856
|
||||
interlace = 34857
|
||||
time_zone_offset = 34858
|
||||
self_timer_mode = 34859
|
||||
sensitivity_type = 34864
|
||||
standard_output_sensitivity = 34865
|
||||
recommended_exposure_index = 34866
|
||||
iso_speed = 34867
|
||||
iso_speed_latitudeyyy = 34868
|
||||
iso_speed_latitudezzz = 34869
|
||||
fax_recv_params = 34908
|
||||
fax_sub_address = 34909
|
||||
fax_recv_time = 34910
|
||||
fedex_edr = 34929
|
||||
leaf_sub_ifd = 34954
|
||||
exif_version = 36864
|
||||
date_time_original = 36867
|
||||
create_date = 36868
|
||||
google_plus_upload_code = 36873
|
||||
offset_time = 36880
|
||||
offset_time_original = 36881
|
||||
offset_time_digitized = 36882
|
||||
components_configuration = 37121
|
||||
compressed_bits_per_pixel = 37122
|
||||
shutter_speed_value = 37377
|
||||
aperture_value = 37378
|
||||
brightness_value = 37379
|
||||
exposure_compensation = 37380
|
||||
max_aperture_value = 37381
|
||||
subject_distance = 37382
|
||||
metering_mode = 37383
|
||||
light_source = 37384
|
||||
flash = 37385
|
||||
focal_length = 37386
|
||||
flash_energy = 37387
|
||||
spatial_frequency_response = 37388
|
||||
noise = 37389
|
||||
focal_plane_x_resolution = 37390
|
||||
focal_plane_y_resolution = 37391
|
||||
focal_plane_resolution_unit = 37392
|
||||
image_number = 37393
|
||||
security_classification = 37394
|
||||
image_history = 37395
|
||||
subject_area = 37396
|
||||
exposure_index = 37397
|
||||
tiff_ep_standard_id = 37398
|
||||
sensing_method = 37399
|
||||
cip3_data_file = 37434
|
||||
cip3_sheet = 37435
|
||||
cip3_side = 37436
|
||||
sto_nits = 37439
|
||||
maker_note = 37500
|
||||
user_comment = 37510
|
||||
sub_sec_time = 37520
|
||||
sub_sec_time_original = 37521
|
||||
sub_sec_time_digitized = 37522
|
||||
ms_document_text = 37679
|
||||
ms_property_set_storage = 37680
|
||||
ms_document_text_position = 37681
|
||||
image_source_data = 37724
|
||||
ambient_temperature = 37888
|
||||
humidity = 37889
|
||||
pressure = 37890
|
||||
water_depth = 37891
|
||||
acceleration = 37892
|
||||
camera_elevation_angle = 37893
|
||||
xp_title = 40091
|
||||
xp_comment = 40092
|
||||
xp_author = 40093
|
||||
xp_keywords = 40094
|
||||
xp_subject = 40095
|
||||
flashpix_version = 40960
|
||||
color_space = 40961
|
||||
exif_image_width = 40962
|
||||
exif_image_height = 40963
|
||||
related_sound_file = 40964
|
||||
interop_offset = 40965
|
||||
samsung_raw_pointers_offset = 40976
|
||||
samsung_raw_pointers_length = 40977
|
||||
samsung_raw_byte_order = 41217
|
||||
samsung_raw_unknown = 41218
|
||||
flash_energy2 = 41483
|
||||
spatial_frequency_response2 = 41484
|
||||
noise2 = 41485
|
||||
focal_plane_x_resolution2 = 41486
|
||||
focal_plane_y_resolution2 = 41487
|
||||
focal_plane_resolution_unit2 = 41488
|
||||
image_number2 = 41489
|
||||
security_classification2 = 41490
|
||||
image_history2 = 41491
|
||||
subject_location = 41492
|
||||
exposure_index2 = 41493
|
||||
tiff_ep_standard_id2 = 41494
|
||||
sensing_method2 = 41495
|
||||
file_source = 41728
|
||||
scene_type = 41729
|
||||
cfa_pattern = 41730
|
||||
custom_rendered = 41985
|
||||
exposure_mode = 41986
|
||||
white_balance = 41987
|
||||
digital_zoom_ratio = 41988
|
||||
focal_length_in35mm_format = 41989
|
||||
scene_capture_type = 41990
|
||||
gain_control = 41991
|
||||
contrast = 41992
|
||||
saturation = 41993
|
||||
sharpness = 41994
|
||||
device_setting_description = 41995
|
||||
subject_distance_range = 41996
|
||||
image_unique_id = 42016
|
||||
owner_name = 42032
|
||||
serial_number = 42033
|
||||
lens_info = 42034
|
||||
lens_make = 42035
|
||||
lens_model = 42036
|
||||
lens_serial_number = 42037
|
||||
gdal_metadata = 42112
|
||||
gdal_no_data = 42113
|
||||
gamma = 42240
|
||||
expand_software = 44992
|
||||
expand_lens = 44993
|
||||
expand_film = 44994
|
||||
expand_filter_lens = 44995
|
||||
expand_scanner = 44996
|
||||
expand_flash_lamp = 44997
|
||||
pixel_format = 48129
|
||||
transformation = 48130
|
||||
uncompressed = 48131
|
||||
image_type = 48132
|
||||
image_width2 = 48256
|
||||
image_height2 = 48257
|
||||
width_resolution = 48258
|
||||
height_resolution = 48259
|
||||
image_offset = 48320
|
||||
image_byte_count = 48321
|
||||
alpha_offset = 48322
|
||||
alpha_byte_count = 48323
|
||||
image_data_discard = 48324
|
||||
alpha_data_discard = 48325
|
||||
oce_scanjob_desc = 50215
|
||||
oce_application_selector = 50216
|
||||
oce_id_number = 50217
|
||||
oce_image_logic = 50218
|
||||
annotations = 50255
|
||||
print_im = 50341
|
||||
original_file_name = 50547
|
||||
uspto_original_content_type = 50560
|
||||
dng_version = 50706
|
||||
dng_backward_version = 50707
|
||||
unique_camera_model = 50708
|
||||
localized_camera_model = 50709
|
||||
cfa_plane_color = 50710
|
||||
cfa_layout = 50711
|
||||
linearization_table = 50712
|
||||
black_level_repeat_dim = 50713
|
||||
black_level = 50714
|
||||
black_level_delta_h = 50715
|
||||
black_level_delta_v = 50716
|
||||
white_level = 50717
|
||||
default_scale = 50718
|
||||
default_crop_origin = 50719
|
||||
default_crop_size = 50720
|
||||
color_matrix1 = 50721
|
||||
color_matrix2 = 50722
|
||||
camera_calibration1 = 50723
|
||||
camera_calibration2 = 50724
|
||||
reduction_matrix1 = 50725
|
||||
reduction_matrix2 = 50726
|
||||
analog_balance = 50727
|
||||
as_shot_neutral = 50728
|
||||
as_shot_white_xy = 50729
|
||||
baseline_exposure = 50730
|
||||
baseline_noise = 50731
|
||||
baseline_sharpness = 50732
|
||||
bayer_green_split = 50733
|
||||
linear_response_limit = 50734
|
||||
camera_serial_number = 50735
|
||||
dng_lens_info = 50736
|
||||
chroma_blur_radius = 50737
|
||||
anti_alias_strength = 50738
|
||||
shadow_scale = 50739
|
||||
sr2_private = 50740
|
||||
maker_note_safety = 50741
|
||||
raw_image_segmentation = 50752
|
||||
calibration_illuminant1 = 50778
|
||||
calibration_illuminant2 = 50779
|
||||
best_quality_scale = 50780
|
||||
raw_data_unique_id = 50781
|
||||
alias_layer_metadata = 50784
|
||||
original_raw_file_name = 50827
|
||||
original_raw_file_data = 50828
|
||||
active_area = 50829
|
||||
masked_areas = 50830
|
||||
as_shot_icc_profile = 50831
|
||||
as_shot_pre_profile_matrix = 50832
|
||||
current_icc_profile = 50833
|
||||
current_pre_profile_matrix = 50834
|
||||
colorimetric_reference = 50879
|
||||
s_raw_type = 50885
|
||||
panasonic_title = 50898
|
||||
panasonic_title2 = 50899
|
||||
camera_calibration_sig = 50931
|
||||
profile_calibration_sig = 50932
|
||||
profile_ifd = 50933
|
||||
as_shot_profile_name = 50934
|
||||
noise_reduction_applied = 50935
|
||||
profile_name = 50936
|
||||
profile_hue_sat_map_dims = 50937
|
||||
profile_hue_sat_map_data1 = 50938
|
||||
profile_hue_sat_map_data2 = 50939
|
||||
profile_tone_curve = 50940
|
||||
profile_embed_policy = 50941
|
||||
profile_copyright = 50942
|
||||
forward_matrix1 = 50964
|
||||
forward_matrix2 = 50965
|
||||
preview_application_name = 50966
|
||||
preview_application_version = 50967
|
||||
preview_settings_name = 50968
|
||||
preview_settings_digest = 50969
|
||||
preview_color_space = 50970
|
||||
preview_date_time = 50971
|
||||
raw_image_digest = 50972
|
||||
original_raw_file_digest = 50973
|
||||
sub_tile_block_size = 50974
|
||||
row_interleave_factor = 50975
|
||||
profile_look_table_dims = 50981
|
||||
profile_look_table_data = 50982
|
||||
opcode_list1 = 51008
|
||||
opcode_list2 = 51009
|
||||
opcode_list3 = 51022
|
||||
noise_profile = 51041
|
||||
time_codes = 51043
|
||||
frame_rate = 51044
|
||||
t_stop = 51058
|
||||
reel_name = 51081
|
||||
original_default_final_size = 51089
|
||||
original_best_quality_size = 51090
|
||||
original_default_crop_size = 51091
|
||||
camera_label = 51105
|
||||
profile_hue_sat_map_encoding = 51107
|
||||
profile_look_table_encoding = 51108
|
||||
baseline_exposure_offset = 51109
|
||||
default_black_render = 51110
|
||||
new_raw_image_digest = 51111
|
||||
raw_to_preview_gain = 51112
|
||||
default_user_crop = 51125
|
||||
padding = 59932
|
||||
offset_schema = 59933
|
||||
owner_name2 = 65000
|
||||
serial_number2 = 65001
|
||||
lens = 65002
|
||||
kdc_ifd = 65024
|
||||
raw_file = 65100
|
||||
converter = 65101
|
||||
white_balance2 = 65102
|
||||
exposure = 65105
|
||||
shadows = 65106
|
||||
brightness = 65107
|
||||
contrast2 = 65108
|
||||
saturation2 = 65109
|
||||
sharpness2 = 65110
|
||||
smoothness = 65111
|
||||
moire_filter = 65112
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.tag = self._root.IfdField.TagEnum(self._io.read_u2be())
|
||||
self.field_type = self._root.IfdField.FieldTypeEnum(self._io.read_u2be())
|
||||
self.length = self._io.read_u4be()
|
||||
self.ofs_or_data = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def type_byte_length(self):
|
||||
if hasattr(self, '_m_type_byte_length'):
|
||||
return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
|
||||
|
||||
self._m_type_byte_length = (2 if self.field_type == self._root.IfdField.FieldTypeEnum.word else (4 if self.field_type == self._root.IfdField.FieldTypeEnum.dword else 1))
|
||||
return self._m_type_byte_length if hasattr(self, '_m_type_byte_length') else None
|
||||
|
||||
@property
|
||||
def byte_length(self):
|
||||
if hasattr(self, '_m_byte_length'):
|
||||
return self._m_byte_length if hasattr(self, '_m_byte_length') else None
|
||||
|
||||
self._m_byte_length = (self.length * self.type_byte_length)
|
||||
return self._m_byte_length if hasattr(self, '_m_byte_length') else None
|
||||
|
||||
@property
|
||||
def is_immediate_data(self):
|
||||
if hasattr(self, '_m_is_immediate_data'):
|
||||
return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
|
||||
|
||||
self._m_is_immediate_data = self.byte_length <= 4
|
||||
return self._m_is_immediate_data if hasattr(self, '_m_is_immediate_data') else None
|
||||
|
||||
@property
|
||||
def data(self):
|
||||
if hasattr(self, '_m_data'):
|
||||
return self._m_data if hasattr(self, '_m_data') else None
|
||||
|
||||
if not self.is_immediate_data:
|
||||
io = self._root._io
|
||||
_pos = io.pos()
|
||||
io.seek(self.ofs_or_data)
|
||||
self._m_data = io.read_bytes(self.byte_length)
|
||||
io.seek(_pos)
|
||||
|
||||
return self._m_data if hasattr(self, '_m_data') else None
|
||||
|
||||
|
||||
@property
|
||||
def ifd0(self):
|
||||
if hasattr(self, '_m_ifd0'):
|
||||
return self._m_ifd0 if hasattr(self, '_m_ifd0') else None
|
||||
|
||||
_pos = self._io.pos()
|
||||
self._io.seek(self.ifd0_ofs)
|
||||
self._m_ifd0 = self._root.Ifd(self._io, self, self._root)
|
||||
self._io.seek(_pos)
|
||||
return self._m_ifd0 if hasattr(self, '_m_ifd0') else None
|
||||
@@ -1,571 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was exif_le.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/exif_le.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
class ExifLe(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.version = self._io.read_u2le()
|
||||
self.ifd0_ofs = self._io.read_u4le()
|
||||
|
||||
class Ifd(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.num_fields = self._io.read_u2le()
|
||||
self.fields = [None] * (self.num_fields)
|
||||
for i in range(self.num_fields):
|
||||
self.fields[i] = self._root.IfdField(self._io, self, self._root)
|
||||
|
||||
self.next_ifd_ofs = self._io.read_u4le()
|
||||
|
||||
@property
|
||||
def next_ifd(self):
|
||||
if hasattr(self, '_m_next_ifd'):
|
||||
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
|
||||
|
||||
if self.next_ifd_ofs != 0:
|
||||
_pos = self._io.pos()
|
||||
self._io.seek(self.next_ifd_ofs)
|
||||
self._m_next_ifd = self._root.Ifd(self._io, self, self._root)
|
||||
self._io.seek(_pos)
|
||||
|
||||
return self._m_next_ifd if hasattr(self, '_m_next_ifd') else None
|
||||
|
||||
|
||||
class IfdField(KaitaiStruct):
|
||||
|
||||
class FieldTypeEnum(Enum):
|
||||
byte = 1
|
||||
ascii_string = 2
|
||||
word = 3
|
||||
dword = 4
|
||||
rational = 5
|
||||
|
||||
class TagEnum(Enum):
|
||||
image_width = 256
|
||||
image_height = 257
|
||||
bits_per_sample = 258
|
||||
compression = 259
|
||||
photometric_interpretation = 262
|
||||
thresholding = 263
|
||||
cell_width = 264
|
||||
cell_length = 265
|
||||
fill_order = 266
|
||||
document_name = 269
|
||||
image_description = 270
|
||||
make = 271
|
||||
model = 272
|
||||
strip_offsets = 273
|
||||
orientation = 274
|
||||
samples_per_pixel = 277
|
||||
rows_per_strip = 278
|
||||
strip_byte_counts = 279
|
||||
min_sample_value = 280
|
||||
max_sample_value = 281
|
||||
x_resolution = 282
|
||||
y_resolution = 283
|
||||
planar_configuration = 284
|
||||
page_name = 285
|
||||
x_position = 286
|
||||
y_position = 287
|
||||
free_offsets = 288
|
||||
free_byte_counts = 289
|
||||
gray_response_unit = 290
|
||||
gray_response_curve = 291
|
||||
t4_options = 292
|
||||
t6_options = 293
|
||||
resolution_unit = 296
|
||||
page_number = 297
|
||||
color_response_unit = 300
|
||||
transfer_function = 301
|
||||
software = 305
|
||||
modify_date = 306
|
||||
artist = 315
|
||||
host_computer = 316
|
||||
predictor = 317
|
||||
white_point = 318
|
||||
primary_chromaticities = 319
|
||||
color_map = 320
|
||||
halftone_hints = 321
|
||||
tile_width = 322
|
||||
tile_length = 323
|
||||
tile_offsets = 324
|
||||
tile_byte_counts = 325
|
||||
bad_fax_lines = 326
|
||||
clean_fax_data = 327
|
||||
consecutive_bad_fax_lines = 328
|
||||
sub_ifd = 330
|
||||
ink_set = 332
|
||||
ink_names = 333
|
||||
numberof_inks = 334
|
||||
dot_range = 336
|
||||
target_printer = 337
|
||||
extra_samples = 338
|
||||
sample_format = 339
|
||||
s_min_sample_value = 340
|
||||
s_max_sample_value = 341
|
||||
transfer_range = 342
|
||||
clip_path = 343
|
||||
x_clip_path_units = 344
|
||||
y_clip_path_units = 345
|
||||
indexed = 346
|
||||
jpeg_tables = 347
|
||||
opi_proxy = 351
|
||||
global_parameters_ifd = 400
|
||||
profile_type = 401
|
||||
fax_profile = 402
|
||||
coding_methods = 403
|
||||
version_year = 404
|
||||
mode_number = 405
|
||||
decode = 433
|
||||
default_image_color = 434
|
||||
t82_options = 435
|
||||
jpeg_tables2 = 437
|
||||
jpeg_proc = 512
|
||||
thumbnail_offset = 513
|
||||
thumbnail_length = 514
|
||||
jpeg_restart_interval = 515
|
||||
jpeg_lossless_predictors = 517
|
||||
jpeg_point_transforms = 518
|
||||
jpegq_tables = 519
|
||||
jpegdc_tables = 520
|
||||
jpegac_tables = 521
|
||||
y_cb_cr_coefficients = 529
|
||||
y_cb_cr_sub_sampling = 530
|
||||
y_cb_cr_positioning = 531
|
||||
reference_black_white = 532
|
||||
strip_row_counts = 559
|
||||
application_notes = 700
|
||||
uspto_miscellaneous = 999
|
||||
related_image_file_format = 4096
|
||||
related_image_width = 4097
|
||||
related_image_height = 4098
|
||||
rating = 18246
|
||||
xp_dip_xml = 18247
|
||||
stitch_info = 18248
|
||||
rating_percent = 18249
|
||||
sony_raw_file_type = 28672
|
||||
light_falloff_params = 28722
|
||||
chromatic_aberration_corr_params = 28725
|
||||
distortion_corr_params = 28727
|
||||
image_id = 32781
|
||||
wang_tag1 = 32931
|
||||
wang_annotation = 32932
|
||||
wang_tag3 = 32933
|
||||
wang_tag4 = 32934
|
||||
image_reference_points = 32953
|
||||
region_xform_tack_point = 32954
|
||||
warp_quadrilateral = 32955
|
||||
affine_transform_mat = 32956
|
||||
matteing = 32995
|
||||
data_type = 32996
|
||||
image_depth = 32997
|
||||
tile_depth = 32998
|
||||
image_full_width = 33300
|
||||
image_full_height = 33301
|
||||
texture_format = 33302
|
||||
wrap_modes = 33303
|
||||
fov_cot = 33304
|
||||
matrix_world_to_screen = 33305
|
||||
matrix_world_to_camera = 33306
|
||||
model2 = 33405
|
||||
cfa_repeat_pattern_dim = 33421
|
||||
cfa_pattern2 = 33422
|
||||
battery_level = 33423
|
||||
kodak_ifd = 33424
|
||||
copyright = 33432
|
||||
exposure_time = 33434
|
||||
f_number = 33437
|
||||
md_file_tag = 33445
|
||||
md_scale_pixel = 33446
|
||||
md_color_table = 33447
|
||||
md_lab_name = 33448
|
||||
md_sample_info = 33449
|
||||
md_prep_date = 33450
|
||||
md_prep_time = 33451
|
||||
md_file_units = 33452
|
||||
pixel_scale = 33550
|
||||
advent_scale = 33589
|
||||
advent_revision = 33590
|
||||
uic1_tag = 33628
|
||||
uic2_tag = 33629
|
||||
uic3_tag = 33630
|
||||
uic4_tag = 33631
|
||||
iptc_naa = 33723
|
||||
intergraph_packet_data = 33918
|
||||
intergraph_flag_registers = 33919
|
||||
intergraph_matrix = 33920
|
||||
ingr_reserved = 33921
|
||||
model_tie_point = 33922
|
||||
site = 34016
|
||||
color_sequence = 34017
|
||||
it8_header = 34018
|
||||
raster_padding = 34019
|
||||
bits_per_run_length = 34020
|
||||
bits_per_extended_run_length = 34021
|
||||
color_table = 34022
|
||||
image_color_indicator = 34023
|
||||
background_color_indicator = 34024
|
||||
image_color_value = 34025
|
||||
background_color_value = 34026
|
||||
pixel_intensity_range = 34027
|
||||
transparency_indicator = 34028
|
||||
color_characterization = 34029
|
||||
hc_usage = 34030
|
||||
trap_indicator = 34031
|
||||
cmyk_equivalent = 34032
|
||||
sem_info = 34118
|
||||
afcp_iptc = 34152
|
||||
pixel_magic_jbig_options = 34232
|
||||
jpl_carto_ifd = 34263
|
||||
model_transform = 34264
|
||||
wb_grgb_levels = 34306
|
||||
leaf_data = 34310
|
||||
photoshop_settings = 34377
|
||||
exif_offset = 34665
|
||||
icc_profile = 34675
|
||||
tiff_fx_extensions = 34687
|
||||
multi_profiles = 34688
|
||||
shared_data = 34689
|
||||
t88_options = 34690
|
||||
image_layer = 34732
|
||||
geo_tiff_directory = 34735
|
||||
geo_tiff_double_params = 34736
|
||||
geo_tiff_ascii_params = 34737
|
||||
jbig_options = 34750
|
||||
exposure_program = 34850
|
||||
spectral_sensitivity = 34852
|
||||
gps_info = 34853
|
||||
iso = 34855
|
||||
opto_electric_conv_factor = 34856
|
||||
interlace = 34857
|
||||
time_zone_offset = 34858
|
||||
self_timer_mode = 34859
|
||||
sensitivity_type = 34864
|
||||
standard_output_sensitivity = 34865
|
||||
recommended_exposure_index = 34866
|
||||
iso_speed = 34867
|
||||
iso_speed_latitudeyyy = 34868
|
||||
iso_speed_latitudezzz = 34869
|
||||
fax_recv_params = 34908
|
||||
fax_sub_address = 34909
|
||||
fax_recv_time = 34910
|
||||
fedex_edr = 34929
|
||||
leaf_sub_ifd = 34954
|
||||
exif_version = 36864
|
||||
date_time_original = 36867
|
||||
create_date = 36868
|
||||
google_plus_upload_code = 36873
|
||||
offset_time = 36880
|
||||
offset_time_original = 36881
|
||||
offset_time_digitized = 36882
|
||||
components_configuration = 37121
|
||||
compressed_bits_per_pixel = 37122
|
||||
shutter_speed_value = 37377
|
||||
aperture_value = 37378
|
||||
brightness_value = 37379
|
||||
exposure_compensation = 37380
|
||||
max_aperture_value = 37381
|
||||
subject_distance = 37382
|
||||
metering_mode = 37383
|
||||
light_source = 37384
|
||||
flash = 37385
|
||||
focal_length = 37386
|
||||
flash_energy = 37387
|
||||
spatial_frequency_response = 37388
|
||||
noise = 37389
|
||||
focal_plane_x_resolution = 37390
|
||||
focal_plane_y_resolution = 37391
|
||||
focal_plane_resolution_unit = 37392
|
||||
image_number = 37393
|
||||
security_classification = 37394
|
||||
image_history = 37395
|
||||
subject_area = 37396
|
||||
exposure_index = 37397
|
||||
tiff_ep_standard_id = 37398
|
||||
sensing_method = 37399
|
||||
cip3_data_file = 37434
|
||||
cip3_sheet = 37435
|
||||
cip3_side = 37436
|
||||
sto_nits = 37439
|
||||
maker_note = 37500
|
||||
user_comment = 37510
|
||||
sub_sec_time = 37520
|
||||
sub_sec_time_original = 37521
|
||||
sub_sec_time_digitized = 37522
|
||||
ms_document_text = 37679
|
||||
ms_property_set_storage = 37680
|
||||
ms_document_text_position = 37681
|
||||
image_source_data = 37724
|
||||
ambient_temperature = 37888
|
||||
humidity = 37889
|
||||
pressure = 37890
|
||||
water_depth = 37891
|
||||
acceleration = 37892
|
||||
camera_elevation_angle = 37893
|
||||
xp_title = 40091
|
||||
xp_comment = 40092
|
||||
xp_author = 40093
|
||||
xp_keywords = 40094
|
||||
xp_subject = 40095
|
||||
flashpix_version = 40960
|
||||
color_space = 40961
|
||||
exif_image_width = 40962
|
||||
exif_image_height = 40963
|
||||
related_sound_file = 40964
|
||||
interop_offset = 40965
|
||||
samsung_raw_pointers_offset = 40976
|
||||
samsung_raw_pointers_length = 40977
|
||||
samsung_raw_byte_order = 41217
|
||||
samsung_raw_unknown = 41218
|
||||
flash_energy2 = 41483
|
||||
spatial_frequency_response2 = 41484
|
||||
noise2 = 41485
|
||||
focal_plane_x_resolution2 = 41486
|
||||
focal_plane_y_resolution2 = 41487
|
||||
focal_plane_resolution_unit2 = 41488
|
||||
image_number2 = 41489
|
||||
security_classification2 = 41490
|
||||
image_history2 = 41491
|
||||
subject_location = 41492
|
||||
exposure_index2 = 41493
|
||||
tiff_ep_standard_id2 = 41494
|
||||
sensing_method2 = 41495
|
||||
file_source = 41728
|
||||
scene_type = 41729
|
||||
cfa_pattern = 41730
|
||||
custom_rendered = 41985
|
||||
exposure_mode = 41986
|
||||
white_balance = 41987
|
||||
digital_zoom_ratio = 41988
|
||||
focal_length_in35mm_format = 41989
|
||||
scene_capture_type = 41990
|
||||
gain_control = 41991
|
||||
contrast = 41992
|
||||
saturation = 41993
|
||||
sharpness = 41994
|
||||
device_setting_description = 41995
|
||||
subject_distance_range = 41996
|
||||
image_unique_id = 42016
|
||||
owner_name = 42032
|
||||
serial_number = 42033
|
||||
lens_info = 42034
|
||||
lens_make = 42035
|
||||
lens_model = 42036
|
||||
lens_serial_number = 42037
|
||||
gdal_metadata = 42112
|
||||
gdal_no_data = 42113
|
||||
gamma = 42240
|
||||
expand_software = 44992
|
||||
expand_lens = 44993
|
||||
expand_film = 44994
|
||||
expand_filter_lens = 44995
|
||||
expand_scanner = 44996
|
||||
expand_flash_lamp = 44997
|
||||
pixel_format = 48129
|
||||
transformation = 48130
|
||||
uncompressed = 48131
|
||||
image_type = 48132
|
||||
image_width2 = 48256
|
||||
image_height2 = 48257
|
||||
width_resolution = 48258
|
||||
height_resolution = 48259
|
||||
image_offset = 48320
|
||||
image_byte_count = 48321
|
||||
alpha_offset = 48322
|
||||
alpha_byte_count = 48323
|
||||
image_data_discard = 48324
|
||||
alpha_data_discard = 48325
|
||||
oce_scanjob_desc = 50215
|
||||
oce_application_selector = 50216
|
||||
oce_id_number = 50217
|
||||
oce_image_logic = 50218
|
||||
annotations = 50255
|
||||
print_im = 50341
|
||||
original_file_name = 50547
|
||||
uspto_original_content_type = 50560
|
||||
dng_version = 50706
|
||||
dng_backward_version = 50707
|
||||
unique_camera_model = 50708
|
||||
localized_camera_model = 50709
|
||||
cfa_plane_color = 50710
|
||||
cfa_layout = 50711
|
||||
linearization_table = 50712
|
||||
black_level_repeat_dim = 50713
|
||||
black_level = 50714
|
||||
black_level_delta_h = 50715
|
||||
black_level_delta_v = 50716
|
||||
white_level = 50717
|
||||
default_scale = 50718
|
||||
default_crop_origin = 50719
|
||||
default_crop_size = 50720
|
||||
color_matrix1 = 50721
|
||||
color_matrix2 = 50722
|
||||
camera_calibration1 = 50723
|
||||
camera_calibration2 = 50724
|
||||
reduction_matrix1 = 50725
|
||||
reduction_matrix2 = 50726
|
||||
analog_balance = 50727
|
||||
as_shot_neutral = 50728
|
||||
as_shot_white_xy = 50729
|
||||
baseline_exposure = 50730
|
||||
baseline_noise = 50731
|
||||
baseline_sharpness = 50732
|
||||
bayer_green_split = 50733
|
||||
linear_response_limit = 50734
|
||||
camera_serial_number = 50735
|
||||
dng_lens_info = 50736
|
||||
chroma_blur_radius = 50737
|
||||
anti_alias_strength = 50738
|
||||
shadow_scale = 50739
|
||||
sr2_private = 50740
|
||||
maker_note_safety = 50741
|
||||
raw_image_segmentation = 50752
|
||||
calibration_illuminant1 = 50778
|
||||
calibration_illuminant2 = 50779
|
||||
best_quality_scale = 50780
|
||||
raw_data_unique_id = 50781
|
||||
alias_layer_metadata = 50784
|
||||
original_raw_file_name = 50827
|
||||
original_raw_file_data = 50828
|
||||
active_area = 50829
|
||||
masked_areas = 50830
|
||||
as_shot_icc_profile = 50831
|
||||
as_shot_pre_profile_matrix = 50832
|
||||
current_icc_profile = 50833
|
||||
current_pre_profile_matrix = 50834
|
||||
colorimetric_reference = 50879
|
||||
s_raw_type = 50885
|
||||
panasonic_title = 50898
|
||||
panasonic_title2 = 50899
|
||||
camera_calibration_sig = 50931
|
||||
profile_calibration_sig = 50932
|
||||
profile_ifd = 50933
|
||||
as_shot_profile_name = 50934
|
||||
noise_reduction_applied = 50935
|
||||
profile_name = 50936
|
||||
profile_hue_sat_map_dims = 50937
|
||||
profile_hue_sat_map_data1 = 50938
|
||||
profile_hue_sat_map_data2 = 50939
|
||||
profile_tone_curve = 50940
|
||||
profile_embed_policy = 50941
|
||||
profile_copyright = 50942
|
||||
forward_matrix1 = 50964
|
||||
forward_matrix2 = 50965
|
||||
preview_application_name = 50966
|
||||
preview_application_version = 50967
|
||||
preview_settings_name = 50968
|
||||
preview_settings_digest = 50969
|
||||
preview_color_space = 50970
|
||||
preview_date_time = 50971
|
||||
raw_image_digest = 50972
|
||||
original_raw_file_digest = 50973
|
||||
sub_tile_block_size = 50974
|
||||
row_interleave_factor = 50975
|
||||
profile_look_table_dims = 50981
|
||||
profile_look_table_data = 50982
|
||||
opcode_list1 = 51008
|
||||
opcode_list2 = 51009
|
||||
opcode_list3 = 51022
|
||||
noise_profile = 51041
|
||||
time_codes = 51043
|
||||
frame_rate = 51044
|
||||
t_stop = 51058
|
||||
reel_name = 51081
|
||||
original_default_final_size = 51089
|
||||
original_best_quality_size = 51090
|
||||
original_default_crop_size = 51091
|
||||
camera_label = 51105
|
||||
profile_hue_sat_map_encoding = 51107
|
||||
profile_look_table_encoding = 51108
|
||||
baseline_exposure_offset = 51109
|
||||
default_black_render = 51110
|
||||
new_raw_image_digest = 51111
|
||||
raw_to_preview_gain = 51112
|
||||
default_user_crop = 51125
|
||||
padding = 59932
|
||||
offset_schema = 59933
|
||||
owner_name2 = 65000
|
||||
serial_number2 = 65001
|
||||
lens = 65002
|
||||
kdc_ifd = 65024
|
||||
raw_file = 65100
|
||||
converter = 65101
|
||||
white_balance2 = 65102
|
||||
exposure = 65105
|
||||
shadows = 65106
|
||||
brightness = 65107
|
||||
contrast2 = 65108
|
||||
saturation2 = 65109
|
||||
sharpness2 = 65110
|
||||
smoothness = 65111
|
||||
moire_filter = 65112
|
||||
def __init__(self, _io, _parent=None, _root=None):
    """Parse one 12-byte IFD field entry from the stream.

    Reads, in order: 2-byte tag, 2-byte field type, 4-byte element
    count, and a final 4 bytes that hold either the value itself (when
    it fits) or the offset to it — see the `data` property.
    """
    self._io = _io
    self._parent = _parent
    self._root = _root if _root else self
    self.tag = self._root.IfdField.TagEnum(self._io.read_u2le())
    self.field_type = self._root.IfdField.FieldTypeEnum(self._io.read_u2le())
    self.length = self._io.read_u4le()
    # Inline value or offset into the root stream, disambiguated by byte_length.
    self.ofs_or_data = self._io.read_u4le()
|
||||
|
||||
@property
def type_byte_length(self):
    """Size in bytes of a single element of this field's type.

    word -> 2, dword -> 4, everything else -> 1. Computed once and
    memoized on the instance.
    """
    if not hasattr(self, '_m_type_byte_length'):
        field_type = self.field_type
        if field_type == self._root.IfdField.FieldTypeEnum.word:
            self._m_type_byte_length = 2
        elif field_type == self._root.IfdField.FieldTypeEnum.dword:
            self._m_type_byte_length = 4
        else:
            self._m_type_byte_length = 1
    return self._m_type_byte_length
|
||||
|
||||
@property
def byte_length(self):
    """Total payload size in bytes: element count times per-element size."""
    if not hasattr(self, '_m_byte_length'):
        self._m_byte_length = self.length * self.type_byte_length
    return self._m_byte_length
|
||||
|
||||
@property
def is_immediate_data(self):
    """True when the payload fits in the 4-byte ofs_or_data slot inline."""
    if not hasattr(self, '_m_is_immediate_data'):
        self._m_is_immediate_data = self.byte_length <= 4
    return self._m_is_immediate_data
|
||||
|
||||
@property
def data(self):
    """Payload bytes read from the referenced offset.

    Returns None for immediate (inline) data — callers must read the
    value from ofs_or_data themselves in that case. The root stream
    position is saved and restored around the out-of-line read.
    """
    if hasattr(self, '_m_data'):
        return self._m_data
    if not self.is_immediate_data:
        root_io = self._root._io
        saved_pos = root_io.pos()
        root_io.seek(self.ofs_or_data)
        self._m_data = root_io.read_bytes(self.byte_length)
        root_io.seek(saved_pos)
    return getattr(self, '_m_data', None)
|
||||
|
||||
|
||||
@property
def ifd0(self):
    """First image file directory (IFD0), parsed lazily.

    Seeks to ifd0_ofs, parses an Ifd, then restores the previous stream
    position so other lazy readers are unaffected. Memoized.
    """
    if hasattr(self, '_m_ifd0'):
        return self._m_ifd0 if hasattr(self, '_m_ifd0') else None

    _pos = self._io.pos()
    self._io.seek(self.ifd0_ofs)
    self._m_ifd0 = self._root.Ifd(self._io, self, self._root)
    self._io.seek(_pos)
    return self._m_ifd0 if hasattr(self, '_m_ifd0') else None
|
||||
@@ -1,247 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was gif.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/562154250bea0081fed4e232751b934bc270a0c7/image/gif.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
class Gif(KaitaiStruct):
    """Parser for the GIF image format (kaitai-struct generated).

    Reads the header, the logical screen descriptor, an optional global
    color table, then a sequence of blocks until end of stream.
    """

    class BlockType(Enum):
        # Introducer byte of each top-level block.
        extension = 33
        local_image_descriptor = 44
        end_of_file = 59

    class ExtensionLabel(Enum):
        # Label byte that follows a BlockType.extension introducer.
        graphic_control = 249
        comment = 254
        application = 255

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.header = self._root.Header(self._io, self, self._root)
        self.logical_screen_descriptor = self._root.LogicalScreenDescriptor(self._io, self, self._root)
        if self.logical_screen_descriptor.has_color_table:
            # Global color table: color_table_size entries of 3 bytes (RGB) each,
            # parsed from its own substream.
            self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3))
            io = KaitaiStream(BytesIO(self._raw_global_color_table))
            self.global_color_table = self._root.ColorTable(io, self, self._root)

        self.blocks = []
        while not self._io.is_eof():
            self.blocks.append(self._root.Block(self._io, self, self._root))

    class ImageData(KaitaiStruct):
        """LZW minimum code size followed by the compressed pixel subblocks."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.lzw_min_code_size = self._io.read_u1()
            self.subblocks = self._root.Subblocks(self._io, self, self._root)

    class ColorTableEntry(KaitaiStruct):
        """One RGB palette entry (three unsigned bytes)."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.red = self._io.read_u1()
            self.green = self._io.read_u1()
            self.blue = self._io.read_u1()

    class LogicalScreenDescriptor(KaitaiStruct):
        """Screen dimensions plus a packed flags byte (color-table bits)."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.screen_width = self._io.read_u2le()
            self.screen_height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            self.bg_color_index = self._io.read_u1()
            self.pixel_aspect_ratio = self._io.read_u1()

        @property
        def has_color_table(self):
            """True when bit 7 of flags signals a global color table."""
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

        @property
        def color_table_size(self):
            """Number of color-table entries: 2 ** (1 + low 3 bits of flags)."""
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

    class LocalImageDescriptor(KaitaiStruct):
        """One image frame: geometry, optional local color table, pixel data."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.left = self._io.read_u2le()
            self.top = self._io.read_u2le()
            self.width = self._io.read_u2le()
            self.height = self._io.read_u2le()
            self.flags = self._io.read_u1()
            if self.has_color_table:
                # Local color table overrides the global one for this frame.
                self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3))
                io = KaitaiStream(BytesIO(self._raw_local_color_table))
                self.local_color_table = self._root.ColorTable(io, self, self._root)

            self.image_data = self._root.ImageData(self._io, self, self._root)

        @property
        def has_color_table(self):
            """True when bit 7 of flags signals a local color table."""
            if hasattr(self, '_m_has_color_table'):
                return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

            self._m_has_color_table = (self.flags & 128) != 0
            return self._m_has_color_table if hasattr(self, '_m_has_color_table') else None

        @property
        def has_interlace(self):
            """True when bit 6 of flags signals interlaced row order."""
            if hasattr(self, '_m_has_interlace'):
                return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None

            self._m_has_interlace = (self.flags & 64) != 0
            return self._m_has_interlace if hasattr(self, '_m_has_interlace') else None

        @property
        def has_sorted_color_table(self):
            """True when bit 5 of flags marks the local color table as sorted."""
            if hasattr(self, '_m_has_sorted_color_table'):
                return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None

            self._m_has_sorted_color_table = (self.flags & 32) != 0
            return self._m_has_sorted_color_table if hasattr(self, '_m_has_sorted_color_table') else None

        @property
        def color_table_size(self):
            """Number of local color-table entries: 2 ** (1 + low 3 bits of flags)."""
            if hasattr(self, '_m_color_table_size'):
                return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

            self._m_color_table_size = (2 << (self.flags & 7))
            return self._m_color_table_size if hasattr(self, '_m_color_table_size') else None

    class Block(KaitaiStruct):
        """One top-level block: a type byte followed by a type-specific body.

        NOTE(review): unknown block types (including end_of_file) leave
        `body` unset — callers must not assume the attribute exists.
        """

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.block_type = self._root.BlockType(self._io.read_u1())
            _on = self.block_type
            if _on == self._root.BlockType.extension:
                self.body = self._root.Extension(self._io, self, self._root)
            elif _on == self._root.BlockType.local_image_descriptor:
                self.body = self._root.LocalImageDescriptor(self._io, self, self._root)

    class ColorTable(KaitaiStruct):
        """A run of RGB entries consuming the whole (sub)stream."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while not self._io.is_eof():
                self.entries.append(self._root.ColorTableEntry(self._io, self, self._root))

    class Header(KaitaiStruct):
        """GIF signature: literal 'GIF' followed by a 3-byte version ('87a'/'89a')."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # 71, 73, 70 == b"GIF"
            self.magic = self._io.ensure_fixed_contents(struct.pack('3b', 71, 73, 70))
            self.version = self._io.read_bytes(3)

    class ExtGraphicControl(KaitaiStruct):
        """Graphic Control Extension: animation delay and transparency info."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # Block size is always 4 for this extension.
            self.block_size = self._io.ensure_fixed_contents(struct.pack('1b', 4))
            self.flags = self._io.read_u1()
            self.delay_time = self._io.read_u2le()
            self.transparent_idx = self._io.read_u1()
            self.terminator = self._io.ensure_fixed_contents(struct.pack('1b', 0))

        @property
        def transparent_color_flag(self):
            """True when bit 0 of flags marks transparent_idx as meaningful."""
            if hasattr(self, '_m_transparent_color_flag'):
                return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None

            self._m_transparent_color_flag = (self.flags & 1) != 0
            return self._m_transparent_color_flag if hasattr(self, '_m_transparent_color_flag') else None

        @property
        def user_input_flag(self):
            """True when bit 1 of flags requests user input before continuing."""
            if hasattr(self, '_m_user_input_flag'):
                return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None

            self._m_user_input_flag = (self.flags & 2) != 0
            return self._m_user_input_flag if hasattr(self, '_m_user_input_flag') else None

    class Subblock(KaitaiStruct):
        """Length-prefixed chunk of bytes; num_bytes == 0 terminates a series."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_bytes = self._io.read_u1()
            self.bytes = self._io.read_bytes(self.num_bytes)

    class ExtApplication(KaitaiStruct):
        """Application extension: application id plus data subblocks."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.application_id = self._root.Subblock(self._io, self, self._root)
            self.subblocks = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.subblocks.append(_)
                # Zero-length subblock terminates the list (and is included).
                if _.num_bytes == 0:
                    break

    class Subblocks(KaitaiStruct):
        """Series of subblocks terminated by a zero-length subblock."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.entries = []
            while True:
                _ = self._root.Subblock(self._io, self, self._root)
                self.entries.append(_)
                if _.num_bytes == 0:
                    break

    class Extension(KaitaiStruct):
        """Extension block dispatched on its label byte; unknown labels are
        treated as plain subblock series."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.label = self._root.ExtensionLabel(self._io.read_u1())
            _on = self.label
            if _on == self._root.ExtensionLabel.application:
                self.body = self._root.ExtApplication(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.comment:
                self.body = self._root.Subblocks(self._io, self, self._root)
            elif _on == self._root.ExtensionLabel.graphic_control:
                self.body = self._root.ExtGraphicControl(self._io, self, self._root)
            else:
                self.body = self._root.Subblocks(self._io, self, self._root)
|
||||
@@ -1,206 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was jpeg.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/jpeg.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
from .exif import Exif
|
||||
class Jpeg(KaitaiStruct):
    """Parser for JPEG files (kaitai-struct generated).

    A JPEG is a sequence of marker-delimited segments; this reads them
    until end of stream.
    """

    class ComponentId(Enum):
        # Standard JPEG component identifiers.
        y = 1
        cb = 2
        cr = 3
        i = 4
        q = 5

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.segments = []
        while not self._io.is_eof():
            self.segments.append(self._root.Segment(self._io, self, self._root))

    class Segment(KaitaiStruct):
        """One 0xFF-prefixed segment, dispatched on its marker byte."""

        class MarkerEnum(Enum):
            tem = 1
            sof0 = 192
            sof1 = 193
            sof2 = 194
            sof3 = 195
            dht = 196
            sof5 = 197
            sof6 = 198
            sof7 = 199
            soi = 216
            eoi = 217
            sos = 218
            dqt = 219
            dnl = 220
            dri = 221
            dhp = 222
            app0 = 224
            app1 = 225
            app2 = 226
            app3 = 227
            app4 = 228
            app5 = 229
            app6 = 230
            app7 = 231
            app8 = 232
            app9 = 233
            app10 = 234
            app11 = 235
            app12 = 236
            app13 = 237
            app14 = 238
            app15 = 239
            com = 254

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            # Every segment starts with 0xFF (-1 as a signed byte).
            self.magic = self._io.ensure_fixed_contents(struct.pack('1b', -1))
            self.marker = self._root.Segment.MarkerEnum(self._io.read_u1())
            # SOI and EOI are bare markers: no length, no payload.
            if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
                self.length = self._io.read_u2be()

            if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
                # length includes its own 2 bytes, so the payload is length - 2.
                _on = self.marker
                if _on == self._root.Segment.MarkerEnum.sos:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentSos(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.app1:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentApp1(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.sof0:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentSof0(io, self, self._root)
                elif _on == self._root.Segment.MarkerEnum.app0:
                    self._raw_data = self._io.read_bytes((self.length - 2))
                    io = KaitaiStream(BytesIO(self._raw_data))
                    self.data = self._root.SegmentApp0(io, self, self._root)
                else:
                    self.data = self._io.read_bytes((self.length - 2))

            # After SOS, the rest of the stream is entropy-coded image data.
            if self.marker == self._root.Segment.MarkerEnum.sos:
                self.image_data = self._io.read_bytes_full()

    class SegmentSos(KaitaiStruct):
        """Start-of-scan header: per-component tables and spectral selection."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.num_components = self._io.read_u1()
            self.components = [None] * (self.num_components)
            for i in range(self.num_components):
                self.components[i] = self._root.SegmentSos.Component(self._io, self, self._root)

            self.start_spectral_selection = self._io.read_u1()
            self.end_spectral = self._io.read_u1()
            self.appr_bit_pos = self._io.read_u1()

        class Component(KaitaiStruct):
            """Scan component: id plus its Huffman table selector byte."""

            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self.id = self._root.ComponentId(self._io.read_u1())
                self.huffman_table = self._io.read_u1()

    class SegmentApp1(KaitaiStruct):
        """APP1 segment: carries Exif metadata when its magic string is 'Exif'."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.magic = self._io.read_strz("ASCII", 0, False, True, True)
            _on = self.magic
            if _on == u"Exif":
                self.body = self._root.ExifInJpeg(self._io, self, self._root)

    class SegmentSof0(KaitaiStruct):
        """Baseline start-of-frame header: dimensions and component layout."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.bits_per_sample = self._io.read_u1()
            self.image_height = self._io.read_u2be()
            self.image_width = self._io.read_u2be()
            self.num_components = self._io.read_u1()
            self.components = [None] * (self.num_components)
            for i in range(self.num_components):
                self.components[i] = self._root.SegmentSof0.Component(self._io, self, self._root)

        class Component(KaitaiStruct):
            """Frame component: id, packed sampling factors, quantization table."""

            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self.id = self._root.ComponentId(self._io.read_u1())
                self.sampling_factors = self._io.read_u1()
                self.quantization_table_id = self._io.read_u1()

            @property
            def sampling_x(self):
                """Horizontal sampling factor (high nibble of sampling_factors)."""
                if hasattr(self, '_m_sampling_x'):
                    return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None

                self._m_sampling_x = ((self.sampling_factors & 240) >> 4)
                return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None

            @property
            def sampling_y(self):
                """Vertical sampling factor (low nibble of sampling_factors)."""
                if hasattr(self, '_m_sampling_y'):
                    return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None

                self._m_sampling_y = (self.sampling_factors & 15)
                return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None

    class ExifInJpeg(KaitaiStruct):
        """Exif payload embedded in APP1: a padding zero then a TIFF structure."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.extra_zero = self._io.ensure_fixed_contents(struct.pack('1b', 0))
            self._raw_data = self._io.read_bytes_full()
            io = KaitaiStream(BytesIO(self._raw_data))
            # Delegates to the separately-generated Exif parser.
            self.data = Exif(io)

    class SegmentApp0(KaitaiStruct):
        """APP0 (JFIF) segment: version, pixel density and optional thumbnail."""

        class DensityUnit(Enum):
            no_units = 0
            pixels_per_inch = 1
            pixels_per_cm = 2

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self.magic = self._io.read_str_byte_limit(5, "ASCII")
            self.version_major = self._io.read_u1()
            self.version_minor = self._io.read_u1()
            self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1())
            self.density_x = self._io.read_u2be()
            self.density_y = self._io.read_u2be()
            self.thumbnail_x = self._io.read_u1()
            self.thumbnail_y = self._io.read_u1()
            # Uncompressed RGB thumbnail, 3 bytes per pixel.
            self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3))
|
||||
@@ -1,289 +0,0 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
# The source was png.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/9370c720b7d2ad329102d89bdc880ba6a706ef26/image/png.ksy
|
||||
|
||||
import array
|
||||
import struct
|
||||
import zlib
|
||||
from enum import Enum
|
||||
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
class Png(KaitaiStruct):
|
||||
|
||||
class ColorType(Enum):
    """IHDR color type byte; determines the sample layout of each pixel."""
    greyscale = 0
    truecolor = 2
    indexed = 3
    greyscale_alpha = 4
    truecolor_alpha = 6
|
||||
|
||||
class PhysUnit(Enum):
    """Unit for the pHYs chunk's pixel density values."""
    unknown = 0
    meter = 1
|
||||
def __init__(self, _io, _parent=None, _root=None):
    """Validate the PNG signature, parse the mandatory IHDR chunk, then
    read the remaining chunks until end of stream."""
    self._io = _io
    self._parent = _parent
    self._root = _root if _root else self
    # 8-byte PNG signature: \x89 P N G \r \n \x1a \n (as signed bytes).
    self.magic = self._io.ensure_fixed_contents(struct.pack('8b', -119, 80, 78, 71, 13, 10, 26, 10))
    # IHDR always comes first and is always 13 bytes long.
    self.ihdr_len = self._io.ensure_fixed_contents(struct.pack('4b', 0, 0, 0, 13))
    self.ihdr_type = self._io.ensure_fixed_contents(struct.pack('4b', 73, 72, 68, 82))
    self.ihdr = self._root.IhdrChunk(self._io, self, self._root)
    self.ihdr_crc = self._io.read_bytes(4)
    self.chunks = []
    while not self._io.is_eof():
        self.chunks.append(self._root.Chunk(self._io, self, self._root))
|
||||
|
||||
|
||||
class Rgb(KaitaiStruct):
    """One RGB palette entry: three unsigned bytes read in r, g, b order."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        # Channels must be consumed from the stream in this exact order.
        for channel in ("r", "g", "b"):
            setattr(self, channel, self._io.read_u1())
|
||||
|
||||
|
||||
class Chunk(KaitaiStruct):
    """A generic PNG chunk: length, 4-char type, type-dispatched body, CRC.

    Known chunk types are parsed into dedicated structures from an
    isolated substream; unknown types keep their raw bytes in `body`.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.len = self._io.read_u4be()
        self.type = self._io.read_str_byte_limit(4, "UTF-8")
        _on = self.type
        if _on == u"iTXt":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.InternationalTextChunk(io, self, self._root)
        elif _on == u"gAMA":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.GamaChunk(io, self, self._root)
        elif _on == u"tIME":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.TimeChunk(io, self, self._root)
        elif _on == u"PLTE":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.PlteChunk(io, self, self._root)
        elif _on == u"bKGD":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.BkgdChunk(io, self, self._root)
        elif _on == u"pHYs":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.PhysChunk(io, self, self._root)
        elif _on == u"tEXt":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.TextChunk(io, self, self._root)
        elif _on == u"cHRM":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.ChrmChunk(io, self, self._root)
        elif _on == u"sRGB":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.SrgbChunk(io, self, self._root)
        elif _on == u"zTXt":
            self._raw_body = self._io.read_bytes(self.len)
            io = KaitaiStream(BytesIO(self._raw_body))
            self.body = self._root.CompressedTextChunk(io, self, self._root)
        else:
            # Unrecognized chunk type: keep the payload as raw bytes.
            self.body = self._io.read_bytes(self.len)
        self.crc = self._io.read_bytes(4)
|
||||
|
||||
|
||||
class BkgdIndexed(KaitaiStruct):
    """bKGD body for indexed-color images: a single palette index."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.palette_index = self._io.read_u1()
|
||||
|
||||
|
||||
class Point(KaitaiStruct):
    """A cHRM chromaticity point: two u4be values, fixed-point scaled by 1e5."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.x_int = self._io.read_u4be()
        self.y_int = self._io.read_u4be()

    @property
    def x(self):
        """x coordinate as a float: x_int / 100000. Memoized."""
        if not hasattr(self, '_m_x'):
            self._m_x = self.x_int / 100000.0
        return self._m_x

    @property
    def y(self):
        """y coordinate as a float: y_int / 100000. Memoized."""
        if not hasattr(self, '_m_y'):
            self._m_y = self.y_int / 100000.0
        return self._m_y
|
||||
|
||||
|
||||
class BkgdGreyscale(KaitaiStruct):
    """bKGD body for greyscale images: one 16-bit grey value."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.value = self._io.read_u2be()
|
||||
|
||||
|
||||
class ChrmChunk(KaitaiStruct):
    """cHRM chunk: white point and primary chromaticities as fixed-point Points."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.white_point = self._root.Point(self._io, self, self._root)
        self.red = self._root.Point(self._io, self, self._root)
        self.green = self._root.Point(self._io, self, self._root)
        self.blue = self._root.Point(self._io, self, self._root)
|
||||
|
||||
|
||||
class IhdrChunk(KaitaiStruct):
    """IHDR chunk: image dimensions, bit depth, color type and method bytes."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.width = self._io.read_u4be()
        self.height = self._io.read_u4be()
        self.bit_depth = self._io.read_u1()
        self.color_type = self._root.ColorType(self._io.read_u1())
        self.compression_method = self._io.read_u1()
        self.filter_method = self._io.read_u1()
        self.interlace_method = self._io.read_u1()
|
||||
|
||||
|
||||
class PlteChunk(KaitaiStruct):
    """PLTE chunk: palette as a run of RGB entries until the substream ends."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.entries = []
        while not self._io.is_eof():
            self.entries.append(self._root.Rgb(self._io, self, self._root))
||||
|
||||
|
||||
class SrgbChunk(KaitaiStruct):
    """sRGB chunk: a single rendering-intent byte."""

    class Intent(Enum):
        perceptual = 0
        relative_colorimetric = 1
        saturation = 2
        absolute_colorimetric = 3

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.render_intent = self._root.SrgbChunk.Intent(self._io.read_u1())
|
||||
|
||||
|
||||
class CompressedTextChunk(KaitaiStruct):
    """zTXt chunk: keyword, compression method byte, zlib-compressed text.

    The text is eagerly decompressed into `text_datastream`.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self.keyword = self._io.read_strz("UTF-8", 0, False, True, True)
        self.compression_method = self._io.read_u1()
        self._raw_text_datastream = self._io.read_bytes_full()
        self.text_datastream = zlib.decompress(self._raw_text_datastream)
|
||||
|
||||
|
||||
class BkgdTruecolor(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.red = self._io.read_u2be()
|
||||
self.green = self._io.read_u2be()
|
||||
self.blue = self._io.read_u2be()
|
||||
|
||||
|
||||
class GamaChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.gamma_int = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def gamma_ratio(self):
|
||||
if hasattr(self, '_m_gamma_ratio'):
|
||||
return self._m_gamma_ratio if hasattr(self, '_m_gamma_ratio') else None
|
||||
|
||||
self._m_gamma_ratio = (100000.0 / self.gamma_int)
|
||||
return self._m_gamma_ratio if hasattr(self, '_m_gamma_ratio') else None
|
||||
|
||||
|
||||
class BkgdChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
_on = self._root.ihdr.color_type
|
||||
if _on == self._root.ColorType.greyscale_alpha:
|
||||
self.bkgd = self._root.BkgdGreyscale(self._io, self, self._root)
|
||||
elif _on == self._root.ColorType.indexed:
|
||||
self.bkgd = self._root.BkgdIndexed(self._io, self, self._root)
|
||||
elif _on == self._root.ColorType.greyscale:
|
||||
self.bkgd = self._root.BkgdGreyscale(self._io, self, self._root)
|
||||
elif _on == self._root.ColorType.truecolor_alpha:
|
||||
self.bkgd = self._root.BkgdTruecolor(self._io, self, self._root)
|
||||
elif _on == self._root.ColorType.truecolor:
|
||||
self.bkgd = self._root.BkgdTruecolor(self._io, self, self._root)
|
||||
|
||||
|
||||
class PhysChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.pixels_per_unit_x = self._io.read_u4be()
|
||||
self.pixels_per_unit_y = self._io.read_u4be()
|
||||
self.unit = self._root.PhysUnit(self._io.read_u1())
|
||||
|
||||
|
||||
class InternationalTextChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.keyword = self._io.read_strz("UTF-8", 0, False, True, True)
|
||||
self.compression_flag = self._io.read_u1()
|
||||
self.compression_method = self._io.read_u1()
|
||||
self.language_tag = self._io.read_strz("ASCII", 0, False, True, True)
|
||||
self.translated_keyword = self._io.read_strz("UTF-8", 0, False, True, True)
|
||||
self.text = self._io.read_str_eos("UTF-8")
|
||||
|
||||
|
||||
class TextChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.keyword = self._io.read_strz("iso8859-1", 0, False, True, True)
|
||||
self.text = self._io.read_str_eos("iso8859-1")
|
||||
|
||||
|
||||
class TimeChunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self.year = self._io.read_u2be()
|
||||
self.month = self._io.read_u1()
|
||||
self.day = self._io.read_u1()
|
||||
self.hour = self._io.read_u1()
|
||||
self.minute = self._io.read_u1()
|
||||
self.second = self._io.read_u1()
|
||||
@@ -37,7 +37,7 @@ Events = frozenset([
|
||||
])
|
||||
|
||||
|
||||
def iterate(f):
|
||||
def event_sequence(f):
|
||||
if isinstance(f, http.HTTPFlow):
|
||||
if f.request:
|
||||
yield "requestheaders", f
|
||||
@@ -70,4 +70,4 @@ def iterate(f):
|
||||
yield "tcp_error", f
|
||||
yield "tcp_end", f
|
||||
else:
|
||||
raise TypeError()
|
||||
raise NotImplementedError
|
||||
@@ -73,7 +73,7 @@ def python_code(flow: http.HTTPFlow):
|
||||
|
||||
headers = flow.request.headers.copy()
|
||||
# requests adds those by default.
|
||||
for x in (":authority", "host", "content-length"):
|
||||
for x in ("host", "content-length"):
|
||||
headers.pop(x, None)
|
||||
writearg("headers", dict(headers))
|
||||
try:
|
||||
@@ -130,7 +130,7 @@ def locust_code(flow):
|
||||
if flow.request.headers:
|
||||
lines = [
|
||||
(_native(k), _native(v)) for k, v in flow.request.headers.fields
|
||||
if _native(k).lower() not in [":authority", "host", "cookie"]
|
||||
if _native(k).lower() not in ["host", "cookie"]
|
||||
]
|
||||
lines = [" '%s': '%s',\n" % (k, v) for k, v in lines]
|
||||
headers += "\n headers = {\n%s }\n" % "".join(lines)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import time
|
||||
import copy
|
||||
import uuid
|
||||
|
||||
from mitmproxy import controller # noqa
|
||||
@@ -6,7 +7,7 @@ from mitmproxy import stateobject
|
||||
from mitmproxy import connections
|
||||
from mitmproxy import version
|
||||
|
||||
import typing # noqa
|
||||
import typing # noqa
|
||||
|
||||
|
||||
class Error(stateobject.StateObject):
|
||||
@@ -52,6 +53,10 @@ class Error(stateobject.StateObject):
|
||||
f.set_state(state)
|
||||
return f
|
||||
|
||||
def copy(self):
|
||||
c = copy.copy(self)
|
||||
return c
|
||||
|
||||
|
||||
class Flow(stateobject.StateObject):
|
||||
|
||||
@@ -111,9 +116,16 @@ class Flow(stateobject.StateObject):
|
||||
return f
|
||||
|
||||
def copy(self):
|
||||
f = super().copy()
|
||||
f = copy.copy(self)
|
||||
|
||||
f.id = str(uuid.uuid4())
|
||||
f.live = False
|
||||
f.client_conn = self.client_conn.copy()
|
||||
f.server_conn = self.server_conn.copy()
|
||||
f.metadata = self.metadata.copy()
|
||||
|
||||
if self.error:
|
||||
f.error = self.error.copy()
|
||||
return f
|
||||
|
||||
def modified(self):
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import html
|
||||
from typing import Optional
|
||||
import cgi
|
||||
|
||||
from mitmproxy import flow
|
||||
|
||||
@@ -204,27 +203,16 @@ class HTTPFlow(flow.Flow):
|
||||
return c
|
||||
|
||||
|
||||
def make_error_response(
|
||||
status_code: int,
|
||||
message: str="",
|
||||
headers: Optional[http.Headers]=None,
|
||||
) -> HTTPResponse:
|
||||
reason = http.status_codes.RESPONSES.get(status_code, "Unknown")
|
||||
def make_error_response(status_code, message, headers=None):
|
||||
response = http.status_codes.RESPONSES.get(status_code, "Unknown")
|
||||
body = """
|
||||
<html>
|
||||
<head>
|
||||
<title>{status_code} {reason}</title>
|
||||
<title>%d %s</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>{status_code} {reason}</h1>
|
||||
<p>{message}</p>
|
||||
</body>
|
||||
<body>%s</body>
|
||||
</html>
|
||||
""".strip().format(
|
||||
status_code=status_code,
|
||||
reason=reason,
|
||||
message=html.escape(message),
|
||||
)
|
||||
""".strip() % (status_code, response, cgi.escape(message))
|
||||
body = body.encode("utf8", "replace")
|
||||
|
||||
if not headers:
|
||||
@@ -238,7 +226,7 @@ def make_error_response(
|
||||
return HTTPResponse(
|
||||
b"HTTP/1.1",
|
||||
status_code,
|
||||
reason,
|
||||
response,
|
||||
headers,
|
||||
body,
|
||||
)
|
||||
|
||||
@@ -86,14 +86,9 @@ def convert_019_100(data):
|
||||
return data
|
||||
|
||||
|
||||
def convert_100_200(data):
|
||||
data["version"] = (2, 0, 0)
|
||||
return data
|
||||
|
||||
|
||||
def _convert_dict_keys(o: Any) -> Any:
|
||||
if isinstance(o, dict):
|
||||
return {strutils.always_str(k): _convert_dict_keys(v) for k, v in o.items()}
|
||||
return {strutils.native(k): _convert_dict_keys(v) for k, v in o.items()}
|
||||
else:
|
||||
return o
|
||||
|
||||
@@ -103,7 +98,7 @@ def _convert_dict_vals(o: dict, values_to_convert: dict) -> dict:
|
||||
if not o or k not in o:
|
||||
continue
|
||||
if v is True:
|
||||
o[k] = strutils.always_str(o[k])
|
||||
o[k] = strutils.native(o[k])
|
||||
else:
|
||||
_convert_dict_vals(o[k], v)
|
||||
return o
|
||||
@@ -139,7 +134,6 @@ converters = {
|
||||
(0, 17): convert_017_018,
|
||||
(0, 18): convert_018_019,
|
||||
(0, 19): convert_019_100,
|
||||
(1, 0): convert_100_200,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import sys
|
||||
from mitmproxy import addonmanager
|
||||
from mitmproxy import options
|
||||
from mitmproxy import controller
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import events
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import connections
|
||||
from mitmproxy import http
|
||||
@@ -91,7 +91,7 @@ class Master:
|
||||
changed = False
|
||||
try:
|
||||
mtype, obj = self.event_queue.get(timeout=timeout)
|
||||
if mtype not in eventsequence.Events:
|
||||
if mtype not in events.Events:
|
||||
raise exceptions.ControlException(
|
||||
"Unknown event %s" % repr(mtype)
|
||||
)
|
||||
@@ -153,7 +153,7 @@ class Master:
|
||||
f.request.port = self.server.config.upstream_server.address.port
|
||||
f.request.scheme = self.server.config.upstream_server.scheme
|
||||
f.reply = controller.DummyReply()
|
||||
for e, o in eventsequence.iterate(f):
|
||||
for e, o in events.event_sequence(f):
|
||||
getattr(self, e)(o)
|
||||
|
||||
def load_flows(self, fr: io.FlowReader) -> int:
|
||||
@@ -170,11 +170,8 @@ class Master:
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
if path == "-":
|
||||
try:
|
||||
sys.stdin.buffer.read(0)
|
||||
except Exception as e:
|
||||
raise IOError("Cannot read from stdin: {}".format(e))
|
||||
freader = io.FlowReader(sys.stdin.buffer)
|
||||
# This is incompatible with Python 3 - maybe we can use click?
|
||||
freader = io.FlowReader(sys.stdin)
|
||||
return self.load_flows(freader)
|
||||
else:
|
||||
with open(path, "rb") as f:
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import re
|
||||
|
||||
# Allow underscore in host name
|
||||
_label_valid = re.compile(b"(?!-)[A-Z\d\-_]{1,63}(?<!-)$", re.IGNORECASE)
|
||||
_label_valid = re.compile(b"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
|
||||
|
||||
|
||||
def is_valid_host(host: bytes) -> bool:
|
||||
|
||||
@@ -31,8 +31,8 @@ def decode(encoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
|
||||
Raises:
|
||||
ValueError, if decoding fails.
|
||||
"""
|
||||
if encoded is None:
|
||||
return None
|
||||
if len(encoded) == 0:
|
||||
return encoded
|
||||
|
||||
global _cache
|
||||
cached = (
|
||||
@@ -72,8 +72,8 @@ def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
|
||||
Raises:
|
||||
ValueError, if encoding fails.
|
||||
"""
|
||||
if decoded is None:
|
||||
return None
|
||||
if len(decoded) == 0:
|
||||
return decoded
|
||||
|
||||
global _cache
|
||||
cached = (
|
||||
@@ -86,7 +86,10 @@ def encode(decoded: Union[str, bytes], encoding: str, errors: str='strict') -> U
|
||||
return _cache.encoded
|
||||
try:
|
||||
try:
|
||||
encoded = custom_encode[encoding](decoded)
|
||||
value = decoded
|
||||
if isinstance(value, str):
|
||||
value = decoded.encode()
|
||||
encoded = custom_encode[encoding](value)
|
||||
except KeyError:
|
||||
encoded = codecs.encode(decoded, encoding, errors)
|
||||
if encoding in ("gzip", "deflate", "br"):
|
||||
@@ -111,14 +114,12 @@ def identity(content):
|
||||
return content
|
||||
|
||||
|
||||
def decode_gzip(content: bytes) -> bytes:
|
||||
if not content:
|
||||
return b""
|
||||
def decode_gzip(content):
|
||||
gfile = gzip.GzipFile(fileobj=BytesIO(content))
|
||||
return gfile.read()
|
||||
|
||||
|
||||
def encode_gzip(content: bytes) -> bytes:
|
||||
def encode_gzip(content):
|
||||
s = BytesIO()
|
||||
gf = gzip.GzipFile(fileobj=s, mode='wb')
|
||||
gf.write(content)
|
||||
@@ -126,17 +127,15 @@ def encode_gzip(content: bytes) -> bytes:
|
||||
return s.getvalue()
|
||||
|
||||
|
||||
def decode_brotli(content: bytes) -> bytes:
|
||||
if not content:
|
||||
return b""
|
||||
def decode_brotli(content):
|
||||
return brotli.decompress(content)
|
||||
|
||||
|
||||
def encode_brotli(content: bytes) -> bytes:
|
||||
def encode_brotli(content):
|
||||
return brotli.compress(content)
|
||||
|
||||
|
||||
def decode_deflate(content: bytes) -> bytes:
|
||||
def decode_deflate(content):
|
||||
"""
|
||||
Returns decompressed data for DEFLATE. Some servers may respond with
|
||||
compressed data without a zlib header or checksum. An undocumented
|
||||
@@ -145,15 +144,13 @@ def decode_deflate(content: bytes) -> bytes:
|
||||
|
||||
http://bugs.python.org/issue5784
|
||||
"""
|
||||
if not content:
|
||||
return b""
|
||||
try:
|
||||
return zlib.decompress(content)
|
||||
except zlib.error:
|
||||
return zlib.decompress(content, -15)
|
||||
|
||||
|
||||
def encode_deflate(content: bytes) -> bytes:
|
||||
def encode_deflate(content):
|
||||
"""
|
||||
Returns compressed content, always including zlib header and checksum.
|
||||
"""
|
||||
|
||||
@@ -78,9 +78,8 @@ def _assemble_request_headers(request_data):
|
||||
Args:
|
||||
request_data (mitmproxy.net.http.request.RequestData)
|
||||
"""
|
||||
headers = request_data.headers
|
||||
headers = request_data.headers.copy()
|
||||
if "host" not in headers and request_data.scheme and request_data.host and request_data.port:
|
||||
headers = headers.copy()
|
||||
headers["host"] = mitmproxy.net.http.url.hostport(
|
||||
request_data.scheme,
|
||||
request_data.host,
|
||||
|
||||
@@ -158,9 +158,8 @@ def connection_close(http_version, headers):
|
||||
"""
|
||||
Checks the message to see if the client connection should be closed
|
||||
according to RFC 2616 Section 8.1.
|
||||
If we don't have a Connection header, HTTP 1.1 connections are assumed
|
||||
to be persistent.
|
||||
"""
|
||||
# At first, check if we have an explicit Connection header.
|
||||
if "connection" in headers:
|
||||
tokens = get_header_tokens(headers, "connection")
|
||||
if "close" in tokens:
|
||||
@@ -168,7 +167,9 @@ def connection_close(http_version, headers):
|
||||
elif "keep-alive" in tokens:
|
||||
return False
|
||||
|
||||
return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1"
|
||||
# If we don't have a Connection header, HTTP 1.1 connections are assumed to
|
||||
# be persistent
|
||||
return http_version != "HTTP/1.1" and http_version != b"HTTP/1.1" # FIXME: Remove one case.
|
||||
|
||||
|
||||
def expected_http_body_size(request, response=None):
|
||||
@@ -227,7 +228,7 @@ def _get_first_line(rfile):
|
||||
if line == b"\r\n" or line == b"\n":
|
||||
# Possible leftover from previous message
|
||||
line = rfile.readline()
|
||||
except (exceptions.TcpDisconnect, exceptions.TlsException):
|
||||
except exceptions.TcpDisconnect:
|
||||
raise exceptions.HttpReadDisconnect("Remote disconnected")
|
||||
if not line:
|
||||
raise exceptions.HttpReadDisconnect("Remote disconnected")
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import codecs
|
||||
|
||||
import hyperframe.frame
|
||||
import hyperframe
|
||||
from mitmproxy import exceptions
|
||||
|
||||
|
||||
@@ -20,6 +20,6 @@ def parse_frame(header, body=None):
|
||||
body = header[9:]
|
||||
header = header[:9]
|
||||
|
||||
frame, _ = hyperframe.frame.Frame.parse_frame_header(header)
|
||||
frame, length = hyperframe.frame.Frame.parse_frame_header(header)
|
||||
frame.parse_body(memoryview(body))
|
||||
return frame
|
||||
|
||||
@@ -14,13 +14,13 @@ def parse_headers(headers):
|
||||
host = None
|
||||
port = None
|
||||
|
||||
if method == b'CONNECT':
|
||||
raise NotImplementedError("CONNECT over HTTP/2 is not implemented.")
|
||||
|
||||
if path == b'*' or path.startswith(b"/"):
|
||||
first_line_format = "relative"
|
||||
else:
|
||||
elif method == b'CONNECT': # pragma: no cover
|
||||
raise NotImplementedError("CONNECT over HTTP/2 is not implemented.")
|
||||
else: # pragma: no cover
|
||||
first_line_format = "absolute"
|
||||
# FIXME: verify if path or :host contains what we need
|
||||
scheme, host, port, _ = url.parse(path)
|
||||
|
||||
if authority:
|
||||
|
||||
@@ -7,6 +7,15 @@ from mitmproxy.types import serializable
|
||||
from mitmproxy.net.http import headers
|
||||
|
||||
|
||||
# While headers _should_ be ASCII, it's not uncommon for certain headers to be utf-8 encoded.
|
||||
def _native(x):
|
||||
return x.decode("utf-8", "surrogateescape")
|
||||
|
||||
|
||||
def _always_bytes(x):
|
||||
return strutils.always_bytes(x, "utf-8", "surrogateescape")
|
||||
|
||||
|
||||
class MessageData(serializable.Serializable):
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, MessageData):
|
||||
@@ -133,11 +142,11 @@ class Message(serializable.Serializable):
|
||||
"""
|
||||
Version string, e.g. "HTTP/1.1"
|
||||
"""
|
||||
return self.data.http_version.decode("utf-8", "surrogateescape")
|
||||
return _native(self.data.http_version)
|
||||
|
||||
@http_version.setter
|
||||
def http_version(self, http_version):
|
||||
self.data.http_version = strutils.always_bytes(http_version, "utf-8", "surrogateescape")
|
||||
self.data.http_version = _always_bytes(http_version)
|
||||
|
||||
@property
|
||||
def timestamp_start(self):
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import re
|
||||
import urllib
|
||||
from typing import Optional
|
||||
|
||||
from mitmproxy.types import multidict
|
||||
from mitmproxy.utils import strutils
|
||||
@@ -116,24 +115,24 @@ class Request(message.Message):
|
||||
"""
|
||||
HTTP request method, e.g. "GET".
|
||||
"""
|
||||
return self.data.method.decode("utf-8", "surrogateescape").upper()
|
||||
return message._native(self.data.method).upper()
|
||||
|
||||
@method.setter
|
||||
def method(self, method):
|
||||
self.data.method = strutils.always_bytes(method, "utf-8", "surrogateescape")
|
||||
self.data.method = message._always_bytes(method)
|
||||
|
||||
@property
|
||||
def scheme(self):
|
||||
"""
|
||||
HTTP request scheme, which should be "http" or "https".
|
||||
"""
|
||||
if self.data.scheme is None:
|
||||
return None
|
||||
return self.data.scheme.decode("utf-8", "surrogateescape")
|
||||
if not self.data.scheme:
|
||||
return self.data.scheme
|
||||
return message._native(self.data.scheme)
|
||||
|
||||
@scheme.setter
|
||||
def scheme(self, scheme):
|
||||
self.data.scheme = strutils.always_bytes(scheme, "utf-8", "surrogateescape")
|
||||
self.data.scheme = message._always_bytes(scheme)
|
||||
|
||||
@property
|
||||
def host(self):
|
||||
@@ -165,44 +164,11 @@ class Request(message.Message):
|
||||
self.data.host = host
|
||||
|
||||
# Update host header
|
||||
if self.host_header is not None:
|
||||
self.host_header = host
|
||||
|
||||
@property
|
||||
def host_header(self) -> Optional[str]:
|
||||
"""
|
||||
The request's host/authority header.
|
||||
|
||||
This property maps to either ``request.headers["Host"]`` or
|
||||
``request.headers[":authority"]``, depending on whether it's HTTP/1.x or HTTP/2.0.
|
||||
"""
|
||||
if ":authority" in self.headers:
|
||||
return self.headers[":authority"]
|
||||
if "Host" in self.headers:
|
||||
return self.headers["Host"]
|
||||
return None
|
||||
|
||||
@host_header.setter
|
||||
def host_header(self, val: Optional[str]) -> None:
|
||||
if val is None:
|
||||
self.headers.pop("Host", None)
|
||||
self.headers.pop(":authority", None)
|
||||
elif self.host_header is not None:
|
||||
# Update any existing headers.
|
||||
if ":authority" in self.headers:
|
||||
self.headers[":authority"] = val
|
||||
if "Host" in self.headers:
|
||||
self.headers["Host"] = val
|
||||
else:
|
||||
# Only add the correct new header.
|
||||
if self.http_version.upper().startswith("HTTP/2"):
|
||||
self.headers[":authority"] = val
|
||||
if "host" in self.headers:
|
||||
if host:
|
||||
self.headers["host"] = host
|
||||
else:
|
||||
self.headers["Host"] = val
|
||||
|
||||
@host_header.deleter
|
||||
def host_header(self):
|
||||
self.host_header = None
|
||||
self.headers.pop("host")
|
||||
|
||||
@property
|
||||
def port(self):
|
||||
@@ -224,11 +190,11 @@ class Request(message.Message):
|
||||
if self.data.path is None:
|
||||
return None
|
||||
else:
|
||||
return self.data.path.decode("utf-8", "surrogateescape")
|
||||
return message._native(self.data.path)
|
||||
|
||||
@path.setter
|
||||
def path(self, path):
|
||||
self.data.path = strutils.always_bytes(path, "utf-8", "surrogateescape")
|
||||
self.data.path = message._always_bytes(path)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
@@ -245,10 +211,9 @@ class Request(message.Message):
|
||||
|
||||
def _parse_host_header(self):
|
||||
"""Extract the host and port from Host header"""
|
||||
host = self.host_header
|
||||
if not host:
|
||||
if "host" not in self.headers:
|
||||
return None, None
|
||||
port = None
|
||||
host, port = self.headers["host"], None
|
||||
m = host_header_re.match(host)
|
||||
if m:
|
||||
host = m.group("host").strip("[]")
|
||||
@@ -408,7 +373,7 @@ class Request(message.Message):
|
||||
This will overwrite the existing content if there is one.
|
||||
"""
|
||||
self.headers["content-type"] = "application/x-www-form-urlencoded"
|
||||
self.content = mitmproxy.net.http.url.encode(form_data, self.content.decode()).encode()
|
||||
self.content = mitmproxy.net.http.url.encode(form_data).encode()
|
||||
|
||||
@urlencoded_form.setter
|
||||
def urlencoded_form(self, value):
|
||||
|
||||
@@ -6,7 +6,6 @@ from mitmproxy.net.http import cookies
|
||||
from mitmproxy.net.http import headers as nheaders
|
||||
from mitmproxy.net.http import message
|
||||
from mitmproxy.net.http import status_codes
|
||||
from mitmproxy.utils import strutils
|
||||
from typing import AnyStr
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
@@ -122,12 +121,11 @@ class Response(message.Message):
|
||||
HTTP Reason Phrase, e.g. "Not Found".
|
||||
This is always :py:obj:`None` for HTTP2 requests, because HTTP2 responses do not contain a reason phrase.
|
||||
"""
|
||||
# Encoding: http://stackoverflow.com/a/16674906/934719
|
||||
return self.data.reason.decode("ISO-8859-1", "surrogateescape")
|
||||
return message._native(self.data.reason)
|
||||
|
||||
@reason.setter
|
||||
def reason(self, reason):
|
||||
self.data.reason = strutils.always_bytes(reason, "ISO-8859-1", "surrogateescape")
|
||||
self.data.reason = message._always_bytes(reason)
|
||||
|
||||
@property
|
||||
def cookies(self) -> multidict.MultiDictView:
|
||||
|
||||
@@ -82,24 +82,11 @@ def unparse(scheme, host, port, path=""):
|
||||
return "%s://%s%s" % (scheme, hostport(scheme, host, port), path)
|
||||
|
||||
|
||||
def encode(s: Sequence[Tuple[str, str]], similar_to: str=None) -> str:
|
||||
def encode(s: Sequence[Tuple[str, str]]) -> str:
|
||||
"""
|
||||
Takes a list of (key, value) tuples and returns a urlencoded string.
|
||||
If similar_to is passed, the output is formatted similar to the provided urlencoded string.
|
||||
"""
|
||||
|
||||
remove_trailing_equal = False
|
||||
if similar_to:
|
||||
remove_trailing_equal = any("=" not in param for param in similar_to.split("&"))
|
||||
|
||||
encoded = urllib.parse.urlencode(s, False, errors="surrogateescape")
|
||||
|
||||
if remove_trailing_equal:
|
||||
encoded = encoded.replace("=&", "&")
|
||||
if encoded[-1] == '=':
|
||||
encoded = encoded[:-1]
|
||||
|
||||
return encoded
|
||||
return urllib.parse.urlencode(s, False, errors="surrogateescape")
|
||||
|
||||
|
||||
def decode(s):
|
||||
|
||||
@@ -538,7 +538,7 @@ class _Connection:
|
||||
self.ssl_verification_error = exceptions.InvalidCertificateException(
|
||||
"Certificate Verification Error for {}: {} (errno: {}, depth: {})".format(
|
||||
sni,
|
||||
strutils.always_str(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"),
|
||||
strutils.native(SSL._ffi.string(SSL._lib.X509_verify_cert_error_string(errno)), "utf8"),
|
||||
errno,
|
||||
err_depth
|
||||
)
|
||||
|
||||
@@ -57,38 +57,38 @@ class WSGIAdaptor:
|
||||
Raises:
|
||||
ValueError, if the content-encoding is invalid.
|
||||
"""
|
||||
path = strutils.always_str(flow.request.path, "latin-1")
|
||||
path = strutils.native(flow.request.path, "latin-1")
|
||||
if '?' in path:
|
||||
path_info, query = strutils.always_str(path, "latin-1").split('?', 1)
|
||||
path_info, query = strutils.native(path, "latin-1").split('?', 1)
|
||||
else:
|
||||
path_info = path
|
||||
query = ''
|
||||
environ = {
|
||||
'wsgi.version': (1, 0),
|
||||
'wsgi.url_scheme': strutils.always_str(flow.request.scheme, "latin-1"),
|
||||
'wsgi.url_scheme': strutils.native(flow.request.scheme, "latin-1"),
|
||||
'wsgi.input': io.BytesIO(flow.request.content or b""),
|
||||
'wsgi.errors': errsoc,
|
||||
'wsgi.multithread': True,
|
||||
'wsgi.multiprocess': False,
|
||||
'wsgi.run_once': False,
|
||||
'SERVER_SOFTWARE': self.sversion,
|
||||
'REQUEST_METHOD': strutils.always_str(flow.request.method, "latin-1"),
|
||||
'REQUEST_METHOD': strutils.native(flow.request.method, "latin-1"),
|
||||
'SCRIPT_NAME': '',
|
||||
'PATH_INFO': urllib.parse.unquote(path_info),
|
||||
'QUERY_STRING': query,
|
||||
'CONTENT_TYPE': strutils.always_str(flow.request.headers.get('Content-Type', ''), "latin-1"),
|
||||
'CONTENT_LENGTH': strutils.always_str(flow.request.headers.get('Content-Length', ''), "latin-1"),
|
||||
'CONTENT_TYPE': strutils.native(flow.request.headers.get('Content-Type', ''), "latin-1"),
|
||||
'CONTENT_LENGTH': strutils.native(flow.request.headers.get('Content-Length', ''), "latin-1"),
|
||||
'SERVER_NAME': self.domain,
|
||||
'SERVER_PORT': str(self.port),
|
||||
'SERVER_PROTOCOL': strutils.always_str(flow.request.http_version, "latin-1"),
|
||||
'SERVER_PROTOCOL': strutils.native(flow.request.http_version, "latin-1"),
|
||||
}
|
||||
environ.update(extra)
|
||||
if flow.client_conn.address:
|
||||
environ["REMOTE_ADDR"] = strutils.always_str(flow.client_conn.address.host, "latin-1")
|
||||
environ["REMOTE_ADDR"] = strutils.native(flow.client_conn.address.host, "latin-1")
|
||||
environ["REMOTE_PORT"] = flow.client_conn.address.port
|
||||
|
||||
for key, value in flow.request.headers.items():
|
||||
key = 'HTTP_' + strutils.always_str(key, "latin-1").upper().replace('-', '_')
|
||||
key = 'HTTP_' + strutils.native(key, "latin-1").upper().replace('-', '_')
|
||||
if key not in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
|
||||
environ[key] = value
|
||||
return environ
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from typing import Tuple, Optional, Sequence, Union
|
||||
from typing import Tuple, Optional, Sequence
|
||||
|
||||
from mitmproxy import optmanager
|
||||
|
||||
@@ -22,101 +22,96 @@ DEFAULT_CLIENT_CIPHERS = "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA
|
||||
|
||||
class Options(optmanager.OptManager):
|
||||
def __init__(
|
||||
self,
|
||||
*, # all args are keyword-only.
|
||||
onboarding: bool = True,
|
||||
onboarding_host: str = APP_HOST,
|
||||
onboarding_port: int = APP_PORT,
|
||||
anticache: bool = False,
|
||||
anticomp: bool = False,
|
||||
client_replay: Sequence[str] = [],
|
||||
replay_kill_extra: bool = False,
|
||||
keepserving: bool = True,
|
||||
no_server: bool = False,
|
||||
server_replay_nopop: bool = False,
|
||||
refresh_server_playback: bool = True,
|
||||
rfile: Optional[str] = None,
|
||||
scripts: Sequence[str] = [],
|
||||
showhost: bool = False,
|
||||
replacements: Sequence[Union[Tuple[str, str, str], str]] = [],
|
||||
replacement_files: Sequence[Union[Tuple[str, str, str], str]] = [],
|
||||
server_replay_use_headers: Sequence[str] = [],
|
||||
setheaders: Sequence[Union[Tuple[str, str, str], str]] = [],
|
||||
server_replay: Sequence[str] = [],
|
||||
stickycookie: Optional[str] = None,
|
||||
stickyauth: Optional[str] = None,
|
||||
stream_large_bodies: Optional[int] = None,
|
||||
verbosity: int = 2,
|
||||
default_contentview: str = "auto",
|
||||
streamfile: Optional[str] = None,
|
||||
streamfile_append: bool = False,
|
||||
server_replay_ignore_content: bool = False,
|
||||
server_replay_ignore_params: Sequence[str] = [],
|
||||
server_replay_ignore_payload_params: Sequence[str] = [],
|
||||
server_replay_ignore_host: bool = False,
|
||||
self,
|
||||
*, # all args are keyword-only.
|
||||
# TODO: rename to onboarding_app_*
|
||||
app: bool = True,
|
||||
app_host: str = APP_HOST,
|
||||
app_port: int = APP_PORT,
|
||||
anticache: bool = False,
|
||||
anticomp: bool = False,
|
||||
client_replay: Sequence[str] = [],
|
||||
replay_kill_extra: bool = False,
|
||||
keepserving: bool = True,
|
||||
no_server: bool = False,
|
||||
server_replay_nopop: bool = False,
|
||||
refresh_server_playback: bool = True,
|
||||
rfile: Optional[str] = None,
|
||||
scripts: Sequence[str] = [],
|
||||
showhost: bool = False,
|
||||
replacements: Sequence[Tuple[str, str, str]] = [],
|
||||
server_replay_use_headers: Sequence[str] = [],
|
||||
setheaders: Sequence[Tuple[str, str, str]] = [],
|
||||
server_replay: Sequence[str] = [],
|
||||
stickycookie: Optional[str] = None,
|
||||
stickyauth: Optional[str] = None,
|
||||
stream_large_bodies: Optional[int] = None,
|
||||
verbosity: int = 2,
|
||||
default_contentview: str = "auto",
|
||||
streamfile: Optional[str] = None,
|
||||
streamfile_append: bool = False,
|
||||
server_replay_ignore_content: bool = False,
|
||||
server_replay_ignore_params: Sequence[str] = [],
|
||||
server_replay_ignore_payload_params: Sequence[str] = [],
|
||||
server_replay_ignore_host: bool = False,
|
||||
# Proxy options
|
||||
auth_nonanonymous: bool = False,
|
||||
auth_singleuser: Optional[str] = None,
|
||||
auth_htpasswd: Optional[str] = None,
|
||||
add_upstream_certs_to_client_chain: bool = False,
|
||||
body_size_limit: Optional[int] = None,
|
||||
cadir: str = CA_DIR,
|
||||
certs: Sequence[Tuple[str, str]] = [],
|
||||
ciphers_client: str=DEFAULT_CLIENT_CIPHERS,
|
||||
ciphers_server: Optional[str]=None,
|
||||
clientcerts: Optional[str] = None,
|
||||
http2: bool = True,
|
||||
ignore_hosts: Sequence[str] = [],
|
||||
listen_host: str = "",
|
||||
listen_port: int = LISTEN_PORT,
|
||||
upstream_bind_address: str = "",
|
||||
mode: str = "regular",
|
||||
no_upstream_cert: bool = False,
|
||||
rawtcp: bool = False,
|
||||
websocket: bool = True,
|
||||
spoof_source_address: bool = False,
|
||||
upstream_server: Optional[str] = None,
|
||||
upstream_auth: Optional[str] = None,
|
||||
ssl_version_client: str = "secure",
|
||||
ssl_version_server: str = "secure",
|
||||
ssl_insecure: bool = False,
|
||||
ssl_verify_upstream_trusted_cadir: Optional[str] = None,
|
||||
ssl_verify_upstream_trusted_ca: Optional[str] = None,
|
||||
tcp_hosts: Sequence[str] = [],
|
||||
|
||||
# Proxy options
|
||||
auth_nonanonymous: bool = False,
|
||||
auth_singleuser: Optional[str] = None,
|
||||
auth_htpasswd: Optional[str] = None,
|
||||
add_upstream_certs_to_client_chain: bool = False,
|
||||
body_size_limit: Optional[int] = None,
|
||||
cadir: str = CA_DIR,
|
||||
certs: Sequence[Tuple[str, str]] = [],
|
||||
ciphers_client: str=DEFAULT_CLIENT_CIPHERS,
|
||||
ciphers_server: Optional[str]=None,
|
||||
clientcerts: Optional[str] = None,
|
||||
ignore_hosts: Sequence[str] = [],
|
||||
listen_host: str = "",
|
||||
listen_port: int = LISTEN_PORT,
|
||||
upstream_bind_address: str = "",
|
||||
mode: str = "regular",
|
||||
no_upstream_cert: bool = False,
|
||||
intercept: Optional[str] = None,
|
||||
|
||||
http2: bool = True,
|
||||
http2_priority: bool = False,
|
||||
websocket: bool = True,
|
||||
rawtcp: bool = False,
|
||||
# Console options
|
||||
eventlog: bool = False,
|
||||
focus_follow: bool = False,
|
||||
filter: Optional[str] = None,
|
||||
palette: Optional[str] = "dark",
|
||||
palette_transparent: bool = False,
|
||||
no_mouse: bool = False,
|
||||
order: Optional[str] = None,
|
||||
order_reversed: bool = False,
|
||||
|
||||
spoof_source_address: bool = False,
|
||||
upstream_server: Optional[str] = None,
|
||||
upstream_auth: Optional[str] = None,
|
||||
ssl_version_client: str = "secure",
|
||||
ssl_version_server: str = "secure",
|
||||
ssl_insecure: bool = False,
|
||||
ssl_verify_upstream_trusted_cadir: Optional[str] = None,
|
||||
ssl_verify_upstream_trusted_ca: Optional[str] = None,
|
||||
tcp_hosts: Sequence[str] = [],
|
||||
# Web options
|
||||
open_browser: bool = True,
|
||||
wdebug: bool = False,
|
||||
wport: int = 8081,
|
||||
wiface: str = "127.0.0.1",
|
||||
|
||||
intercept: Optional[str] = None,
|
||||
|
||||
# Console options
|
||||
console_eventlog: bool = False,
|
||||
console_focus_follow: bool = False,
|
||||
console_palette: Optional[str] = "dark",
|
||||
console_palette_transparent: bool = False,
|
||||
console_no_mouse: bool = False,
|
||||
console_order: Optional[str] = None,
|
||||
console_order_reversed: bool = False,
|
||||
|
||||
filter: Optional[str] = None,
|
||||
|
||||
# Web options
|
||||
web_open_browser: bool = True,
|
||||
web_debug: bool = False,
|
||||
web_port: int = 8081,
|
||||
web_iface: str = "127.0.0.1",
|
||||
|
||||
# Dump options
|
||||
filtstr: Optional[str] = None,
|
||||
flow_detail: int = 1
|
||||
# Dump options
|
||||
filtstr: Optional[str] = None,
|
||||
flow_detail: int = 1
|
||||
) -> None:
|
||||
# We could replace all assignments with clever metaprogramming,
|
||||
# but type hints are a much more valueable asset.
|
||||
|
||||
self.onboarding = onboarding
|
||||
self.onboarding_host = onboarding_host
|
||||
self.onboarding_port = onboarding_port
|
||||
self.app = app
|
||||
self.app_host = app_host
|
||||
self.app_port = app_port
|
||||
self.anticache = anticache
|
||||
self.anticomp = anticomp
|
||||
self.client_replay = client_replay
|
||||
@@ -129,7 +124,6 @@ class Options(optmanager.OptManager):
|
||||
self.scripts = scripts
|
||||
self.showhost = showhost
|
||||
self.replacements = replacements
|
||||
self.replacement_files = replacement_files
|
||||
self.server_replay_use_headers = server_replay_use_headers
|
||||
self.setheaders = setheaders
|
||||
self.server_replay = server_replay
|
||||
@@ -156,18 +150,15 @@ class Options(optmanager.OptManager):
|
||||
self.ciphers_client = ciphers_client
|
||||
self.ciphers_server = ciphers_server
|
||||
self.clientcerts = clientcerts
|
||||
self.http2 = http2
|
||||
self.ignore_hosts = ignore_hosts
|
||||
self.listen_host = listen_host
|
||||
self.listen_port = listen_port
|
||||
self.upstream_bind_address = upstream_bind_address
|
||||
self.mode = mode
|
||||
self.no_upstream_cert = no_upstream_cert
|
||||
|
||||
self.http2 = http2
|
||||
self.http2_priority = http2_priority
|
||||
self.websocket = websocket
|
||||
self.rawtcp = rawtcp
|
||||
|
||||
self.websocket = websocket
|
||||
self.spoof_source_address = spoof_source_address
|
||||
self.upstream_server = upstream_server
|
||||
self.upstream_auth = upstream_auth
|
||||
@@ -181,21 +172,20 @@ class Options(optmanager.OptManager):
|
||||
self.intercept = intercept
|
||||
|
||||
# Console options
|
||||
self.console_eventlog = console_eventlog
|
||||
self.console_focus_follow = console_focus_follow
|
||||
self.console_palette = console_palette
|
||||
self.console_palette_transparent = console_palette_transparent
|
||||
self.console_no_mouse = console_no_mouse
|
||||
self.console_order = console_order
|
||||
self.console_order_reversed = console_order_reversed
|
||||
|
||||
self.eventlog = eventlog
|
||||
self.focus_follow = focus_follow
|
||||
self.filter = filter
|
||||
self.palette = palette
|
||||
self.palette_transparent = palette_transparent
|
||||
self.no_mouse = no_mouse
|
||||
self.order = order
|
||||
self.order_reversed = order_reversed
|
||||
|
||||
# Web options
|
||||
self.web_open_browser = web_open_browser
|
||||
self.web_debug = web_debug
|
||||
self.web_port = web_port
|
||||
self.web_iface = web_iface
|
||||
self.open_browser = open_browser
|
||||
self.wdebug = wdebug
|
||||
self.wport = wport
|
||||
self.wiface = wiface
|
||||
|
||||
# Dump options
|
||||
self.filtstr = filtstr
|
||||
|
||||
@@ -229,10 +229,7 @@ class OptManager(metaclass=_DefaultsMeta):
|
||||
this object. May raise OptionsError if the config file is invalid.
|
||||
"""
|
||||
data = self._load(text)
|
||||
try:
|
||||
self.update(**data)
|
||||
except KeyError as v:
|
||||
raise exceptions.OptionsError(v)
|
||||
self.update(**data)
|
||||
|
||||
def load_paths(self, *paths):
|
||||
"""
|
||||
@@ -245,12 +242,7 @@ class OptManager(metaclass=_DefaultsMeta):
|
||||
if os.path.exists(p) and os.path.isfile(p):
|
||||
with open(p, "r") as f:
|
||||
txt = f.read()
|
||||
try:
|
||||
self.load(txt)
|
||||
except exceptions.OptionsError as e:
|
||||
raise exceptions.OptionsError(
|
||||
"Error reading %s: %s" % (p, e)
|
||||
)
|
||||
self.load(txt)
|
||||
|
||||
def merge(self, opts):
|
||||
"""
|
||||
|
||||
@@ -25,10 +25,6 @@ elif sys.platform == "darwin" or sys.platform.startswith("freebsd"):
|
||||
from . import osx
|
||||
|
||||
original_addr = osx.original_addr # noqa
|
||||
elif sys.platform.startswith("openbsd"):
|
||||
from . import openbsd
|
||||
|
||||
original_addr = openbsd.original_addr # noqa
|
||||
elif sys.platform == "win32":
|
||||
from . import windows
|
||||
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
def original_addr(csock):
|
||||
return csock.getsockname()
|
||||
@@ -30,7 +30,7 @@ class Socks5Proxy(protocol.Layer, protocol.ServerConnectionMixin):
|
||||
if connect_request.msg != socks.CMD.CONNECT:
|
||||
raise socks.SocksError(
|
||||
socks.REP.COMMAND_NOT_SUPPORTED,
|
||||
"mitmproxy only supports SOCKS5 CONNECT"
|
||||
"mitmproxy only supports SOCKS5 CONNECT."
|
||||
)
|
||||
|
||||
# We always connect lazily, but we need to pretend to the client that we connected.
|
||||
|
||||
@@ -74,6 +74,16 @@ class Layer(_LayerCodeCompletion):
|
||||
"""
|
||||
return getattr(self.ctx, name)
|
||||
|
||||
@property
|
||||
def layers(self):
|
||||
"""
|
||||
List of all layers, including the current layer (``[self, self.ctx, self.ctx.ctx, ...]``)
|
||||
"""
|
||||
return [self] + self.ctx.layers
|
||||
|
||||
def __repr__(self):
|
||||
return type(self).__name__
|
||||
|
||||
|
||||
class ServerConnectionMixin:
|
||||
|
||||
|
||||
@@ -88,10 +88,6 @@ class UpstreamConnectLayer(base.Layer):
|
||||
layer()
|
||||
|
||||
def _send_connect_request(self):
|
||||
self.log("Sending CONNECT request", "debug", [
|
||||
"Proxy Server: {}".format(self.ctx.server_conn.address),
|
||||
"Connect to: {}:{}".format(self.connect_request.host, self.connect_request.port)
|
||||
])
|
||||
self.send_request(self.connect_request)
|
||||
resp = self.read_response(self.connect_request)
|
||||
if resp.status_code != 200:
|
||||
@@ -105,7 +101,6 @@ class UpstreamConnectLayer(base.Layer):
|
||||
pass # swallow the message
|
||||
|
||||
def change_upstream_proxy_server(self, address):
|
||||
self.log("Changing upstream proxy to {} (CONNECTed)".format(repr(address)), "debug")
|
||||
if address != self.server_conn.via.address:
|
||||
self.ctx.set_server(address)
|
||||
|
||||
@@ -131,7 +126,7 @@ class HTTPMode(enum.Enum):
|
||||
# At this point, we see only a subset of the proxy modes
|
||||
MODE_REQUEST_FORMS = {
|
||||
HTTPMode.regular: ("authority", "absolute"),
|
||||
HTTPMode.transparent: ("relative",),
|
||||
HTTPMode.transparent: ("relative"),
|
||||
HTTPMode.upstream: ("authority", "absolute"),
|
||||
}
|
||||
|
||||
@@ -143,16 +138,9 @@ def validate_request_form(mode, request):
|
||||
)
|
||||
allowed_request_forms = MODE_REQUEST_FORMS[mode]
|
||||
if request.first_line_format not in allowed_request_forms:
|
||||
if mode == HTTPMode.transparent:
|
||||
err_message = (
|
||||
"Mitmproxy received an {} request even though it is not running in regular mode. "
|
||||
"This usually indicates a misconfiguration, please see "
|
||||
"http://docs.mitmproxy.org/en/stable/modes.html for details."
|
||||
).format("HTTP CONNECT" if request.first_line_format == "authority" else "absolute-form")
|
||||
else:
|
||||
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % (
|
||||
" or ".join(allowed_request_forms), request.first_line_format
|
||||
)
|
||||
err_message = "Invalid HTTP request form (expected: %s, got: %s)" % (
|
||||
" or ".join(allowed_request_forms), request.first_line_format
|
||||
)
|
||||
raise exceptions.HttpException(err_message)
|
||||
|
||||
|
||||
@@ -291,7 +279,7 @@ class HttpLayer(base.Layer):
|
||||
|
||||
# update host header in reverse proxy mode
|
||||
if self.config.options.mode == "reverse":
|
||||
f.request.host_header = self.config.upstream_server.address.host
|
||||
f.request.headers["Host"] = self.config.upstream_server.address.host
|
||||
|
||||
# Determine .scheme, .host and .port attributes for inline scripts. For
|
||||
# absolute-form requests, they are directly given in the request. For
|
||||
@@ -301,10 +289,11 @@ class HttpLayer(base.Layer):
|
||||
if self.mode is HTTPMode.transparent:
|
||||
# Setting request.host also updates the host header, which we want
|
||||
# to preserve
|
||||
host_header = f.request.host_header
|
||||
host_header = f.request.headers.get("host", None)
|
||||
f.request.host = self.__initial_server_conn.address.host
|
||||
f.request.port = self.__initial_server_conn.address.port
|
||||
f.request.host_header = host_header # set again as .host overwrites this.
|
||||
if host_header:
|
||||
f.request.headers["host"] = host_header
|
||||
f.request.scheme = "https" if self.__initial_server_tls else "http"
|
||||
self.channel.ask("request", f)
|
||||
|
||||
@@ -443,13 +432,10 @@ class HttpLayer(base.Layer):
|
||||
except (exceptions.NetlibException, h2.exceptions.H2Error, exceptions.Http2ProtocolException):
|
||||
self.log("Failed to send error response to client: {}".format(message), "debug")
|
||||
|
||||
def change_upstream_proxy_server(self, address):
|
||||
def change_upstream_proxy_server(self, address) -> None:
|
||||
# Make set_upstream_proxy_server always available,
|
||||
# even if there's no UpstreamConnectLayer
|
||||
if hasattr(self.ctx, "change_upstream_proxy_server"):
|
||||
self.ctx.change_upstream_proxy_server(address)
|
||||
elif address != self.server_conn.address:
|
||||
self.log("Changing upstream proxy to {} (not CONNECTed)".format(repr(address)), "debug")
|
||||
if address != self.server_conn.address:
|
||||
self.set_server(address)
|
||||
|
||||
def establish_server_connection(self, host: str, port: int, scheme: str):
|
||||
|
||||
@@ -97,6 +97,7 @@ class Http2Layer(base.Layer):
|
||||
client_side=False,
|
||||
header_encoding=False,
|
||||
validate_outbound_headers=False,
|
||||
normalize_outbound_headers=False,
|
||||
validate_inbound_headers=False)
|
||||
self.connections[self.client_conn] = SafeH2Connection(self.client_conn, config=config)
|
||||
|
||||
@@ -106,6 +107,7 @@ class Http2Layer(base.Layer):
|
||||
client_side=True,
|
||||
header_encoding=False,
|
||||
validate_outbound_headers=False,
|
||||
normalize_outbound_headers=False,
|
||||
validate_inbound_headers=False)
|
||||
self.connections[self.server_conn] = SafeH2Connection(self.server_conn, config=config)
|
||||
self.connections[self.server_conn].initiate_connection()
|
||||
@@ -266,10 +268,6 @@ class Http2Layer(base.Layer):
|
||||
return True
|
||||
|
||||
def _handle_priority_updated(self, eid, event):
|
||||
if not self.config.options.http2_priority:
|
||||
self.log("HTTP/2 PRIORITY frame surpressed. Use --http2-priority to enable forwarding.", "debug")
|
||||
return True
|
||||
|
||||
if eid in self.streams and self.streams[eid].handled_priority_event is event:
|
||||
# this event was already handled during stream creation
|
||||
# HeadersFrame + Priority information as RequestReceived
|
||||
@@ -529,12 +527,9 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
|
||||
if self.handled_priority_event:
|
||||
# only send priority information if they actually came with the original HeadersFrame
|
||||
# and not if they got updated before/after with a PriorityFrame
|
||||
if not self.config.options.http2_priority:
|
||||
self.log("HTTP/2 PRIORITY information in HEADERS frame surpressed. Use --http2-priority to enable forwarding.", "debug")
|
||||
else:
|
||||
priority_exclusive = self.priority_exclusive
|
||||
priority_depends_on = self._map_depends_on_stream_id(self.server_stream_id, self.priority_depends_on)
|
||||
priority_weight = self.priority_weight
|
||||
priority_exclusive = self.priority_exclusive
|
||||
priority_depends_on = self._map_depends_on_stream_id(self.server_stream_id, self.priority_depends_on)
|
||||
priority_weight = self.priority_weight
|
||||
|
||||
try:
|
||||
self.connections[self.server_conn].safe_send_headers(
|
||||
@@ -597,6 +592,9 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
|
||||
def send_response_headers(self, response):
|
||||
headers = response.headers.copy()
|
||||
headers.insert(0, ":status", str(response.status_code))
|
||||
for forbidden_header in h2.utilities.CONNECTION_HEADERS:
|
||||
if forbidden_header in headers:
|
||||
del headers[forbidden_header]
|
||||
with self.connections[self.client_conn].lock:
|
||||
self.connections[self.client_conn].safe_send_headers(
|
||||
self.raise_zombie,
|
||||
@@ -612,7 +610,7 @@ class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThr
|
||||
chunks
|
||||
)
|
||||
|
||||
def __call__(self): # pragma: no cover
|
||||
def __call__(self):
|
||||
raise EnvironmentError('Http2SingleStreamLayer must be run as thread')
|
||||
|
||||
def run(self):
|
||||
|
||||
@@ -8,7 +8,7 @@ from mitmproxy import flow
|
||||
from mitmproxy.proxy.protocol import base
|
||||
from mitmproxy.net import tcp
|
||||
from mitmproxy.net import websockets
|
||||
from mitmproxy.websocket import WebSocketFlow, WebSocketMessage
|
||||
from mitmproxy.websocket import WebSocketFlow, WebSocketBinaryMessage, WebSocketTextMessage
|
||||
|
||||
|
||||
class WebSocketLayer(base.Layer):
|
||||
@@ -65,7 +65,12 @@ class WebSocketLayer(base.Layer):
|
||||
compressed_message = fb[0].header.rsv1
|
||||
fb.clear()
|
||||
|
||||
websocket_message = WebSocketMessage(message_type, not is_server, payload)
|
||||
if message_type == websockets.OPCODE.TEXT:
|
||||
t = WebSocketTextMessage
|
||||
else:
|
||||
t = WebSocketBinaryMessage
|
||||
|
||||
websocket_message = t(self.flow, not is_server, payload)
|
||||
length = len(websocket_message.content)
|
||||
self.flow.messages.append(websocket_message)
|
||||
self.channel.ask("websocket_message", self.flow)
|
||||
|
||||
@@ -110,3 +110,10 @@ class RootContext:
|
||||
full_msg.append(" -> " + i)
|
||||
full_msg = "\n".join(full_msg)
|
||||
self.channel.tell("log", log.LogEntry(full_msg, level))
|
||||
|
||||
@property
|
||||
def layers(self):
|
||||
return []
|
||||
|
||||
def __repr__(self):
|
||||
return "RootContext"
|
||||
|
||||
@@ -3,7 +3,7 @@ This module provides a @concurrent decorator primitive to
|
||||
offload computations from mitmproxy's main master thread.
|
||||
"""
|
||||
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import events
|
||||
from mitmproxy.types import basethread
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ class ScriptThread(basethread.BaseThread):
|
||||
|
||||
|
||||
def concurrent(fn):
|
||||
if fn.__name__ not in eventsequence.Events - {"start", "configure", "tick"}:
|
||||
if fn.__name__ not in events.Events - {"start", "configure", "tick"}:
|
||||
raise NotImplementedError(
|
||||
"Concurrent decorator not supported for '%s' method." % fn.__name__
|
||||
)
|
||||
@@ -29,8 +29,4 @@ def concurrent(fn):
|
||||
"script.concurrent (%s)" % fn.__name__,
|
||||
target=run
|
||||
).start()
|
||||
# Support @concurrent for class-based addons
|
||||
if "." in fn.__qualname__:
|
||||
return staticmethod(_concurrent)
|
||||
else:
|
||||
return _concurrent
|
||||
return _concurrent
|
||||
|
||||
@@ -9,8 +9,8 @@ from mitmproxy.types import serializable
|
||||
class TCPMessage(serializable.Serializable):
|
||||
|
||||
def __init__(self, from_client, content, timestamp=None):
|
||||
self.from_client = from_client
|
||||
self.content = content
|
||||
self.from_client = from_client
|
||||
self.timestamp = timestamp or time.time()
|
||||
|
||||
@classmethod
|
||||
@@ -21,7 +21,9 @@ class TCPMessage(serializable.Serializable):
|
||||
return self.from_client, self.content, self.timestamp
|
||||
|
||||
def set_state(self, state):
|
||||
self.from_client, self.content, self.timestamp = state
|
||||
self.from_client = state.pop("from_client")
|
||||
self.content = state.pop("content")
|
||||
self.timestamp = state.pop("timestamp")
|
||||
|
||||
def __repr__(self):
|
||||
return "{direction} {content}".format(
|
||||
|
||||
@@ -3,7 +3,7 @@ import contextlib
|
||||
import mitmproxy.master
|
||||
import mitmproxy.options
|
||||
from mitmproxy import proxy
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import events
|
||||
from mitmproxy import exceptions
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ class context:
|
||||
is taken (as in flow interception).
|
||||
"""
|
||||
f.reply._state = "handled"
|
||||
for evt, arg in eventsequence.iterate(f):
|
||||
for evt, arg in events.event_sequence(f):
|
||||
h = getattr(addon, evt, None)
|
||||
if h:
|
||||
h(arg)
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
from mitmproxy.net import websockets
|
||||
from mitmproxy.test import tutils
|
||||
from mitmproxy import tcp
|
||||
from mitmproxy import websocket
|
||||
from mitmproxy import controller
|
||||
from mitmproxy import http
|
||||
from mitmproxy import connections
|
||||
from mitmproxy import flow
|
||||
from mitmproxy.net import http as net_http
|
||||
|
||||
|
||||
def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None):
|
||||
@@ -29,60 +26,6 @@ def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None):
|
||||
return f
|
||||
|
||||
|
||||
def twebsocketflow(client_conn=True, server_conn=True, messages=True, err=None, handshake_flow=True):
|
||||
|
||||
if client_conn is True:
|
||||
client_conn = tclient_conn()
|
||||
if server_conn is True:
|
||||
server_conn = tserver_conn()
|
||||
if handshake_flow is True:
|
||||
req = http.HTTPRequest(
|
||||
"relative",
|
||||
"GET",
|
||||
"http",
|
||||
"example.com",
|
||||
"80",
|
||||
"/ws",
|
||||
"HTTP/1.1",
|
||||
headers=net_http.Headers(
|
||||
connection="upgrade",
|
||||
upgrade="websocket",
|
||||
sec_websocket_version="13",
|
||||
sec_websocket_key="1234",
|
||||
),
|
||||
content=b''
|
||||
)
|
||||
resp = http.HTTPResponse(
|
||||
"HTTP/1.1",
|
||||
101,
|
||||
reason=net_http.status_codes.RESPONSES.get(101),
|
||||
headers=net_http.Headers(
|
||||
connection='upgrade',
|
||||
upgrade='websocket',
|
||||
sec_websocket_accept=b'',
|
||||
),
|
||||
content=b'',
|
||||
)
|
||||
handshake_flow = http.HTTPFlow(client_conn, server_conn)
|
||||
handshake_flow.request = req
|
||||
handshake_flow.response = resp
|
||||
|
||||
f = websocket.WebSocketFlow(client_conn, server_conn, handshake_flow)
|
||||
|
||||
if messages is True:
|
||||
messages = [
|
||||
websocket.WebSocketMessage(websockets.OPCODE.BINARY, True, b"hello binary"),
|
||||
websocket.WebSocketMessage(websockets.OPCODE.TEXT, False, "hello text".encode()),
|
||||
]
|
||||
if err is True:
|
||||
err = terr()
|
||||
|
||||
f.messages = messages
|
||||
f.error = err
|
||||
f.reply = controller.DummyReply()
|
||||
return f
|
||||
|
||||
|
||||
def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
|
||||
"""
|
||||
@type client_conn: bool | None | mitmproxy.proxy.connection.ClientConnection
|
||||
@@ -116,27 +59,6 @@ def tflow(client_conn=True, server_conn=True, req=True, resp=None, err=None):
|
||||
return f
|
||||
|
||||
|
||||
class DummyFlow(flow.Flow):
|
||||
"""A flow that is neither HTTP nor TCP."""
|
||||
|
||||
def __init__(self, client_conn, server_conn, live=None):
|
||||
super().__init__("dummy", client_conn, server_conn, live)
|
||||
|
||||
|
||||
def tdummyflow(client_conn=True, server_conn=True, err=None):
|
||||
if client_conn is True:
|
||||
client_conn = tclient_conn()
|
||||
if server_conn is True:
|
||||
server_conn = tserver_conn()
|
||||
if err is True:
|
||||
err = terr()
|
||||
|
||||
f = DummyFlow(client_conn, server_conn)
|
||||
f.error = err
|
||||
f.reply = controller.DummyReply()
|
||||
return f
|
||||
|
||||
|
||||
def tclient_conn():
|
||||
"""
|
||||
@return: mitmproxy.proxy.connection.ClientConnection
|
||||
|
||||
@@ -4,13 +4,27 @@ import os
|
||||
import time
|
||||
import shutil
|
||||
from contextlib import contextmanager
|
||||
import sys
|
||||
|
||||
from mitmproxy.utils import data
|
||||
from mitmproxy.net import tcp
|
||||
from mitmproxy.net import http
|
||||
|
||||
|
||||
test_data = data.Data(__name__).push("../../test/")
|
||||
def treader(bytes):
|
||||
"""
|
||||
Construct a tcp.Read object from bytes.
|
||||
"""
|
||||
fp = BytesIO(bytes)
|
||||
return tcp.Reader(fp)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def chdir(dir):
|
||||
orig_dir = os.getcwd()
|
||||
os.chdir(dir)
|
||||
yield
|
||||
os.chdir(orig_dir)
|
||||
|
||||
|
||||
@contextmanager
|
||||
@@ -25,12 +39,65 @@ def tmpdir(*args, **kwargs):
|
||||
shutil.rmtree(temp_workdir)
|
||||
|
||||
|
||||
def treader(bytes):
|
||||
def _check_exception(expected, actual, exc_tb):
|
||||
if isinstance(expected, str):
|
||||
if expected.lower() not in str(actual).lower():
|
||||
raise AssertionError(
|
||||
"Expected %s, but caught %s" % (
|
||||
repr(expected), repr(actual)
|
||||
)
|
||||
)
|
||||
else:
|
||||
if not isinstance(actual, expected):
|
||||
raise AssertionError(
|
||||
"Expected %s, but caught %s %s" % (
|
||||
expected.__name__, actual.__class__.__name__, repr(actual)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def raises(expected_exception, obj=None, *args, **kwargs):
|
||||
"""
|
||||
Construct a tcp.Read object from bytes.
|
||||
Assert that a callable raises a specified exception.
|
||||
|
||||
:exc An exception class or a string. If a class, assert that an
|
||||
exception of this type is raised. If a string, assert that the string
|
||||
occurs in the string representation of the exception, based on a
|
||||
case-insenstivie match.
|
||||
|
||||
:obj A callable object.
|
||||
|
||||
:args Arguments to be passsed to the callable.
|
||||
|
||||
:kwargs Arguments to be passed to the callable.
|
||||
"""
|
||||
fp = BytesIO(bytes)
|
||||
return tcp.Reader(fp)
|
||||
if obj is None:
|
||||
return RaisesContext(expected_exception)
|
||||
else:
|
||||
try:
|
||||
ret = obj(*args, **kwargs)
|
||||
except Exception as actual:
|
||||
_check_exception(expected_exception, actual, sys.exc_info()[2])
|
||||
else:
|
||||
raise AssertionError("No exception raised. Return value: {}".format(ret))
|
||||
|
||||
|
||||
class RaisesContext:
|
||||
def __init__(self, expected_exception):
|
||||
self.expected_exception = expected_exception
|
||||
|
||||
def __enter__(self):
|
||||
return
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if not exc_type:
|
||||
raise AssertionError("No exception raised.")
|
||||
else:
|
||||
_check_exception(self.expected_exception, exc_val, exc_tb)
|
||||
return True
|
||||
|
||||
|
||||
test_data = data.Data(__name__).push("../../test/")
|
||||
|
||||
|
||||
def treq(**kwargs):
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import argparse
|
||||
import re
|
||||
import os
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import options
|
||||
from mitmproxy import platform
|
||||
from mitmproxy.utils import human
|
||||
@@ -17,6 +19,91 @@ class ParseException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def _parse_hook(s):
|
||||
sep, rem = s[0], s[1:]
|
||||
parts = rem.split(sep, 2)
|
||||
if len(parts) == 2:
|
||||
patt = ".*"
|
||||
a, b = parts
|
||||
elif len(parts) == 3:
|
||||
patt, a, b = parts
|
||||
else:
|
||||
raise ParseException(
|
||||
"Malformed hook specifier - too few clauses: %s" % s
|
||||
)
|
||||
|
||||
if not a:
|
||||
raise ParseException("Empty clause: %s" % str(patt))
|
||||
|
||||
if not flowfilter.parse(patt):
|
||||
raise ParseException("Malformed filter pattern: %s" % patt)
|
||||
|
||||
return patt, a, b
|
||||
|
||||
|
||||
def parse_replace_hook(s):
|
||||
"""
|
||||
Returns a (pattern, regex, replacement) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the replacement clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
|
||||
Checks that pattern and regex are both well-formed. Raises
|
||||
ParseException on error.
|
||||
"""
|
||||
patt, regex, replacement = _parse_hook(s)
|
||||
try:
|
||||
re.compile(regex)
|
||||
except re.error as e:
|
||||
raise ParseException("Malformed replacement regex: %s" % str(e))
|
||||
return patt, regex, replacement
|
||||
|
||||
|
||||
def parse_setheader(s):
|
||||
"""
|
||||
Returns a (pattern, header, value) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/header/value
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the value clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
|
||||
Checks that pattern and regex are both well-formed. Raises
|
||||
ParseException on error.
|
||||
"""
|
||||
return _parse_hook(s)
|
||||
|
||||
|
||||
def get_common_options(args):
|
||||
stickycookie, stickyauth = None, None
|
||||
if args.stickycookie_filt:
|
||||
@@ -29,6 +116,34 @@ def get_common_options(args):
|
||||
if stream_large_bodies:
|
||||
stream_large_bodies = human.parse_size(stream_large_bodies)
|
||||
|
||||
reps = []
|
||||
for i in args.replace or []:
|
||||
try:
|
||||
p = parse_replace_hook(i)
|
||||
except ParseException as e:
|
||||
raise exceptions.OptionsError(e)
|
||||
reps.append(p)
|
||||
for i in args.replace_file or []:
|
||||
try:
|
||||
patt, rex, path = parse_replace_hook(i)
|
||||
except ParseException as e:
|
||||
raise exceptions.OptionsError(e)
|
||||
try:
|
||||
v = open(path, "rb").read()
|
||||
except IOError as e:
|
||||
raise exceptions.OptionsError(
|
||||
"Could not read replace file: %s" % path
|
||||
)
|
||||
reps.append((patt, rex, v))
|
||||
|
||||
setheaders = []
|
||||
for i in args.setheader or []:
|
||||
try:
|
||||
p = parse_setheader(i)
|
||||
except ParseException as e:
|
||||
raise exceptions.OptionsError(e)
|
||||
setheaders.append(p)
|
||||
|
||||
if args.streamfile and args.streamfile[0] == args.rfile:
|
||||
if args.streamfile[1] == "wb":
|
||||
raise exceptions.OptionsError(
|
||||
@@ -97,9 +212,9 @@ def get_common_options(args):
|
||||
args.verbose = 0
|
||||
|
||||
return dict(
|
||||
onboarding=args.onboarding,
|
||||
onboarding_host=args.onboarding_host,
|
||||
onboarding_port=args.onboarding_port,
|
||||
app=args.app,
|
||||
app_host=args.app_host,
|
||||
app_port=args.app_port,
|
||||
|
||||
anticache=args.anticache,
|
||||
anticomp=args.anticomp,
|
||||
@@ -109,9 +224,8 @@ def get_common_options(args):
|
||||
refresh_server_playback=not args.norefresh,
|
||||
server_replay_use_headers=args.server_replay_use_headers,
|
||||
rfile=args.rfile,
|
||||
replacements=args.replacements,
|
||||
replacement_files=args.replacement_files,
|
||||
setheaders=args.setheaders,
|
||||
replacements=reps,
|
||||
setheaders=setheaders,
|
||||
server_replay=args.server_replay,
|
||||
scripts=args.scripts,
|
||||
stickycookie=stickycookie,
|
||||
@@ -137,6 +251,7 @@ def get_common_options(args):
|
||||
ciphers_client = args.ciphers_client,
|
||||
ciphers_server = args.ciphers_server,
|
||||
clientcerts = args.clientcerts,
|
||||
http2 = args.http2,
|
||||
ignore_hosts = args.ignore_hosts,
|
||||
listen_host = args.addr,
|
||||
listen_port = args.port,
|
||||
@@ -144,12 +259,8 @@ def get_common_options(args):
|
||||
mode = mode,
|
||||
no_upstream_cert = args.no_upstream_cert,
|
||||
spoof_source_address = args.spoof_source_address,
|
||||
|
||||
http2 = args.http2,
|
||||
http2_priority = args.http2_priority,
|
||||
websocket = args.websocket,
|
||||
rawtcp = args.rawtcp,
|
||||
|
||||
websocket = args.websocket,
|
||||
upstream_server = upstream_server,
|
||||
upstream_auth = args.upstream_auth,
|
||||
ssl_version_client = args.ssl_version_client,
|
||||
@@ -164,8 +275,13 @@ def get_common_options(args):
|
||||
def basic_options(parser):
|
||||
parser.add_argument(
|
||||
'--version',
|
||||
action='version',
|
||||
version="%(prog)s" + " " + version.VERSION
|
||||
)
|
||||
parser.add_argument(
|
||||
'--sysinfo',
|
||||
action='store_true',
|
||||
dest='version',
|
||||
dest='sysinfo',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--shortversion',
|
||||
@@ -337,26 +453,19 @@ def proxy_options(parser):
|
||||
)
|
||||
|
||||
http2 = group.add_mutually_exclusive_group()
|
||||
http2.add_argument("--no-http2", action="store_false", dest="http2")
|
||||
http2.add_argument("--http2", action="store_true", dest="http2",
|
||||
http2.add_argument("--http2", action="store_true", dest="http2")
|
||||
http2.add_argument("--no-http2", action="store_false", dest="http2",
|
||||
help="Explicitly enable/disable HTTP/2 support. "
|
||||
"HTTP/2 support is enabled by default.",
|
||||
"Disabled by default until major websites implement the spec correctly. "
|
||||
"Default value will change in a future version."
|
||||
)
|
||||
|
||||
http2_priority = group.add_mutually_exclusive_group()
|
||||
http2_priority.add_argument("--http2-priority", action="store_true", dest="http2_priority")
|
||||
http2_priority.add_argument("--no-http2-priority", action="store_false", dest="http2_priority",
|
||||
help="Explicitly enable/disable PRIORITY forwarding for HTTP/2 connections. "
|
||||
"PRIORITY forwarding is disabled by default, "
|
||||
"because some webservers fail at implementing the RFC properly.",
|
||||
)
|
||||
|
||||
websocket = group.add_mutually_exclusive_group()
|
||||
websocket.add_argument("--no-websocket", action="store_false", dest="websocket")
|
||||
websocket.add_argument("--websocket", action="store_true", dest="websocket",
|
||||
websocket.add_argument("--no-websocket", action="store_false", dest="websocket",
|
||||
help="Explicitly enable/disable WebSocket support. "
|
||||
"WebSocket support is enabled by default.",
|
||||
"Enabled by default."
|
||||
)
|
||||
websocket.add_argument("--websocket", action="store_true", dest="websocket")
|
||||
|
||||
parser.add_argument(
|
||||
"--upstream-auth",
|
||||
@@ -465,13 +574,13 @@ def proxy_ssl_options(parser):
|
||||
def onboarding_app(parser):
|
||||
group = parser.add_argument_group("Onboarding App")
|
||||
group.add_argument(
|
||||
"--no-onboarding",
|
||||
action="store_false", dest="onboarding",
|
||||
"--noapp",
|
||||
action="store_false", dest="app",
|
||||
help="Disable the mitmproxy onboarding app."
|
||||
)
|
||||
group.add_argument(
|
||||
"--onboarding-host",
|
||||
action="store", dest="onboarding_host",
|
||||
"--app-host",
|
||||
action="store", dest="app_host",
|
||||
help="""
|
||||
Domain to serve the onboarding app from. For transparent mode, use
|
||||
an IP when a DNS entry for the app domain is not present. Default:
|
||||
@@ -479,9 +588,9 @@ def onboarding_app(parser):
|
||||
""" % options.APP_HOST
|
||||
)
|
||||
group.add_argument(
|
||||
"--onboarding-port",
|
||||
"--app-port",
|
||||
action="store",
|
||||
dest="onboarding_port",
|
||||
dest="app_port",
|
||||
type=int,
|
||||
metavar="80",
|
||||
help="Port to serve the onboarding app from."
|
||||
@@ -573,13 +682,13 @@ def replacements(parser):
|
||||
)
|
||||
group.add_argument(
|
||||
"--replace",
|
||||
action="append", type=str, dest="replacements",
|
||||
action="append", type=str, dest="replace",
|
||||
metavar="PATTERN",
|
||||
help="Replacement pattern."
|
||||
)
|
||||
group.add_argument(
|
||||
"--replace-from-file",
|
||||
action="append", type=str, dest="replacement_files",
|
||||
action="append", type=str, dest="replace_file",
|
||||
metavar="PATH",
|
||||
help="""
|
||||
Replacement pattern, where the replacement clause is a path to a
|
||||
@@ -599,7 +708,7 @@ def set_headers(parser):
|
||||
)
|
||||
group.add_argument(
|
||||
"--setheader",
|
||||
action="append", type=str, dest="setheaders",
|
||||
action="append", type=str, dest="setheader",
|
||||
metavar="PATTERN",
|
||||
help="Header set pattern."
|
||||
)
|
||||
@@ -659,42 +768,42 @@ def common_options(parser):
|
||||
|
||||
|
||||
def mitmproxy():
|
||||
# Don't import mitmproxy.tools.console for mitmdump, urwid is not available
|
||||
# on all platforms.
|
||||
# Don't import mitmproxy.tools.console for mitmdump, urwid is not available on all
|
||||
# platforms.
|
||||
from .console import palettes
|
||||
|
||||
parser = argparse.ArgumentParser(usage="%(prog)s [options]")
|
||||
common_options(parser)
|
||||
parser.add_argument(
|
||||
"--palette", type=str,
|
||||
action="store", dest="console_palette",
|
||||
action="store", dest="palette",
|
||||
choices=sorted(palettes.palettes.keys()),
|
||||
help="Select color palette: " + ", ".join(palettes.palettes.keys())
|
||||
)
|
||||
parser.add_argument(
|
||||
"--palette-transparent",
|
||||
action="store_true", dest="console_palette_transparent",
|
||||
action="store_true", dest="palette_transparent",
|
||||
help="Set transparent background for palette."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-e", "--eventlog",
|
||||
action="store_true", dest="console_eventlog",
|
||||
action="store_true", dest="eventlog",
|
||||
help="Show event log."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--follow",
|
||||
action="store_true", dest="console_focus_follow",
|
||||
action="store_true", dest="focus_follow",
|
||||
help="Focus follows new flows."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--order",
|
||||
type=str, dest="console_order",
|
||||
type=str, dest="order",
|
||||
choices=[o[1] for o in view.orders],
|
||||
help="Flow sort order."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-mouse",
|
||||
action="store_true", dest="console_no_mouse",
|
||||
action="store_true", dest="no_mouse",
|
||||
help="Disable mouse interaction."
|
||||
)
|
||||
group = parser.add_argument_group(
|
||||
@@ -748,24 +857,24 @@ def mitmweb():
|
||||
group = parser.add_argument_group("Mitmweb")
|
||||
group.add_argument(
|
||||
"--no-browser",
|
||||
action="store_false", dest="web_open_browser",
|
||||
action="store_false", dest="open_browser",
|
||||
help="Don't start a browser"
|
||||
)
|
||||
group.add_argument(
|
||||
"--web-port",
|
||||
action="store", type=int, dest="web_port",
|
||||
"--wport",
|
||||
action="store", type=int, dest="wport",
|
||||
metavar="PORT",
|
||||
help="Mitmweb port."
|
||||
)
|
||||
group.add_argument(
|
||||
"--web-iface",
|
||||
action="store", dest="web_iface",
|
||||
"--wiface",
|
||||
action="store", dest="wiface",
|
||||
metavar="IFACE",
|
||||
help="Mitmweb interface."
|
||||
)
|
||||
group.add_argument(
|
||||
"--web-debug",
|
||||
action="store_true", dest="web_debug",
|
||||
"--wdebug",
|
||||
action="store_true", dest="wdebug",
|
||||
help="Turn on mitmweb debugging"
|
||||
)
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import urwid
|
||||
|
||||
from mitmproxy import http
|
||||
from mitmproxy.tools.console import common, searchable
|
||||
from mitmproxy.utils import human
|
||||
from mitmproxy.utils import strutils
|
||||
@@ -13,7 +12,7 @@ def maybe_timestamp(base, attr):
|
||||
return "active"
|
||||
|
||||
|
||||
def flowdetails(state, flow: http.HTTPFlow):
|
||||
def flowdetails(state, flow):
|
||||
text = []
|
||||
|
||||
sc = flow.server_conn
|
||||
@@ -22,7 +21,7 @@ def flowdetails(state, flow: http.HTTPFlow):
|
||||
resp = flow.response
|
||||
metadata = flow.metadata
|
||||
|
||||
if metadata is not None and len(metadata) > 0:
|
||||
if metadata is not None and len(metadata.items()) > 0:
|
||||
parts = [[str(k), repr(v)] for k, v in metadata.items()]
|
||||
text.append(urwid.Text([("head", "Metadata:")]))
|
||||
text.extend(common.format_keyvals(parts, key="key", val="text", indent=4))
|
||||
@@ -33,8 +32,6 @@ def flowdetails(state, flow: http.HTTPFlow):
|
||||
["Address", repr(sc.address)],
|
||||
["Resolved Address", repr(sc.ip_address)],
|
||||
]
|
||||
if resp:
|
||||
parts.append(["HTTP Version", resp.http_version])
|
||||
if sc.alpn_proto_negotiated:
|
||||
parts.append(["ALPN", sc.alpn_proto_negotiated])
|
||||
|
||||
@@ -94,8 +91,6 @@ def flowdetails(state, flow: http.HTTPFlow):
|
||||
parts = [
|
||||
["Address", repr(cc.address)],
|
||||
]
|
||||
if req:
|
||||
parts.append(["HTTP Version", req.http_version])
|
||||
if cc.tls_version:
|
||||
parts.append(["TLS Version", cc.tls_version])
|
||||
if cc.sni:
|
||||
|
||||
@@ -35,7 +35,6 @@ def _mkhelp():
|
||||
("W", "stream flows to file"),
|
||||
("X", "kill and delete flow, even if it's mid-intercept"),
|
||||
("z", "clear flow list or eventlog"),
|
||||
("Z", "clear unmarked flows"),
|
||||
("tab", "tab between eventlog and flow list"),
|
||||
("enter", "view flow"),
|
||||
("|", "run script on this flow"),
|
||||
@@ -338,10 +337,9 @@ class FlowListBox(urwid.ListBox):
|
||||
)
|
||||
|
||||
def new_request(self, url, method):
|
||||
try:
|
||||
parts = mitmproxy.net.http.url.parse(str(url))
|
||||
except ValueError as e:
|
||||
signals.status_message.send(message = "Invalid URL: " + str(e))
|
||||
parts = mitmproxy.net.http.url.parse(str(url))
|
||||
if not parts:
|
||||
signals.status_message.send(message="Invalid Url")
|
||||
return
|
||||
scheme, host, port, path = parts
|
||||
f = self.master.create_request(method, scheme, host, port, path)
|
||||
@@ -356,8 +354,6 @@ class FlowListBox(urwid.ListBox):
|
||||
self.master.view.update(f)
|
||||
elif key == "z":
|
||||
self.master.view.clear()
|
||||
elif key == "Z":
|
||||
self.master.view.clear_not_marked()
|
||||
elif key == "e":
|
||||
self.master.toggle_eventlog()
|
||||
elif key == "g":
|
||||
@@ -391,7 +387,7 @@ class FlowListBox(urwid.ListBox):
|
||||
lookup = dict([(i[0], i[1]) for i in view.orders])
|
||||
|
||||
def change_order(k):
|
||||
self.master.options.console_order = lookup[k]
|
||||
self.master.options.order = lookup[k]
|
||||
|
||||
signals.status_prompt_onekey.send(
|
||||
prompt = "Order",
|
||||
@@ -400,10 +396,10 @@ class FlowListBox(urwid.ListBox):
|
||||
)
|
||||
elif key == "F":
|
||||
o = self.master.options
|
||||
o.console_focus_follow = not o.console_focus_follow
|
||||
o.focus_follow = not o.focus_follow
|
||||
elif key == "v":
|
||||
val = not self.master.options.console_order_reversed
|
||||
self.master.options.console_order_reversed = val
|
||||
val = not self.master.options.order_reversed
|
||||
self.master.options.order_reversed = val
|
||||
elif key == "W":
|
||||
if self.master.options.streamfile:
|
||||
self.master.options.streamfile = None
|
||||
|
||||
@@ -65,8 +65,8 @@ class HeaderEditor(base.GridEditor):
|
||||
class URLEncodedFormEditor(base.GridEditor):
|
||||
title = "Editing URL-encoded form"
|
||||
columns = [
|
||||
col_text.Column("Key"),
|
||||
col_text.Column("Value")
|
||||
col_bytes.Column("Key"),
|
||||
col_bytes.Column("Value")
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ from mitmproxy import io
|
||||
from mitmproxy import log
|
||||
from mitmproxy.addons import view
|
||||
from mitmproxy.addons import intercept
|
||||
import mitmproxy.options
|
||||
from mitmproxy.tools.console import flowlist
|
||||
from mitmproxy.tools.console import flowview
|
||||
from mitmproxy.tools.console import grideditor
|
||||
@@ -32,6 +33,8 @@ from mitmproxy.tools.console import statusbar
|
||||
from mitmproxy.tools.console import window
|
||||
from mitmproxy.utils import strutils
|
||||
|
||||
from mitmproxy.net import tcp
|
||||
|
||||
EVENTLOG_SIZE = 10000
|
||||
|
||||
|
||||
@@ -105,7 +108,7 @@ class ConsoleMaster(master.Master):
|
||||
self.logbuffer.append(e)
|
||||
if len(self.logbuffer) > EVENTLOG_SIZE:
|
||||
self.logbuffer.pop(0)
|
||||
if self.options.console_focus_follow:
|
||||
if self.options.focus_follow:
|
||||
self.logbuffer.set_focus(len(self.logbuffer) - 1)
|
||||
|
||||
def sig_call_in(self, sender, seconds, callback, args=()):
|
||||
@@ -145,11 +148,11 @@ class ConsoleMaster(master.Master):
|
||||
try:
|
||||
with self.handlecontext():
|
||||
sc.run_once(command, [f])
|
||||
except ValueError as e:
|
||||
signals.add_log("Input error: %s" % e, "warn")
|
||||
except mitmproxy.exceptions.AddonError as e:
|
||||
signals.add_log("Script error: %s" % e, "warn")
|
||||
|
||||
def toggle_eventlog(self):
|
||||
self.options.console_eventlog = not self.options.console_eventlog
|
||||
self.options.eventlog = not self.options.eventlog
|
||||
self.view_flowlist()
|
||||
signals.replace_view_state.send(self)
|
||||
|
||||
@@ -229,8 +232,8 @@ class ConsoleMaster(master.Master):
|
||||
|
||||
def set_palette(self, options, updated):
|
||||
self.ui.register_palette(
|
||||
palettes.palettes[options.console_palette].palette(
|
||||
options.console_palette_transparent
|
||||
palettes.palettes[options.palette].palette(
|
||||
options.palette_transparent
|
||||
)
|
||||
)
|
||||
self.ui.clear()
|
||||
@@ -252,7 +255,7 @@ class ConsoleMaster(master.Master):
|
||||
self.loop = urwid.MainLoop(
|
||||
urwid.SolidFill("x"),
|
||||
screen = self.ui,
|
||||
handle_mouse = not self.options.console_no_mouse,
|
||||
handle_mouse = not self.options.no_mouse,
|
||||
)
|
||||
self.ab = statusbar.ActionBar()
|
||||
|
||||
@@ -270,6 +273,14 @@ class ConsoleMaster(master.Master):
|
||||
sys.exit(1)
|
||||
|
||||
self.loop.set_alarm_in(0.01, self.ticker)
|
||||
if self.options.http2 and not tcp.HAS_ALPN: # pragma: no cover
|
||||
def http2err(*args, **kwargs):
|
||||
signals.status_message.send(
|
||||
message = "HTTP/2 disabled - OpenSSL 1.0.2+ required."
|
||||
" Use --no-http2 to silence this warning.",
|
||||
expire=5
|
||||
)
|
||||
self.loop.set_alarm_in(0.01, http2err)
|
||||
|
||||
self.loop.set_alarm_in(
|
||||
0.0001,
|
||||
@@ -346,7 +357,7 @@ class ConsoleMaster(master.Master):
|
||||
if self.ui.started:
|
||||
self.ui.clear()
|
||||
|
||||
if self.options.console_eventlog:
|
||||
if self.options.eventlog:
|
||||
body = flowlist.BodyPile(self)
|
||||
else:
|
||||
body = flowlist.FlowListBox(self)
|
||||
@@ -412,7 +423,7 @@ class ConsoleMaster(master.Master):
|
||||
def websocket_message(self, f):
|
||||
super().websocket_message(f)
|
||||
message = f.messages[-1]
|
||||
signals.add_log(f.message_info(message), "info")
|
||||
signals.add_log(message.info, "info")
|
||||
signals.add_log(strutils.bytes_to_escaped_str(message.content), "debug")
|
||||
|
||||
@controller.handler
|
||||
|
||||
@@ -6,9 +6,6 @@ from mitmproxy.tools.console import grideditor
|
||||
from mitmproxy.tools.console import select
|
||||
from mitmproxy.tools.console import signals
|
||||
|
||||
from mitmproxy.addons import replace
|
||||
from mitmproxy.addons import setheaders
|
||||
|
||||
footer = [
|
||||
('heading_key', "enter/space"), ":toggle ",
|
||||
('heading_key', "C"), ":clear all ",
|
||||
@@ -78,7 +75,7 @@ class Options(urwid.WidgetWrap):
|
||||
select.Option(
|
||||
"Palette",
|
||||
"P",
|
||||
checker("console_palette", master.options),
|
||||
checker("palette", master.options),
|
||||
self.palette
|
||||
),
|
||||
select.Option(
|
||||
@@ -191,16 +188,10 @@ class Options(urwid.WidgetWrap):
|
||||
)
|
||||
|
||||
def setheaders(self):
|
||||
data = []
|
||||
for d in self.master.options.setheaders:
|
||||
if isinstance(d, str):
|
||||
data.append(setheaders.parse_setheader(d))
|
||||
else:
|
||||
data.append(d)
|
||||
self.master.view_grideditor(
|
||||
grideditor.SetHeadersEditor(
|
||||
self.master,
|
||||
data,
|
||||
self.master.options.setheaders,
|
||||
self.master.options.setter("setheaders")
|
||||
)
|
||||
)
|
||||
@@ -224,16 +215,10 @@ class Options(urwid.WidgetWrap):
|
||||
)
|
||||
|
||||
def replacepatterns(self):
|
||||
data = []
|
||||
for d in self.master.options.replacements:
|
||||
if isinstance(d, str):
|
||||
data.append(replace.parse_hook(d))
|
||||
else:
|
||||
data.append(d)
|
||||
self.master.view_grideditor(
|
||||
grideditor.ReplaceEditor(
|
||||
self.master,
|
||||
data,
|
||||
self.master.options.replacements,
|
||||
self.master.options.setter("replacements")
|
||||
)
|
||||
)
|
||||
|
||||
@@ -42,7 +42,7 @@ class PalettePicker(urwid.WidgetWrap):
|
||||
return select.Option(
|
||||
i,
|
||||
None,
|
||||
lambda: self.master.options.console_palette == name,
|
||||
lambda: self.master.options.palette == name,
|
||||
lambda: setattr(self.master.options, "palette", name)
|
||||
)
|
||||
|
||||
@@ -58,7 +58,7 @@ class PalettePicker(urwid.WidgetWrap):
|
||||
select.Option(
|
||||
"Transparent",
|
||||
"T",
|
||||
lambda: master.options.console_palette_transparent,
|
||||
lambda: master.options.palette_transparent,
|
||||
master.options.toggler("palette_transparent")
|
||||
)
|
||||
]
|
||||
|
||||
@@ -204,10 +204,10 @@ class StatusBar(urwid.WidgetWrap):
|
||||
r.append("[")
|
||||
r.append(("heading_key", "M"))
|
||||
r.append(":%s]" % self.master.options.default_contentview)
|
||||
if self.master.options.console_order:
|
||||
if self.master.options.order:
|
||||
r.append("[")
|
||||
r.append(("heading_key", "o"))
|
||||
r.append(":%s]" % self.master.options.console_order)
|
||||
r.append(":%s]" % self.master.options.order)
|
||||
|
||||
opts = []
|
||||
if self.master.options.anticache:
|
||||
@@ -222,7 +222,7 @@ class StatusBar(urwid.WidgetWrap):
|
||||
opts.append("killextra")
|
||||
if self.master.options.no_upstream_cert:
|
||||
opts.append("no-upstream-cert")
|
||||
if self.master.options.console_focus_follow:
|
||||
if self.master.options.focus_follow:
|
||||
opts.append("following")
|
||||
if self.master.options.stream_large_bodies:
|
||||
opts.append(
|
||||
@@ -258,7 +258,7 @@ class StatusBar(urwid.WidgetWrap):
|
||||
else:
|
||||
offset = self.master.view.focus.index + 1
|
||||
|
||||
if self.master.options.console_order_reversed:
|
||||
if self.master.options.order_reversed:
|
||||
arrow = common.SYMBOL_UP
|
||||
else:
|
||||
arrow = common.SYMBOL_DOWN
|
||||
|
||||
@@ -6,6 +6,7 @@ from mitmproxy import addons
|
||||
from mitmproxy import options
|
||||
from mitmproxy import master
|
||||
from mitmproxy.addons import dumper, termlog
|
||||
from mitmproxy.net import tcp
|
||||
|
||||
|
||||
class DumpError(Exception):
|
||||
@@ -29,13 +30,7 @@ class Options(options.Options):
|
||||
|
||||
class DumpMaster(master.Master):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
options: Options,
|
||||
server,
|
||||
with_termlog=True,
|
||||
with_dumper=True,
|
||||
) -> None:
|
||||
def __init__(self, options, server, with_termlog=True, with_dumper=True):
|
||||
master.Master.__init__(self, options, server)
|
||||
self.has_errored = False
|
||||
if with_termlog:
|
||||
@@ -43,6 +38,8 @@ class DumpMaster(master.Master):
|
||||
self.addons.add(*addons.default_addons())
|
||||
if with_dumper:
|
||||
self.addons.add(dumper.Dumper())
|
||||
# This line is just for type hinting
|
||||
self.options = self.options # type: Options
|
||||
|
||||
if not self.options.no_server:
|
||||
self.add_log(
|
||||
@@ -50,6 +47,13 @@ class DumpMaster(master.Master):
|
||||
"info"
|
||||
)
|
||||
|
||||
if self.server and self.options.http2 and not tcp.HAS_ALPN: # pragma: no cover
|
||||
self.add_log(
|
||||
"ALPN support missing (OpenSSL 1.0.2+ required)!\n"
|
||||
"HTTP/2 is disabled. Use --no-http2 to silence this warning.",
|
||||
"error"
|
||||
)
|
||||
|
||||
if options.rfile:
|
||||
try:
|
||||
self.load_flows_file(options.rfile)
|
||||
|
||||
@@ -35,10 +35,9 @@ def assert_utf8_env():
|
||||
|
||||
|
||||
def process_options(parser, options, args):
|
||||
if args.version:
|
||||
print(debug.dump_system_info())
|
||||
if args.sysinfo:
|
||||
print(debug.sysinfo())
|
||||
sys.exit(0)
|
||||
|
||||
debug.register_info_dumpers()
|
||||
pconf = config.ProxyConfig(options)
|
||||
if options.no_server:
|
||||
@@ -70,15 +69,14 @@ def mitmproxy(args=None): # pragma: no cover
|
||||
console_options.merge(cmdline.get_common_options(args))
|
||||
console_options.merge(
|
||||
dict(
|
||||
console_palette = args.console_palette,
|
||||
console_palette_transparent = args.console_palette_transparent,
|
||||
console_eventlog = args.console_eventlog,
|
||||
console_focus_follow = args.console_focus_follow,
|
||||
console_no_mouse = args.console_no_mouse,
|
||||
console_order = args.console_order,
|
||||
|
||||
filter = args.filter,
|
||||
palette = args.palette,
|
||||
palette_transparent = args.palette_transparent,
|
||||
eventlog = args.eventlog,
|
||||
focus_follow = args.focus_follow,
|
||||
intercept = args.intercept,
|
||||
filter = args.filter,
|
||||
no_mouse = args.no_mouse,
|
||||
order = args.order,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -150,10 +148,10 @@ def mitmweb(args=None): # pragma: no cover
|
||||
web_options.merge(
|
||||
dict(
|
||||
intercept = args.intercept,
|
||||
web_open_browser = args.web_open_browser,
|
||||
web_debug = args.web_debug,
|
||||
web_iface = args.web_iface,
|
||||
web_port = args.web_port,
|
||||
open_browser = args.open_browser,
|
||||
wdebug = args.wdebug,
|
||||
wiface = args.wiface,
|
||||
wport = args.wport,
|
||||
)
|
||||
)
|
||||
server = process_options(parser, web_options, args)
|
||||
|
||||
@@ -33,7 +33,6 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
|
||||
"server_conn": flow.server_conn.get_state(),
|
||||
"type": flow.type,
|
||||
"modified": flow.modified(),
|
||||
"marked": flow.marked,
|
||||
}
|
||||
# .alpn_proto_negotiated is bytes, we need to decode that.
|
||||
for conn in "client_conn", "server_conn":
|
||||
@@ -46,12 +45,6 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
|
||||
|
||||
if isinstance(flow, http.HTTPFlow):
|
||||
if flow.request:
|
||||
if flow.request.raw_content:
|
||||
content_length = len(flow.request.raw_content)
|
||||
content_hash = hashlib.sha256(flow.request.raw_content).hexdigest()
|
||||
else:
|
||||
content_length = None
|
||||
content_hash = None
|
||||
f["request"] = {
|
||||
"method": flow.request.method,
|
||||
"scheme": flow.request.scheme,
|
||||
@@ -60,26 +53,24 @@ def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
|
||||
"path": flow.request.path,
|
||||
"http_version": flow.request.http_version,
|
||||
"headers": tuple(flow.request.headers.items(True)),
|
||||
"contentLength": content_length,
|
||||
"contentHash": content_hash,
|
||||
"contentLength": len(
|
||||
flow.request.raw_content) if flow.request.raw_content is not None else None,
|
||||
"contentHash": hashlib.sha256(
|
||||
flow.request.raw_content).hexdigest() if flow.request.raw_content is not None else None,
|
||||
"timestamp_start": flow.request.timestamp_start,
|
||||
"timestamp_end": flow.request.timestamp_end,
|
||||
"is_replay": flow.request.is_replay,
|
||||
}
|
||||
if flow.response:
|
||||
if flow.response.raw_content:
|
||||
content_length = len(flow.response.raw_content)
|
||||
content_hash = hashlib.sha256(flow.response.raw_content).hexdigest()
|
||||
else:
|
||||
content_length = None
|
||||
content_hash = None
|
||||
f["response"] = {
|
||||
"http_version": flow.response.http_version,
|
||||
"status_code": flow.response.status_code,
|
||||
"reason": flow.response.reason,
|
||||
"headers": tuple(flow.response.headers.items(True)),
|
||||
"contentLength": content_length,
|
||||
"contentHash": content_hash,
|
||||
"contentLength": len(
|
||||
flow.response.raw_content) if flow.response.raw_content is not None else None,
|
||||
"contentHash": hashlib.sha256(
|
||||
flow.response.raw_content).hexdigest() if flow.response.raw_content is not None else None,
|
||||
"timestamp_start": flow.response.timestamp_start,
|
||||
"timestamp_end": flow.response.timestamp_end,
|
||||
"is_replay": flow.response.is_replay,
|
||||
@@ -119,7 +110,7 @@ class RequestHandler(tornado.web.RequestHandler):
|
||||
self.add_header(
|
||||
"Content-Security-Policy",
|
||||
"default-src 'self'; "
|
||||
"connect-src 'self' ws:; "
|
||||
"connect-src 'self' ws://* ; "
|
||||
"style-src 'self' 'unsafe-inline'"
|
||||
)
|
||||
|
||||
@@ -194,7 +185,7 @@ class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler):
|
||||
|
||||
@classmethod
|
||||
def broadcast(cls, **kwargs):
|
||||
message = json.dumps(kwargs, ensure_ascii=False).encode("utf8", "surrogateescape")
|
||||
message = json.dumps(kwargs, ensure_ascii=False)
|
||||
|
||||
for conn in cls.connections:
|
||||
try:
|
||||
|
||||
@@ -38,7 +38,7 @@ class WebMaster(master.Master):
|
||||
if with_termlog:
|
||||
self.addons.add(termlog.TermLog())
|
||||
self.app = app.Application(
|
||||
self, self.options.web_debug
|
||||
self, self.options.wdebug
|
||||
)
|
||||
# This line is just for type hinting
|
||||
self.options = self.options # type: Options
|
||||
@@ -103,7 +103,7 @@ class WebMaster(master.Master):
|
||||
iol = tornado.ioloop.IOLoop.instance()
|
||||
|
||||
http_server = tornado.httpserver.HTTPServer(self.app)
|
||||
http_server.listen(self.options.web_port, self.options.web_iface)
|
||||
http_server.listen(self.options.wport, self.options.wiface)
|
||||
|
||||
iol.add_callback(self.start)
|
||||
tornado.ioloop.PeriodicCallback(lambda: self.tick(timeout=0), 5).start()
|
||||
@@ -113,13 +113,13 @@ class WebMaster(master.Master):
|
||||
"info"
|
||||
)
|
||||
|
||||
web_url = "http://{}:{}/".format(self.options.web_iface, self.options.web_port)
|
||||
web_url = "http://{}:{}/".format(self.options.wiface, self.options.wport)
|
||||
self.add_log(
|
||||
"Web server listening at {}".format(web_url),
|
||||
"info"
|
||||
)
|
||||
|
||||
if self.options.web_open_browser:
|
||||
if self.options.open_browser:
|
||||
success = open_browser(web_url)
|
||||
if not success:
|
||||
self.add_log(
|
||||
|
||||
@@ -564,6 +564,7 @@ footer .label {
|
||||
.CodeMirror {
|
||||
border: 1px solid #ccc;
|
||||
height: auto !important;
|
||||
max-height: 2048px !important;
|
||||
}
|
||||
/* BASICS */
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user