Mirror of https://github.com/zhigang1992/mitmproxy.git (synced 2026-01-12 22:48:54 +08:00)

Compare commits (669 commits)
@@ -7,6 +7,9 @@ environment:
matrix:
- PYTHON: "C:\\Python35"
TOXENV: "py35"
# TODO: ENABLE WHEN AVAILABLE
# - PYTHON: "C:\\Python36"
#   TOXENV: "py36"

SNAPSHOT_HOST:
secure: NeTo57s2rJhCd/mjKHetXVxCFd3uhr8txnjnAXD1tUI=

@@ -25,7 +28,7 @@ install:
- "pip install -U tox"

test_script:
- ps: "tox -- --cov mitmproxy --cov pathod -v"
- ps: "tox -- --verbose --cov-report=term"
- ps: |
$Env:VERSION = $(python mitmproxy/version.py)
$Env:SKIP_MITMPROXY = "python -c `"print('skip mitmproxy')`""

@@ -41,12 +44,12 @@ test_script:
if (!(Test-Path "C:\projects\mitmproxy\release\installbuilder-installer.exe")) {
"Download InstallBuilder..."
(New-Object System.Net.WebClient).DownloadFile(
"https://installbuilder.bitrock.com/installbuilder-enterprise-16.11.1-windows-installer.exe",
"https://installbuilder.bitrock.com/installbuilder-enterprise-17.1.0-windows-installer.exe",
"C:\projects\mitmproxy\release\installbuilder-installer.exe"
)
}
Start-Process "C:\projects\mitmproxy\release\installbuilder-installer.exe" "--mode unattended --unattendedmodeui none" -Wait
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 16.11.1\bin\builder-cli.exe' `
& 'C:\Program Files (x86)\BitRock InstallBuilder Enterprise 17.1.0\bin\builder-cli.exe' `
build `
.\release\installbuilder\mitmproxy.xml `
windows `
.env (6 lines changed)

@@ -1,6 +0,0 @@
DIR="$( dirname "${BASH_SOURCE[0]}" )"
ACTIVATE_DIR="$(if [ -f "$DIR/venv/bin/activate" ]; then echo 'bin'; else echo 'Scripts'; fi;)"
if [ -z "$VIRTUAL_ENV" ] && [ -f "$DIR/venv/$ACTIVATE_DIR/activate" ]; then
echo "Activating mitmproxy virtualenv..."
source "$DIR/venv/$ACTIVATE_DIR/activate"
fi
.gitattributes (2 lines changed)

@@ -1,2 +1,2 @@
mitmproxy/tools/web/static/**/* -diff
mitmproxy/tools/web/static/**/* -diff linguist-vendored
web/src/js/filt/filt.js -diff
.gitignore (4 lines changed)

@@ -11,6 +11,7 @@ MANIFEST
.cache/
.tox*/
build/
mitmproxy/contrib/kaitaistruct/*.ksy

# UI

@@ -19,3 +20,6 @@ bower_components
*.map
sslkeylogfile.log
.tox/
.python-version
coverage.xml
web/coverage/
.travis.yml (33 lines changed)

@@ -5,6 +5,8 @@ env:
global:
- CI_DEPS=codecov>=2.0.5
- CI_COMMANDS=codecov
git:
depth: 10000

matrix:
fast_finish: true

@@ -31,12 +33,33 @@ matrix:
- debian-sid
packages:
- libssl-dev
- python: 3.6
env: TOXENV=py36 OPENSSL_ALPN
addons:
apt:
sources:
# Debian sid currently holds OpenSSL 1.1.0
# change this with future releases!
- debian-sid
packages:
- libssl-dev
- python: 3.5
env: TOXENV=individual_coverage
- python: 3.5
env: TOXENV=docs
git:
depth: 10000
allow_failures:
- python: pypy
- language: node_js
node_js: "node"
before_install:
- curl -o- -L https://yarnpkg.com/install.sh | bash
- export PATH=$HOME/.yarn/bin:$PATH
install:
- cd web && yarn
- yarn global add codecov
script: npm test && codecov
cache:
yarn: true
directories:
- web/node_modules

install:
- |

@@ -53,7 +76,7 @@ install:
- pip install tox

script:
- tox -- --cov mitmproxy --cov pathod -v
- tox -- --verbose --cov-report=term
- |
if [[ $BDIST == "1" ]]
then
CHANGELOG (39 lines changed)

@@ -1,3 +1,42 @@
28 April 2017: mitmproxy 2.0.2

* Fix mitmweb's Content-Security-Policy to work with Chrome 58+

* HTTP/2: actually use header normalization from hyper-h2


15 March 2017: mitmproxy 2.0.1

* bump cryptography dependency

* bump pyparsing dependency

* HTTP/2: use header normalization from hyper-h2


21 February 2017: mitmproxy 2.0

* HTTP/2 is now enabled by default.

* Image ContentView: Parse images with Kaitai Struct (kaitai.io) instead of Pillow.
  This simplifies installation, reduces binary size, and allows parsing in pure Python.

* Web: Add missing flow filters.

* Add transparent proxy support for OpenBSD.

* Check the mitmproxy CA for expiration and warn the user to regenerate it if necessary.

* Testing: Tremendous improvements, enforced 100% coverage for large parts of the
  codebase, increased overall coverage.

* Enforce individual coverage: one source file -> one test file with 100% coverage.

* A myriad of other small improvements throughout the project.

* Numerous bugfixes.


26 December 2016: mitmproxy 1.0

* All mitmproxy tools are now Python 3 only! We plan to support Python 3.5 and higher.
README.rst (104 lines changed)

@@ -10,6 +10,8 @@ interface.

``mitmdump`` is the command-line version of mitmproxy. Think tcpdump for HTTP.

``mitmweb`` is a web-based interface for mitmproxy.

``pathoc`` and ``pathod`` are perverse HTTP client and server applications
designed to let you craft almost any conceivable HTTP request, including ones
that creatively violate the standards.

@@ -35,7 +37,7 @@ each other solve problems, and come up with new ideas for the project.
|mitmproxy_discourse|

Join our developer chat on Slack if you would like to hack on mitmproxy itself.
Join our developer chat on Slack if you would like to contribute to mitmproxy itself.

|slack|

@@ -46,72 +48,64 @@ Installation
The installation instructions are `here <http://docs.mitmproxy.org/en/stable/install.html>`__.
If you want to contribute changes, keep on reading.

Contributing
------------

Hacking
-------
As an open source project, mitmproxy welcomes contributions of all forms. If you would like to bring the project forward,
please consider contributing in the following areas:

To get started hacking on mitmproxy, make sure you have Python_ 3.5.x or above with
virtualenv_ installed (you can find installation instructions for virtualenv
`here <http://virtualenv.readthedocs.org/en/latest/>`__). Then do the following:
- **Maintenance:** We are *incredibly* thankful for individuals who are stepping up and helping with maintenance. This includes (but is not limited to) triaging issues, reviewing pull requests and picking up stale ones, helping out other users in our forums_, creating minimal, complete and verifiable examples or test cases for existing bug reports, updating documentation, or fixing minor bugs that have recently been reported.
- **Code Contributions:** We actively mark issues that we consider are `good first contributions`_. If you intend to work on a larger contribution to the project, please come talk to us first.

.. code-block:: text
Development Setup
-----------------

To get started hacking on mitmproxy, please follow the `advanced installation`_ steps to install mitmproxy from source, but stop right before running ``pip3 install mitmproxy``. Instead, do the following:

.. code-block:: bash

git clone https://github.com/mitmproxy/mitmproxy.git
cd mitmproxy
./dev.sh  # powershell .\dev.ps1 on Windows
./dev.sh  # "powershell .\dev.ps1" on Windows

The *dev* script will create a virtualenv environment in a directory called
"venv", and install all mandatory and optional dependencies into it. The
primary mitmproxy components - mitmproxy and pathod - are installed as
The *dev* script will create a `virtualenv`_ environment in a directory called "venv"
and install all mandatory and optional dependencies into it. The primary
mitmproxy components - mitmproxy and pathod - are installed as
"editable", so any changes to the source in the repository will be reflected
live in the virtualenv.

To confirm that you're up and running, activate the virtualenv, and run the
mitmproxy test suite:

.. code-block:: text

. venv/bin/activate  # venv\Scripts\activate on Windows
py.test

Note that the main executables for the project - ``mitmdump``, ``mitmproxy``,
The main executables for the project - ``mitmdump``, ``mitmproxy``,
``mitmweb``, ``pathod``, and ``pathoc`` - are all created within the
virtualenv. After activating the virtualenv, they will be on your $PATH, and
you can run them like any other command:

.. code-block:: text
.. code-block:: bash

. venv/bin/activate  # "venv\Scripts\activate" on Windows
mitmdump --version

For convenience, the project includes an autoenv_ file (`.env`_) that
auto-activates the virtualenv when you cd into the mitmproxy directory.


Testing
-------

If you've followed the procedure above, you already have all the development
requirements installed, and you can simply run the test suite:
requirements installed, and you can run the full test suite (including tests for code style and documentation) with tox_:

.. code-block:: text
.. code-block:: bash

py.test
tox

For speedier testing, we recommend you run `pytest`_ directly on individual test files or folders:

.. code-block:: bash

cd test/mitmproxy/addons
pytest --cov mitmproxy.addons.anticache --looponfail test_anticache.py

As pytest does not check the code style, you probably want to run ``tox -e lint`` before committing your changes.

Please ensure that all patches are accompanied by matching changes in the test
suite. The project tries to maintain 100% test coverage.

You can also use `tox` to run the full suite of tests, including a quick test
to check documentation and code linting.

The following tox environments are relevant for local testing:

.. code-block:: text

tox -e py35   # runs all tests with Python 3.5
tox -e docs   # runs a does-it-compile check on the documentation
tox -e lint   # runs the linter for coding style checks

suite. The project tries to maintain 100% test coverage and enforces this strictly for some parts of the codebase.

Documentation
-------------
@@ -120,7 +114,7 @@ The mitmproxy documentation is build using Sphinx_, which is installed
automatically if you set up a development environment as described above. After
installation, you can render the documentation like this:

.. code-block:: text
.. code-block:: bash

cd docs
make clean
@@ -130,8 +124,8 @@ installation, you can render the documentation like this:
The last command invokes `sphinx-autobuild`_, which watches the Sphinx directory and rebuilds
the documentation when a change is detected.

Style
-----
Code Style
----------

Keeping to a consistent code style throughout the project makes it easier to
contribute and collaborate. Please stick to the guidelines in
@@ -142,7 +136,7 @@ This is automatically enforced on every PR. If we detect a linting error, the
PR checks will fail and block merging. You can run our lint checks yourself
with the following command:

.. code-block:: text
.. code-block:: bash

tox -e lint

@@ -151,7 +145,7 @@ with the following command:
:target: https://mitmproxy.org/
:alt: mitmproxy.org

.. |mitmproxy_docs| image:: https://readthedocs.org/projects/mitmproxy/badge/
.. |mitmproxy_docs| image:: https://shields.mitmproxy.org/api/docs-latest-brightgreen.svg
:target: http://docs.mitmproxy.org/en/latest/
:alt: mitmproxy documentation

@@ -163,15 +157,15 @@ with the following command:
:target: http://slack.mitmproxy.org/
:alt: Slack Developer Chat

.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=Travis%20build
.. |travis| image:: https://shields.mitmproxy.org/travis/mitmproxy/mitmproxy/master.svg?label=travis%20ci
:target: https://travis-ci.org/mitmproxy/mitmproxy
:alt: Travis Build Status

.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=Appveyor%20build
.. |appveyor| image:: https://shields.mitmproxy.org/appveyor/ci/mhils/mitmproxy/master.svg?label=appveyor%20ci
:target: https://ci.appveyor.com/project/mhils/mitmproxy
:alt: Appveyor Build Status

.. |coverage| image:: https://codecov.io/gh/mitmproxy/mitmproxy/branch/master/graph/badge.svg
.. |coverage| image:: https://shields.mitmproxy.org/codecov/c/github/mitmproxy/mitmproxy/master.svg?label=codecov
:target: https://codecov.io/gh/mitmproxy/mitmproxy
:alt: Coverage Status

@@ -183,11 +177,13 @@ with the following command:
:target: https://pypi.python.org/pypi/mitmproxy
:alt: Supported Python versions

.. _Python: https://www.python.org/
.. _virtualenv: http://virtualenv.readthedocs.org/en/latest/
.. _autoenv: https://github.com/kennethreitz/autoenv
.. _.env: https://github.com/mitmproxy/mitmproxy/blob/master/.env
.. _`advanced installation`: http://docs.mitmproxy.org/en/latest/install.html#advanced-installation
.. _virtualenv: https://virtualenv.pypa.io/
.. _`pytest`: http://pytest.org/
.. _tox: https://tox.readthedocs.io/
.. _Sphinx: http://sphinx-doc.org/
.. _sphinx-autobuild: https://pypi.python.org/pypi/sphinx-autobuild
.. _PEP8: https://www.python.org/dev/peps/pep-0008
.. _Google Style Guide: https://google.github.io/styleguide/pyguide.html
.. _`Google Style Guide`: https://google.github.io/styleguide/pyguide.html
.. _forums: https://discourse.mitmproxy.org/
.. _`good first contributions`: https://github.com/mitmproxy/mitmproxy/issues?q=is%3Aissue+is%3Aopen+label%3Agood-first-contribution
dev.ps1 (12 lines changed)

@@ -1,15 +1,19 @@
$ErrorActionPreference = "Stop"
$VENV = ".\venv"

python3 -m venv $VENV --copies
& $VENV\Scripts\activate.ps1
$pyver = python --version
if($pyver -notmatch "3\.[5-9]") {
Write-Warning "Unexpected Python version, expected Python 3.5 or above: $pyver"
}

python -m venv .\venv --copies
& .\venv\Scripts\activate.ps1

python -m pip install --disable-pip-version-check -U pip
cmd /c "pip install -r requirements.txt 2>&1"

echo @"

* Created virtualenv environment in $VENV.
* Created virtualenv environment in .\venv.
* Installed all dependencies into the virtualenv.
* Activated virtualenv environment.
dev.sh (18 lines changed)

@@ -2,16 +2,14 @@
set -e
set -x

PYVERSION=3.5
VENV="venv$PYVERSION"
echo "Creating dev environment in ./venv..."

echo "Creating dev environment in $VENV using Python $PYVERSION"

python$PYVERSION -m venv "$VENV"
. "$VENV/bin/activate"
pip$PYVERSION install -U pip setuptools
pip$PYVERSION install -r requirements.txt
python3 -m venv venv
. venv/bin/activate
pip3 install -U pip setuptools
pip3 install -r requirements.txt

echo ""
echo "* Virtualenv created in $VENV and all dependencies installed."
echo "* You can now activate the $(python --version) virtualenv with this command: \`. $VENV/bin/activate\`"
echo " * Created virtualenv environment in ./venv."
echo " * Installed all dependencies into the virtualenv."
echo " * You can now activate the $(python3 --version) virtualenv with this command: \`. venv/bin/activate\`"
@@ -24,6 +24,9 @@ something like this:
Click on the relevant icon, follow the setup instructions for the platform
you're on and you are good to go.

For iOS version 10.3 or up, you need to make sure ``mitmproxy`` is enabled in
``Certificate Trust Settings``, you can check it by going to
``Settings > General > About > Certificate Trust Settings``.

Installing the mitmproxy CA certificate manually
------------------------------------------------
@@ -40,7 +43,9 @@ start of mitmproxy.
iOS
^^^

http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377
See http://jasdev.me/intercepting-ios-traffic

and https://web.archive.org/web/20150920082614/http://kb.mit.edu/confluence/pages/viewpage.action?pageId=152600377

iOS Simulator
^^^^^^^^^^^^^
@@ -50,7 +55,7 @@ See https://github.com/ADVTOOLS/ADVTrustStore#how-to-use-advtruststore
Java
^^^^

See http://docs.oracle.com/cd/E19906-01/820-4916/geygn/index.html
See https://docs.oracle.com/cd/E19906-01/820-4916/geygn/index.html

Android/Android Simulator
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -60,7 +65,7 @@ See http://wiki.cacert.org/FAQ/ImportRootCert#Android_Phones_.26_Tablets
Windows
^^^^^^^

See http://windows.microsoft.com/en-ca/windows/import-export-certificates-private-keys#1TC=windows-7
See https://web.archive.org/web/20160612045445/http://windows.microsoft.com/en-ca/windows/import-export-certificates-private-keys#1TC=windows-7

Windows (automated)
^^^^^^^^^^^^^^^^^^^
@@ -77,7 +82,7 @@ See https://support.apple.com/kb/PH7297?locale=en_US
Ubuntu/Debian
^^^^^^^^^^^^^

See http://askubuntu.com/questions/73287/how-do-i-install-a-root-certificate/94861#94861
See https://askubuntu.com/questions/73287/how-do-i-install-a-root-certificate/94861#94861

Mozilla Firefox
^^^^^^^^^^^^^^^
@@ -87,7 +92,7 @@ See https://wiki.mozilla.org/MozillaRootCertificate#Mozilla_Firefox
Chrome on Linux
^^^^^^^^^^^^^^^

See https://code.google.com/p/chromium/wiki/LinuxCertManagement
See https://stackoverflow.com/a/15076602/198996


The mitmproxy certificate authority
@@ -130,7 +135,7 @@ mitmproxy-ca-cert.cer Same file as .pem, but with an extension expected by some
Using a custom certificate
--------------------------

You can use your own certificate by passing the ``--cert [domain=]path_to_certificate`` option to
You can use your own (leaf) certificate by passing the ``--cert [domain=]path_to_certificate`` option to
mitmproxy. Mitmproxy then uses the provided certificate for interception of the
specified domain instead of generating a certificate signed by its own CA.
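As an illustration of that option (not taken from the diff; the file and domain names below are hypothetical), the certificate is typically supplied as a single PEM file containing the private key followed by the certificate:

>>> openssl req -x509 -newkey rsa:2048 -nodes -days 365 -subj "/CN=example.com" -keyout example.key -out example.crt
>>> cat example.key example.crt > example.com.pem
>>> mitmdump --cert example.com=example.com.pem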
@@ -203,4 +208,4 @@ directory and uses this as the client cert.


.. _Certificate Pinning: http://security.stackexchange.com/questions/29988/what-is-certificate-pinning/
.. _Certificate Pinning: https://security.stackexchange.com/questions/29988/what-is-certificate-pinning/
@@ -1,14 +0,0 @@
.. _architecture:

Architecture
============

To give you a better understanding of how mitmproxy works, mitmproxy's
high-level architecture is detailed in the following graphic:

.. image:: ../schematics/architecture.png

:download:`architecture.pdf <../schematics/architecture.pdf>`

Please don't refrain from asking any further
questions on the mailing list, the Slack channel or the GitHub issue tracker.
docs/dev/contributing.rst (new file, 11 lines)

@@ -0,0 +1,11 @@
.. _contributing:

Contributing
============

As an open source project, **mitmproxy** welcomes contributions of all forms.

Please head over to the README_ to get started! 😃

.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst
@@ -1,47 +0,0 @@
|
||||
.. _testing:
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
All the mitmproxy projects strive to maintain 100% code coverage. In general,
|
||||
patches and pull requests will be declined unless they're accompanied by a
|
||||
suitable extension to the test suite.
|
||||
|
||||
Our tests are written for the `py.test`_ or nose_ test frameworks.
|
||||
At the point where you send your pull request, a command like this:
|
||||
|
||||
>>> py.test --cov mitmproxy
|
||||
|
||||
Should give output something like this:
|
||||
|
||||
.. code-block:: none
|
||||
|
||||
> ---------- coverage: platform darwin, python 2.7.2-final-0 --
|
||||
> Name Stmts Miss Cover Missing
|
||||
> ----------------------------------------------------
|
||||
> mitmproxy/__init__ 0 0 100%
|
||||
> mitmproxy/app 4 0 100%
|
||||
> mitmproxy/cmdline 100 0 100%
|
||||
> mitmproxy/controller 69 0 100%
|
||||
> mitmproxy/dump 150 0 100%
|
||||
> mitmproxy/encoding 39 0 100%
|
||||
> mitmproxy/flowfilter 201 0 100%
|
||||
> mitmproxy/flow 891 0 100%
|
||||
> mitmproxy/proxy 427 0 100%
|
||||
> mitmproxy/script 27 0 100%
|
||||
> mitmproxy/utils 133 0 100%
|
||||
> mitmproxy/version 4 0 100%
|
||||
> ----------------------------------------------------
|
||||
> TOTAL 2045 0 100%
|
||||
> ----------------------------------------------------
|
||||
> Ran 251 tests in 11.864s
|
||||
|
||||
|
||||
There are exceptions to the coverage requirement - for instance, much of the
|
||||
console interface code can't sensibly be unit tested. These portions are
|
||||
excluded from coverage analysis either in the **.coveragerc** file, or using
|
||||
**#pragma no-cover** directives. To keep our coverage analysis relevant, we use
|
||||
these measures as sparingly as possible.
|
||||
|
||||
.. _nose: https://nose.readthedocs.org/en/latest/
|
||||
.. _py.test: https://pytest.org/
|
||||
@@ -11,5 +11,5 @@ sure you capture an HTTP exchange in its totality. It's also often used during

================== ======================
command-line       ``--anticache``
mitmproxy shortcut :kbd:`o` then :kbd:`a`
mitmproxy shortcut :kbd:`O` then :kbd:`a`
================== ======================
@@ -23,7 +23,7 @@ How it works

================== ======================
command-line       ``--ignore regex``
mitmproxy shortcut :kbd:`o` then :kbd:`I`
mitmproxy shortcut :kbd:`O` then :kbd:`I`
================== ======================
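As a brief illustration of the ``--ignore`` option in the table above (the host pattern is only an example; the regex is matched against the server's host:port):

>>> mitmdump --ignore '^(.+\.)?example\.com:443$'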
@@ -48,25 +48,24 @@ In practice, it's pretty common for the replacement literal to be long and
complex. For instance, it might be an XSS exploit that weighs in at hundreds or
thousands of characters. To cope with this, there's a variation of the
replacement hook specifier that lets you load the replacement text from a file.
So, you might start **mitmdump** as follows:
To specify a file as replacement, prefix the file path with ``@``.
You might start **mitmdump** as follows:

>>> mitmdump --replace-from-file :~q:foo:~/xss-exploit
>>> mitmdump --replacements :~q:foo:@~/xss-exploit

This will load the replacement text from the file ``~/xss-exploit``.

Both the ``--replace`` and ``--replace-from-file`` flags can be passed multiple
times.
The ``--replacements`` flag can be passed multiple times.


Interactively
-------------

The :kbd:`R` shortcut key in the mitmproxy options menu (:kbd:`o`) lets you add and edit
The :kbd:`R` shortcut key in the mitmproxy options menu (:kbd:`O`) lets you add and edit
replacement hooks using a built-in editor. The context-sensitive help (:kbd:`?`) has
complete usage information.

================== =======================
command-line       ``--replace``,
                   ``--replace-from-file``
mitmproxy shortcut :kbd:`o` then :kbd:`R`
command-line       ``--replacements``
mitmproxy shortcut :kbd:`O` then :kbd:`R`
================== =======================
@@ -31,7 +31,8 @@ Host Header

In reverse proxy mode, mitmproxy automatically rewrites the Host header to match the
upstream server. This allows mitmproxy to easily connect to existing endpoints on the
open web (e.g. ``mitmproxy -R https://example.com``).
open web (e.g. ``mitmproxy -R https://example.com``). You can disable this behaviour
by passing ``--keep-host-header`` on the console.

However, keep in mind that absolute URLs within the returned document or HTTP redirects will
NOT be rewritten by mitmproxy. This means that if you click on a link for "http://example.com"
@@ -39,4 +40,4 @@ in the returned web page, you will be taken directly to that URL, bypassing mitm

One possible way to address this is to modify the hosts file of your OS so that "example.com"
resolves to your proxy's IP, and then access the proxy by going directly to example.com.
Make sure that your proxy can still resolve the original IP, or specify an IP in mitmproxy.
Make sure that your proxy can still resolve the original IP, or specify an IP in mitmproxy.
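To make the two behaviours described in this hunk concrete (the upstream URL is illustrative):

>>> mitmdump -R https://example.com                      # Host header rewritten to the upstream server
>>> mitmdump -R https://example.com --keep-host-header   # original Host header passed through unchanged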
@@ -31,7 +31,20 @@ in the past at the time of replay, and vice versa. Cookie expiry times are
updated in a similar way.

You can turn off response refreshing using the ``--norefresh`` argument, or using
the :kbd:`o` options shortcut within :program:`mitmproxy`.
the :kbd:`O` options shortcut within :program:`mitmproxy`.


Replaying a session recorded in Reverse-proxy Mode
--------------------------------------------------

If you have captured the session in reverse proxy mode, in order to replay it you
still have to specify the server URL, otherwise you may get the error:
'HTTP protocol error in client request: Invalid HTTP request form (expected authority or absolute...)'.

During replay, when the client's requests match previously recorded requests, then the
respective recorded responses are simply replayed by mitmproxy.
Otherwise, the unmatched requests is forwarded to the upstream server.
If forwarding is not desired, you can use the --kill (-k) switch to prevent that.

================== ===========
command-line       ``-S path``
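For example, a session recorded in reverse proxy mode might be replayed with a command along these lines (the flow file name is hypothetical; ``-S`` supplies the recorded flows, ``-R`` the server URL, and ``-k`` kills unmatched requests instead of forwarding them):

>>> mitmdump -S recorded.flows -R https://example.com -k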
@@ -15,5 +15,5 @@ Example: Set the **Host** header to "example.com" for all requests.

================== =======================
command-line       ``--setheader PATTERN``
mitmproxy shortcut :kbd:`o` then :kbd:`H`
mitmproxy shortcut :kbd:`O` then :kbd:`H`
================== =======================
@@ -22,7 +22,7 @@ to interact with the secured resources.

================== ======================
command-line       ``-t FILTER``
mitmproxy shortcut :kbd:`o` then :kbd:`t`
mitmproxy shortcut :kbd:`O` then :kbd:`t`
================== ======================


@@ -37,5 +37,5 @@ replay of HTTP Digest authentication.

================== ======================
command-line       ``-u FILTER``
mitmproxy shortcut :kbd:`o` then :kbd:`A`
mitmproxy shortcut :kbd:`O` then :kbd:`A`
================== ======================
@@ -19,7 +19,7 @@ How it works

================== ======================
command-line       ``--tcp HOST``
mitmproxy shortcut :kbd:`o` then :kbd:`T`
mitmproxy shortcut :kbd:`O` then :kbd:`T`
================== ======================

For a detailed description how the hostname pattern works, please look at the :ref:`passthrough`

@@ -19,5 +19,5 @@ Upstream cert sniffing is on by default, and can optionally be turned off.

================== ======================
command-line       ``--no-upstream-cert``
mitmproxy shortcut :kbd:`o` then :kbd:`U`
mitmproxy shortcut :kbd:`O` then :kbd:`U`
================== ======================
@@ -43,7 +43,7 @@ client connects to the proxy and makes a request that looks like this:

CONNECT example.com:443 HTTP/1.1

A conventional proxy can neither view nor manipulate an TLS-encrypted data
A conventional proxy can neither view nor manipulate a TLS-encrypted data
stream, so a CONNECT request simply asks the proxy to open a pipe between the
client and server. The proxy here is just a facilitator - it blindly forwards
data in both directions without knowing anything about the contents. The
@@ -63,7 +63,7 @@ exactly this attack, by allowing a trusted third-party to cryptographically sign
a server's certificates to verify that they are legit. If this signature doesn't
match or is from a non-trusted party, a secure client will simply drop the
connection and refuse to proceed. Despite the many shortcomings of the CA system
as it exists today, this is usually fatal to attempts to MITM an TLS connection
as it exists today, this is usually fatal to attempts to MITM a TLS connection
for analysis. Our answer to this conundrum is to become a trusted Certificate
Authority ourselves. Mitmproxy includes a full CA implementation that generates
interception certificates on the fly. To get the client to trust these
@@ -143,7 +143,7 @@ Lets put all of this together into the complete explicitly proxied HTTPS flow.
2. Mitmproxy responds with a ``200 Connection Established``, as if it has set up the CONNECT pipe.
3. The client believes it's talking to the remote server, and initiates the TLS connection.
   It uses SNI to indicate the hostname it is connecting to.
4. Mitmproxy connects to the server, and establishes an TLS connection using the SNI hostname
4. Mitmproxy connects to the server, and establishes a TLS connection using the SNI hostname
   indicated by the client.
5. The server responds with the matching certificate, which contains the CN and SAN values
   needed to generate the interception certificate.
@@ -217,7 +217,7 @@ explicit HTTPS connections to establish the CN and SANs, and cope with SNI.
   destination was.
3. The client believes it's talking to the remote server, and initiates the TLS connection.
   It uses SNI to indicate the hostname it is connecting to.
4. Mitmproxy connects to the server, and establishes an TLS connection using the SNI hostname
4. Mitmproxy connects to the server, and establishes a TLS connection using the SNI hostname
   indicated by the client.
5. The server responds with the matching certificate, which contains the CN and SAN values
   needed to generate the interception certificate.
@@ -47,6 +47,7 @@

transparent
transparent/linux
transparent/osx
transparent/openbsd

.. toctree::
:hidden:
@@ -78,10 +79,9 @@

.. toctree::
:hidden:
:caption: Hacking
:caption: Development

dev/architecture
dev/testing
dev/contributing
dev/sslkeylogfile

.. Indices and tables
@@ -20,7 +20,7 @@ You can use Homebrew to install everything:

brew install mitmproxy

Or you can download the pre-built binary packages from `mitmproxy.org`_.
Or you can download the pre-built binary packages from our `releases`_.


.. _install-windows:
@@ -35,7 +35,7 @@ Both executables are added to your PATH and can be invoked from the command
line.

.. note::
mitmproxy's console interface is not supported on Windows, but you can use
Mitmproxy's console interface is not supported on Windows, but you can use
mitmweb (the web-based interface) and mitmdump.

.. _install-linux:
@@ -44,7 +44,7 @@ Installation on Linux
---------------------

The recommended way to run mitmproxy on Linux is to use the pre-built binaries
provided at `mitmproxy.org`_.
provided at `releases`_.

Our pre-built binaries provide you with the latest version of mitmproxy, a
self-contained Python 3.5 environment and a recent version of OpenSSL that
@@ -85,7 +85,7 @@ libraries. This was tested on a fully patched installation of Ubuntu 16.04.

.. code:: bash

sudo apt-get install python3-pip python3-dev libffi-dev libssl-dev libtiff5-dev libjpeg8-dev zlib1g-dev libwebp-dev
sudo apt-get install python3-dev python3-pip libffi-dev libssl-dev
sudo pip3 install mitmproxy  # or pip3 install --user mitmproxy

On older Ubuntu versions, e.g., **12.04** and **14.04**, you may need to install
@@ -104,26 +104,40 @@ libraries. This was tested on a fully patched installation of Fedora 24.

.. code:: bash

sudo dnf install make gcc redhat-rpm-config python3-pip python3-devel libffi-devel openssl-devel libtiff-devel libjpeg-devel zlib-devel libwebp-devel openjpeg2-devel
sudo dnf install make gcc redhat-rpm-config python3-devel python3-pip libffi-devel openssl-devel
sudo pip3 install mitmproxy  # or pip3 install --user mitmproxy

Make sure to have an up-to-date version of pip by running ``pip3 install -U pip``.


.. _install-source-opensuse:

Installation from Source on openSUSE
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

This was tested on a fully patched installation of openSUSE Tumbleweed.
Please note that openSUSE Leap 42.2 only comes with Python 3.4.x, whereas mitmproxy requires Python 3.5 or above.
You can check you Python version by running ``python3 --version``.

.. code:: bash

sudo zypper install python3-pip python3-devel libffi-devel openssl-devel gcc-c++
sudo pip3 install mitmproxy


.. _install-source-windows:

🐱💻 Installation from Source on Windows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Installation from Source on Windows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. note::
mitmproxy's console interface is not supported on Windows, but you can use
Mitmproxy's console interface is not supported on Windows, but you can use
mitmweb (the web-based interface) and mitmdump.

First, install the latest version of Python 3.5 or later from the `Python
website`_. During installation, make sure to select `Add Python to PATH`.

Now, you can install mitmproxy by running
Mitmproxy has no other dependencies on Windows. You can now install mitmproxy by running

.. code:: powershell

@@ -139,11 +153,12 @@ Latest Development Version

If you would like to install mitmproxy directly from the master branch on GitHub
or would like to get set up to contribute to the project, install the
dependencies as you would for a regular installation from source. Then see the
Hacking_ section of the README on GitHub. You can check your system information
by running: ``mitmproxy --sysinfo``
project's README_ on GitHub. You can check your system information
by running: ``mitmproxy --version``


.. _Hacking: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst#hacking
.. _README: https://github.com/mitmproxy/mitmproxy/blob/master/README.rst
.. _releases: https://github.com/mitmproxy/mitmproxy/releases
.. _mitmproxy.org: https://mitmproxy.org/
.. _`Python website`: https://www.python.org/downloads/windows/
.. _pip: https://pip.pypa.io/en/latest/installing.html
@@ -6,6 +6,8 @@ with a console interface.

**mitmdump** is the command-line version of mitmproxy. Think tcpdump for HTTP.

**mitmweb** is a web-based interface for mitmproxy.

Documentation, tutorials and distribution packages can be found on the
mitmproxy website: `mitmproxy.org <https://mitmproxy.org/>`_
@@ -66,7 +66,7 @@ At the moment, the Grid Editor is used in four parts of mitmproxy:
- Editing request or response headers (:kbd:`e` for edit, then :kbd:`h` for headers in flow view)
- Editing a query string (:kbd:`e` for edit, then :kbd:`q` for query in flow view)
- Editing a URL-encoded form (:kbd:`e` for edit, then :kbd:`f` for form in flow view)
- Editing replacement patterns (:kbd:`o` for options, then :kbd:`R` for Replacement Patterns)
- Editing replacement patterns (:kbd:`O` for options, then :kbd:`R` for Replacement Patterns)

If there is is no data, an empty editor will be started to let you add some.
Here is the editor showing the headers from a request:
@@ -23,7 +23,7 @@ HTTP Request
mode if the server responds correctly. Apart from that, websocket
requests are just like any other, and all aspects of the request
can be over-ridden.
* - h\:\ :ref:`VALUE`\ =\ :ref:`VALUE`\
* - h\ :ref:`VALUE`\ =\ :ref:`VALUE`\
  - Set a header.
* - r
  - Set the **raw** flag on this response. Pathod will not calculate a
@@ -73,7 +73,7 @@ HTTP Response
* - m\ :ref:`VALUE`
  - HTTP Reason message. Automatically chosen according to the response
    code if not specified. (HTTP/1 only)
* - h\:\ :ref:`VALUE`\ =\ :ref:`VALUE`\
* - h\ :ref:`VALUE`\ =\ :ref:`VALUE`\
  - Set a header.
* - r
  - Set the **raw** flag on this response. Pathod will not calculate a
Binary file not shown.
Binary file not shown. (image, 85 KiB before)
Binary file not shown.
@@ -29,6 +29,12 @@ will be added to all responses passing through the proxy:

>>> mitmdump -s add_header.py
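In essence, the ``add_header.py`` script referenced here is a module-level ``response`` hook; a minimal sketch (the header name and value are placeholders, not the exact contents of the example file):

    def response(flow):
        # Called for every server response; add a marker header before it
        # is forwarded to the client.
        flow.response.headers["newheader"] = "hello"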

Examples
--------

A collection of addons that demonstrate popular features can be found at :src:`examples/simple`.


Using classes
-------------

@@ -54,24 +60,8 @@ and is replaced by the class instance.
Handling arguments
------------------

Scripts can handle their own command-line arguments, just like any other Python
program. Let's build on the example above to do something slightly more
sophisticated - replace one value with another in all responses. Mitmproxy's
`HTTPRequest <api.html#mitmproxy.models.http.HTTPRequest>`_ and `HTTPResponse
<api.html#mitmproxy.models.http.HTTPResponse>`_ objects have a handy `replace
<api.html#mitmproxy.models.http.HTTPResponse.replace>`_ method that takes care
of all the details for us.

.. literalinclude:: ../../examples/simple/script_arguments.py
:caption: :src:`examples/simple/script_arguments.py`
:language: python

We can now call this script on the command-line like this:

>>> mitmdump -dd -s "./script_arguments.py html faketml"

Whenever a handler is called, mitpmroxy rewrites the script environment so that
it sees its own arguments as if it was invoked from the command-line.
FIXME
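Based only on the removed prose above (the actual ``examples/simple/script_arguments.py`` is not shown in this diff), argument handling combined with the ``replace`` method looks roughly like this; the argument names are illustrative:

    import sys

    # Loaded as: mitmdump -dd -s "./script_arguments.py html faketml"
    # The words after the script path appear as the script's own argv.
    src, dst = sys.argv[1], sys.argv[2]

    def response(flow):
        # Replace one literal value with another in every response.
        flow.response.replace(src, dst)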

Logging and the context

@@ -27,7 +27,7 @@ Fully transparent mode
By default mitmproxy will use its own local ip address for its server-side connections.
In case this isn't desired, the --spoof-source-address argument can be used to
use the client's ip address for server-side connections. The following config is
required for this mode to work:
required for this mode to work::

CLIENT_NET=192.168.1.0/24
TABLE_ID=100
@@ -42,9 +42,9 @@ required for this mode to work:

This mode does require root privileges though. There's a wrapper in the examples directory
called 'mitmproxy_shim.c', which will enable you to use this mode with dropped priviliges.
It can be used as follows:
It can be used as follows::

gcc examples/mitmproxy_shim.c -o mitmproxy_shim -lcap
gcc examples/complex/full_transparency_shim.c -o mitmproxy_shim -lcap
sudo chown root:root mitmproxy_shim
sudo chmod u+s mitmproxy_shim
./mitmproxy_shim $(which mitmproxy) -T --spoof-source-address
docs/transparent/openbsd.rst (new file, 53 lines)

@@ -0,0 +1,53 @@
.. _openbsd:

OpenBSD
=======

1. :ref:`Install the mitmproxy certificate on the test device <certinstall>`

2. Enable IP forwarding:

   >>> sudo sysctl -w net.inet.ip.forwarding=1

3. Place the following two lines in **/etc/pf.conf**:

   .. code-block:: none

   mitm_if = "re2"
   pass in quick proto tcp from $mitm_if to port { 80, 443 } divert-to 127.0.0.1 port 8080

   These rules tell pf to divert all traffic from ``$mitm_if`` destined for
   port 80 or 443 to the local mitmproxy instance running on port 8080. You
   should replace ``$mitm_if`` value with the interface on which your test
   device will appear.

4. Configure pf with the rules:

   >>> doas pfctl -f /etc/pf.conf

5. And now enable it:

   >>> doas pfctl -e

6. Fire up mitmproxy. You probably want a command like this:

   >>> mitmproxy -T --host

   The ``-T`` flag turns on transparent mode, and the ``--host``
   argument tells mitmproxy to use the value of the Host header for URL display.

7. Finally, configure your test device to use the host on which mitmproxy is
   running as the default gateway.

.. note::

   Note that the **divert-to** rules in the pf.conf given above only apply to
   inbound traffic. **This means that they will NOT redirect traffic coming
   from the box running pf itself.** We can't distinguish between an outbound
   connection from a non-mitmproxy app, and an outbound connection from
   mitmproxy itself - if you want to intercept your traffic, you should use an
   external host to run mitmproxy. Nonetheless, pf is flexible to cater for a
   range of creative possibilities, like intercepting traffic emanating from
   VMs. See the **pf.conf** man page for more.

.. _pf: http://man.openbsd.org/OpenBSD-current/man5/pf.conf.5
@@ -63,7 +63,7 @@ Note that this means we don't support transparent mode for earlier versions of O

running pf itself.** We can't distinguish between an outbound connection from a
non-mitmproxy app, and an outbound connection from mitmproxy itself - if you
want to intercept your OSX traffic, you should use an external host to run
mitmproxy. None the less, pf is flexible to cater for a range of creative
mitmproxy. Nonetheless, pf is flexible to cater for a range of creative
possibilities, like intercepting traffic emanating from VMs. See the
**pf.conf** man page for more.

@@ -5,14 +5,14 @@

| change_upstream_proxy.py | Dynamically change the upstream proxy. |
| dns_spoofing.py | Use mitmproxy in a DNS spoofing scenario. |
| dup_and_replay.py | Duplicates each request, changes it, and then replays the modified request. |
| flowbasic.py | Basic use of mitmproxy's FlowMaster directly. |
| full_transparency_shim.c | Setuid wrapper that can be used to run mitmproxy in full transparency mode, as a normal user. |
| har_dump.py | Dump flows as HAR files. |
| mitmproxywrapper.py | Bracket mitmproxy run with proxy enable/disable on OS X |
| nonblocking.py | Demonstrate parallel processing with a blocking script |
| remote_debug.py | This script enables remote debugging of the mitmproxy _UI_ with PyCharm. |
| sslstrip.py | sslstrip-like functionality implemented with mitmproxy |
| stream | Enable streaming for all responses. |
| stream.py | Enable streaming for all responses. |
| stream_modify.py | Modify a streamed response body. |
| tcp_message.py | Modify a raw TCP connection |
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |
| tls_passthrough.py | Use conditional TLS interception based on a user-defined strategy. |
| xss_scanner.py | Scan all visited webpages. |
@@ -1,11 +1,12 @@
|
||||
"""
|
||||
This inline scripts makes it possible to use mitmproxy in scenarios where IP spoofing has been used to redirect
|
||||
connections to mitmproxy. The way this works is that we rely on either the TLS Server Name Indication (SNI) or the
|
||||
Host header of the HTTP request.
|
||||
Of course, this is not foolproof - if an HTTPS connection comes without SNI, we don't
|
||||
know the actual target and cannot construct a certificate that looks valid.
|
||||
Similarly, if there's no Host header or a spoofed Host header, we're out of luck as well.
|
||||
Using transparent mode is the better option most of the time.
|
||||
This script makes it possible to use mitmproxy in scenarios where IP spoofing
|
||||
has been used to redirect connections to mitmproxy. The way this works is that
|
||||
we rely on either the TLS Server Name Indication (SNI) or the Host header of the
|
||||
HTTP request. Of course, this is not foolproof - if an HTTPS connection comes
|
||||
without SNI, we don't know the actual target and cannot construct a certificate
|
||||
that looks valid. Similarly, if there's no Host header or a spoofed Host header,
|
||||
we're out of luck as well. Using transparent mode is the better option most of
|
||||
the time.
|
||||
|
||||
Usage:
|
||||
mitmproxy
|
||||
@@ -13,6 +14,8 @@ Usage:
|
||||
-s dns_spoofing.py
|
||||
# Used as the target location if neither SNI nor host header are present.
|
||||
-R http://example.com/
|
||||
# To avoid auto rewriting of host header by the reverse proxy target.
|
||||
--keep-host-header
|
||||
mitmdump
|
||||
-p 80
|
||||
-R http://localhost:443/
|
||||
@@ -28,22 +31,27 @@ import re
|
||||
parse_host_header = re.compile(r"^(?P<host>[^:]+|\[.+\])(?::(?P<port>\d+))?$")
|
||||
|
||||
|
||||
def request(flow):
|
||||
if flow.client_conn.ssl_established:
|
||||
flow.request.scheme = "https"
|
||||
sni = flow.client_conn.connection.get_servername()
|
||||
port = 443
|
||||
else:
|
||||
flow.request.scheme = "http"
|
||||
sni = None
|
||||
port = 80
|
||||
class Rerouter:
|
||||
def request(self, flow):
|
||||
if flow.client_conn.ssl_established:
|
||||
flow.request.scheme = "https"
|
||||
sni = flow.client_conn.connection.get_servername()
|
||||
port = 443
|
||||
else:
|
||||
flow.request.scheme = "http"
|
||||
sni = None
|
||||
port = 80
|
||||
|
||||
host_header = flow.request.pretty_host
|
||||
m = parse_host_header.match(host_header)
|
||||
if m:
|
||||
host_header = m.group("host").strip("[]")
|
||||
if m.group("port"):
|
||||
port = int(m.group("port"))
|
||||
host_header = flow.request.host_header
|
||||
m = parse_host_header.match(host_header)
|
||||
if m:
|
||||
host_header = m.group("host").strip("[]")
|
||||
if m.group("port"):
|
||||
port = int(m.group("port"))
|
||||
|
||||
flow.request.host = sni or host_header
|
||||
flow.request.port = port
|
||||
flow.request.host_header = host_header
|
||||
flow.request.host = sni or host_header
|
||||
flow.request.port = port
|
||||
|
||||
|
||||
addons = [Rerouter()]
|
||||
|
||||
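
As a quick illustration of what ``parse_host_header`` accepts (the values here are
made up for illustration): ``example.com:8443`` matches with ``host="example.com"``
and ``port="8443"``, while a bracketed IPv6 literal such as ``[2001:db8::1]:8080``
matches via the ``\[.+\]`` alternative, after which ``strip("[]")`` removes the
brackets before the host is assigned back to the flow.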
@@ -1,43 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
This example shows how to build a proxy based on mitmproxy's Flow
|
||||
primitives.
|
||||
|
||||
Heads Up: In the majority of cases, you want to use inline scripts.
|
||||
|
||||
Note that request and response messages are not automatically replied to,
|
||||
so we need to implement handlers to do this.
|
||||
"""
|
||||
from mitmproxy import controller, options, master
|
||||
from mitmproxy.proxy import ProxyServer, ProxyConfig
|
||||
|
||||
|
||||
class MyMaster(master.Master):
|
||||
def run(self):
|
||||
try:
|
||||
master.Master.run(self)
|
||||
except KeyboardInterrupt:
|
||||
self.shutdown()
|
||||
|
||||
@controller.handler
|
||||
def request(self, f):
|
||||
print("request", f)
|
||||
|
||||
@controller.handler
|
||||
def response(self, f):
|
||||
print("response", f)
|
||||
|
||||
@controller.handler
|
||||
def error(self, f):
|
||||
print("error", f)
|
||||
|
||||
@controller.handler
|
||||
def log(self, l):
|
||||
print("log", l.msg)
|
||||
|
||||
|
||||
opts = options.Options(cadir="~/.mitmproxy/")
|
||||
config = ProxyConfig(opts)
|
||||
server = ProxyServer(config)
|
||||
m = MyMaster(opts, server)
|
||||
m.run()
|
||||
@@ -4,16 +4,17 @@ This inline script can be used to dump flows as HAR files.
|
||||
|
||||
|
||||
import json
|
||||
import sys
|
||||
import base64
|
||||
import zlib
|
||||
import os
|
||||
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
from datetime import timezone
|
||||
|
||||
import mitmproxy
|
||||
|
||||
from mitmproxy import version
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.utils import strutils
|
||||
from mitmproxy.net.http import cookies
|
||||
|
||||
@@ -24,17 +25,13 @@ HAR = {}
|
||||
SERVERS_SEEN = set()
|
||||
|
||||
|
||||
def start():
|
||||
"""
|
||||
Called once on script startup before any other events.
|
||||
"""
|
||||
if len(sys.argv) != 2:
|
||||
raise ValueError(
|
||||
'Usage: -s "har_dump.py filename" '
|
||||
'(- will output to stdout, filenames ending with .zhar '
|
||||
'will result in compressed har)'
|
||||
)
|
||||
def load(l):
|
||||
l.add_option(
|
||||
"hardump", str, "", "HAR dump path.",
|
||||
)
|
||||
|
||||
|
||||
def configure(updated):
|
||||
HAR.update({
|
||||
"log": {
|
||||
"version": "1.2",
|
||||
@@ -88,7 +85,7 @@ def response(flow):
|
||||
# Timings set to -1 will be ignored as per spec.
|
||||
full_time = sum(v for v in timings.values() if v > -1)
|
||||
|
||||
started_date_time = format_datetime(datetime.utcfromtimestamp(flow.request.timestamp_start))
|
||||
started_date_time = datetime.fromtimestamp(flow.request.timestamp_start, timezone.utc).isoformat()
|
||||
|
||||
# Response body size and encoding
|
||||
response_body_size = len(flow.response.raw_content)
|
||||
@@ -146,7 +143,7 @@ def response(flow):
|
||||
}
|
||||
|
||||
if flow.server_conn.connected():
|
||||
entry["serverIPAddress"] = str(flow.server_conn.ip_address.address[0])
|
||||
entry["serverIPAddress"] = str(flow.server_conn.ip_address[0])
|
||||
|
||||
HAR["log"]["entries"].append(entry)
|
||||
|
||||
@@ -155,25 +152,20 @@ def done():
|
||||
"""
|
||||
Called once on script shutdown, after any other events.
|
||||
"""
|
||||
dump_file = sys.argv[1]
|
||||
if ctx.options.hardump:
|
||||
json_dump = json.dumps(HAR, indent=2) # type: str
|
||||
|
||||
json_dump = json.dumps(HAR, indent=2) # type: str
|
||||
if ctx.options.hardump == '-':
|
||||
mitmproxy.ctx.log(json_dump)
|
||||
else:
|
||||
raw = json_dump.encode() # type: bytes
|
||||
if ctx.options.hardump.endswith('.zhar'):
|
||||
raw = zlib.compress(raw, 9)
|
||||
|
||||
if dump_file == '-':
|
||||
mitmproxy.ctx.log(json_dump)
|
||||
else:
|
||||
raw = json_dump.encode() # type: bytes
|
||||
if dump_file.endswith('.zhar'):
|
||||
raw = zlib.compress(raw, 9)
|
||||
with open(os.path.expanduser(ctx.options.hardump), "wb") as f:
|
||||
f.write(raw)
|
||||
|
||||
with open(dump_file, "wb") as f:
|
||||
f.write(raw)
|
||||
|
||||
mitmproxy.ctx.log("HAR dump finished (wrote %s bytes to file)" % len(json_dump))
|
||||
|
||||
|
||||
def format_datetime(dt):
|
||||
return dt.replace(tzinfo=pytz.timezone("UTC")).isoformat()
|
||||
mitmproxy.ctx.log("HAR dump finished (wrote %s bytes to file)" % len(json_dump))
|
||||
|
||||
|
||||
def format_cookies(cookie_list):
|
||||
@@ -197,7 +189,7 @@ def format_cookies(cookie_list):
|
||||
# Expiration time needs to be formatted
|
||||
expire_ts = cookies.get_expiration_ts(attrs)
|
||||
if expire_ts is not None:
|
||||
cookie_har["expires"] = format_datetime(datetime.fromtimestamp(expire_ts))
|
||||
cookie_har["expires"] = datetime.fromtimestamp(expire_ts, timezone.utc).isoformat()
|
||||
|
||||
rv.append(cookie_har)
|
||||
|
||||
@@ -209,7 +201,7 @@ def format_request_cookies(fields):
|
||||
|
||||
|
||||
def format_response_cookies(fields):
|
||||
return format_cookies((c[0], c[1].value, c[1].attrs) for c in fields)
|
||||
return format_cookies((c[0], c[1][0], c[1][1]) for c in fields)
|
||||
|
||||
|
||||
def name_value(obj):
|
||||
|
||||
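
Usage note: with this change the dump location comes from the new ``hardump``
option rather than a positional script argument, e.g.
``mitmdump -s examples/complex/har_dump.py --set hardump=./dump.har`` (assuming
this mitmproxy version's ``--set`` flag for options). As before, ``-`` sends the
HAR to the event log and a ``.zhar`` suffix produces a zlib-compressed dump.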
@@ -14,6 +14,6 @@ Usage:
|
||||
"""
|
||||
|
||||
|
||||
def start():
|
||||
def load(l):
|
||||
import pydevd
|
||||
pydevd.settrace("localhost", port=5678, stdoutToServer=True, stderrToServer=True)
|
||||
|
||||
@@ -23,10 +23,10 @@ Authors: Maximilian Hils, Matthew Tuusberg
|
||||
import collections
|
||||
import random
|
||||
|
||||
import sys
|
||||
from enum import Enum
|
||||
|
||||
import mitmproxy
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.exceptions import TlsProtocolException
|
||||
from mitmproxy.proxy.protocol import TlsLayer, RawTCPLayer
|
||||
|
||||
@@ -112,10 +112,16 @@ class TlsFeedback(TlsLayer):
|
||||
tls_strategy = None
|
||||
|
||||
|
||||
def start():
|
||||
def load(l):
|
||||
l.add_option(
|
||||
"tlsstrat", int, 0, "TLS passthrough strategy (0-100)",
|
||||
)
|
||||
|
||||
|
||||
def configure(updated):
|
||||
global tls_strategy
|
||||
if len(sys.argv) == 2:
|
||||
tls_strategy = ProbabilisticStrategy(float(sys.argv[1]))
|
||||
if ctx.options.tlsstrat > 0:
|
||||
tls_strategy = ProbabilisticStrategy(float(ctx.options.tlsstrat) / 100.0)
|
||||
else:
|
||||
tls_strategy = ConservativeStrategy()
|
||||
|
||||
|
||||
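
Usage note: the passthrough strategy is now driven by the ``tlsstrat`` option (a
percentage) instead of a positional argument; for example
``mitmdump -s examples/complex/tls_passthrough.py --set tlsstrat=30`` (assuming
the ``--set`` flag) selects ``ProbabilisticStrategy(0.3)``, while leaving it at 0
keeps the default ``ConservativeStrategy``.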
407
examples/complex/xss_scanner.py
Executable file
@@ -0,0 +1,407 @@
|
||||
"""
|
||||
|
||||
__ __ _____ _____ _____
|
||||
\ \ / // ____/ ____| / ____|
|
||||
\ V /| (___| (___ | (___ ___ __ _ _ __ _ __ ___ _ __
|
||||
> < \___ \\___ \ \___ \ / __/ _` | '_ \| '_ \ / _ \ '__|
|
||||
/ . \ ____) |___) | ____) | (_| (_| | | | | | | | __/ |
|
||||
/_/ \_\_____/_____/ |_____/ \___\__,_|_| |_|_| |_|\___|_|
|
||||
|
||||
|
||||
This script automatically scans all visited webpages for XSS and SQLi vulnerabilities.
|
||||
|
||||
Usage: mitmproxy -s xss_scanner.py
|
||||
|
||||
This script scans for vulnerabilities by injecting a fuzzing payload (see PAYLOAD below) into 4 different places
|
||||
and examining the HTML to look for XSS and SQLi injection vulnerabilities. The XSS scanning functionality works by
|
||||
looking to see whether it is possible to inject HTML based off of where the payload appears in the page and what
|
||||
characters are escaped. In addition, it also looks for any script tags that load javascript from unclaimed domains.
|
||||
The SQLi scanning functionality works by using regular expressions to look for errors from a number of different
|
||||
common databases. Since it is only looking for errors, it will not find blind SQLi vulnerabilities.
|
||||
|
||||
The 4 places it injects the payload into are:
|
||||
1. URLs (e.g. https://example.com/ -> https://example.com/PAYLOAD/)
|
||||
2. Queries (e.g. https://example.com/index.html?a=b -> https://example.com/index.html?a=PAYLOAD)
|
||||
3. Referers (e.g. The referer changes from https://example.com to PAYLOAD)
|
||||
4. User Agents (e.g. The UA changes from Chrome to PAYLOAD)
|
||||
|
||||
Reports from this script show up in the event log (viewable by pressing e) and formatted like:
|
||||
|
||||
===== XSS Found ====
|
||||
XSS URL: http://daviddworken.com/vulnerableUA.php
|
||||
Injection Point: User Agent
|
||||
Suggested Exploit: <script>alert(0)</script>
|
||||
Line: 1029zxcs'd"ao<ac>so[sb]po(pc)se;sl/bsl\eq=3847asd
|
||||
|
||||
"""
|
||||
|
||||
from mitmproxy import ctx
|
||||
from socket import gaierror, gethostbyname
|
||||
from urllib.parse import urlparse
|
||||
import requests
|
||||
import re
|
||||
from html.parser import HTMLParser
|
||||
from mitmproxy import http
|
||||
from typing import Dict, Union, Tuple, Optional, List, NamedTuple
|
||||
|
||||
# The actual payload is put between a frontWall and a backWall to make it easy
|
||||
# to locate the payload with regular expressions
|
||||
FRONT_WALL = b"1029zxc"
|
||||
BACK_WALL = b"3847asd"
|
||||
PAYLOAD = b"""s'd"ao<ac>so[sb]po(pc)se;sl/bsl\\eq="""
|
||||
FULL_PAYLOAD = FRONT_WALL + PAYLOAD + BACK_WALL
|
||||
|
||||
# A XSSData is a named tuple with the following fields:
|
||||
# - url -> str
|
||||
# - injection_point -> str
|
||||
# - exploit -> str
|
||||
# - line -> str
|
||||
XSSData = NamedTuple('XSSData', [('url', str),
|
||||
('injection_point', str),
|
||||
('exploit', str),
|
||||
('line', str)])
|
||||
|
||||
# A SQLiData is a named tuple with the following fields:
|
||||
# - url -> str
|
||||
# - injection_point -> str
|
||||
# - regex -> str
|
||||
# - dbms -> str
|
||||
SQLiData = NamedTuple('SQLiData', [('url', str),
|
||||
('injection_point', str),
|
||||
('regex', str),
|
||||
('dbms', str)])
|
||||
|
||||
|
||||
VulnData = Tuple[Optional[XSSData], Optional[SQLiData]]
|
||||
Cookies = Dict[str, str]
|
||||
|
||||
|
||||
def get_cookies(flow: http.HTTPFlow) -> Cookies:
|
||||
""" Return a dict going from cookie names to cookie values
|
||||
- Note that it includes both the cookies sent in the original request and
|
||||
the cookies sent by the server """
|
||||
return {name: value for name, value in flow.request.cookies.fields}
|
||||
|
||||
|
||||
def find_unclaimed_URLs(body: Union[str, bytes], requestUrl: bytes) -> None:
|
||||
""" Look for unclaimed URLs in script tags and log them if found"""
|
||||
class ScriptURLExtractor(HTMLParser):
|
||||
script_URLs = []
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if tag == "script" and "src" in [name for name, value in attrs]:
|
||||
for name, value in attrs:
|
||||
if name == "src":
|
||||
self.script_URLs.append(value)
|
||||
|
||||
parser = ScriptURLExtractor()
|
||||
try:
|
||||
parser.feed(body)
|
||||
except TypeError:
|
||||
parser.feed(body.decode('utf-8'))
|
||||
for url in parser.script_URLs:
|
||||
parser = urlparse(url)
|
||||
domain = parser.netloc
|
||||
try:
|
||||
gethostbyname(domain)
|
||||
except gaierror:
|
||||
ctx.log.error("XSS found in %s due to unclaimed URL \"%s\" in script tag." % (requestUrl, url))
|
||||
|
||||
|
||||
def test_end_of_URL_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
|
||||
""" Test the given URL for XSS via injection onto the end of the URL and
|
||||
log the XSS if found """
|
||||
parsed_URL = urlparse(request_URL)
|
||||
path = parsed_URL.path
|
||||
if path != "" and path[-1] != "/": # ensure the path ends in a /
|
||||
path += "/"
|
||||
path += FULL_PAYLOAD.decode('utf-8') # the path must be a string while the payload is bytes
|
||||
url = parsed_URL._replace(path=path).geturl()
|
||||
body = requests.get(url, cookies=cookies).text.lower()
|
||||
xss_info = get_XSS_data(body, url, "End of URL")
|
||||
sqli_info = get_SQLi_data(body, original_body, url, "End of URL")
|
||||
return xss_info, sqli_info
|
||||
|
||||
|
||||
def test_referer_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
|
||||
""" Test the given URL for XSS via injection into the referer and
|
||||
log the XSS if found """
|
||||
body = requests.get(request_URL, headers={'referer': FULL_PAYLOAD}, cookies=cookies).text.lower()
|
||||
xss_info = get_XSS_data(body, request_URL, "Referer")
|
||||
sqli_info = get_SQLi_data(body, original_body, request_URL, "Referer")
|
||||
return xss_info, sqli_info
|
||||
|
||||
|
||||
def test_user_agent_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
|
||||
""" Test the given URL for XSS via injection into the user agent and
|
||||
log the XSS if found """
|
||||
body = requests.get(request_URL, headers={'User-Agent': FULL_PAYLOAD}, cookies=cookies).text.lower()
|
||||
xss_info = get_XSS_data(body, request_URL, "User Agent")
|
||||
sqli_info = get_SQLi_data(body, original_body, request_URL, "User Agent")
|
||||
return xss_info, sqli_info
|
||||
|
||||
|
||||
def test_query_injection(original_body: str, request_URL: str, cookies: Cookies):
|
||||
""" Test the given URL for XSS via injection into URL queries and
|
||||
log the XSS if found """
|
||||
parsed_URL = urlparse(request_URL)
|
||||
query_string = parsed_URL.query
|
||||
# queries is a list of parameters where each parameter is set to the payload
|
||||
queries = [query.split("=")[0] + "=" + FULL_PAYLOAD.decode('utf-8') for query in query_string.split("&")]
|
||||
new_query_string = "&".join(queries)
|
||||
new_URL = parsed_URL._replace(query=new_query_string).geturl()
|
||||
body = requests.get(new_URL, cookies=cookies).text.lower()
|
||||
xss_info = get_XSS_data(body, new_URL, "Query")
|
||||
sqli_info = get_SQLi_data(body, original_body, new_URL, "Query")
|
||||
return xss_info, sqli_info
|
||||
|
||||
|
||||
def log_XSS_data(xss_info: Optional[XSSData]) -> None:
|
||||
""" Log information about the given XSS to mitmproxy """
|
||||
# If it is None, then there is no info to log
|
||||
if not xss_info:
|
||||
return
|
||||
ctx.log.error("===== XSS Found ====")
|
||||
ctx.log.error("XSS URL: %s" % xss_info.url)
|
||||
ctx.log.error("Injection Point: %s" % xss_info.injection_point)
|
||||
ctx.log.error("Suggested Exploit: %s" % xss_info.exploit)
|
||||
ctx.log.error("Line: %s" % xss_info.line)
|
||||
|
||||
|
||||
def log_SQLi_data(sqli_info: Optional[SQLiData]) -> None:
|
||||
""" Log information about the given SQLi to mitmproxy """
|
||||
if not sqli_info:
|
||||
return
|
||||
ctx.log.error("===== SQLi Found =====")
|
||||
ctx.log.error("SQLi URL: %s" % sqli_info.url.decode('utf-8'))
|
||||
ctx.log.error("Injection Point: %s" % sqli_info.injection_point.decode('utf-8'))
|
||||
ctx.log.error("Regex used: %s" % sqli_info.regex.decode('utf-8'))
|
||||
ctx.log.error("Suspected DBMS: %s" % sqli_info.dbms.decode('utf-8'))
|
||||
|
||||
|
||||
def get_SQLi_data(new_body: str, original_body: str, request_URL: str, injection_point: str) -> Optional[SQLiData]:
|
||||
""" Return a SQLiDict if there is a SQLi otherwise return None
|
||||
String String URL String -> (SQLiDict or None) """
|
||||
# Regexes taken from Damn Small SQLi Scanner: https://github.com/stamparm/DSSS/blob/master/dsss.py#L17
|
||||
DBMS_ERRORS = {
|
||||
"MySQL": (r"SQL syntax.*MySQL", r"Warning.*mysql_.*", r"valid MySQL result", r"MySqlClient\."),
|
||||
"PostgreSQL": (r"PostgreSQL.*ERROR", r"Warning.*\Wpg_.*", r"valid PostgreSQL result", r"Npgsql\."),
|
||||
"Microsoft SQL Server": (r"Driver.* SQL[\-\_\ ]*Server", r"OLE DB.* SQL Server", r"(\W|\A)SQL Server.*Driver",
|
||||
r"Warning.*mssql_.*", r"(\W|\A)SQL Server.*[0-9a-fA-F]{8}",
|
||||
r"(?s)Exception.*\WSystem\.Data\.SqlClient\.", r"(?s)Exception.*\WRoadhouse\.Cms\."),
|
||||
"Microsoft Access": (r"Microsoft Access Driver", r"JET Database Engine", r"Access Database Engine"),
|
||||
"Oracle": (r"\bORA-[0-9][0-9][0-9][0-9]", r"Oracle error", r"Oracle.*Driver", r"Warning.*\Woci_.*", r"Warning.*\Wora_.*"),
|
||||
"IBM DB2": (r"CLI Driver.*DB2", r"DB2 SQL error", r"\bdb2_\w+\("),
|
||||
"SQLite": (r"SQLite/JDBCDriver", r"SQLite.Exception", r"System.Data.SQLite.SQLiteException", r"Warning.*sqlite_.*",
|
||||
r"Warning.*SQLite3::", r"\[SQLITE_ERROR\]"),
|
||||
"Sybase": (r"(?i)Warning.*sybase.*", r"Sybase message", r"Sybase.*Server message.*"),
|
||||
}
|
||||
for dbms, regexes in DBMS_ERRORS.items():
|
||||
for regex in regexes:
|
||||
if re.search(regex, new_body) and not re.search(regex, original_body):
|
||||
return SQLiData(request_URL,
|
||||
injection_point,
|
||||
regex,
|
||||
dbms)
|
||||
|
||||
|
||||
# A qc is either ' or "
|
||||
def inside_quote(qc: str, substring: bytes, text_index: int, body: bytes) -> bool:
|
||||
""" Whether the Numberth occurence of the first string in the second
|
||||
string is inside quotes as defined by the supplied QuoteChar """
|
||||
substring = substring.decode('utf-8')
|
||||
body = body.decode('utf-8')
|
||||
num_substrings_found = 0
|
||||
in_quote = False
|
||||
for index, char in enumerate(body):
|
||||
# Whether the next chunk of len(substring) chars is the substring
|
||||
next_part_is_substring = (
|
||||
(not (index + len(substring) > len(body))) and
|
||||
(body[index:index + len(substring)] == substring)
|
||||
)
|
||||
# Whether this char is escaped with a \
|
||||
is_not_escaped = (
|
||||
(index - 1 < 0 or index - 1 > len(body)) or
|
||||
(body[index - 1] != "\\")
|
||||
)
|
||||
if char == qc and is_not_escaped:
|
||||
in_quote = not in_quote
|
||||
if next_part_is_substring:
|
||||
if num_substrings_found == text_index:
|
||||
return in_quote
|
||||
num_substrings_found += 1
|
||||
return False
|
||||
|
||||
|
||||
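
# Illustrative examples, not part of the original file: with body b"x = 'abc'",
# the first occurrence of b"abc" sits between single quotes, so
# inside_quote("'", b"abc", 0, b"x = 'abc'") returns True; with body b"x = abc"
# there are no quotes at all, so the same call returns False.
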
def paths_to_text(html: str, str: str) -> List[str]:
|
||||
""" Return list of Paths to a given str in the given HTML tree
|
||||
- Note that it does a BFS """
|
||||
|
||||
def remove_last_occurence_of_sub_string(str: str, substr: str):
|
||||
""" Delete the last occurence of substr from str
|
||||
String String -> String
|
||||
"""
|
||||
index = str.rfind(substr)
|
||||
return str[:index] + str[index + len(substr):]
|
||||
|
||||
class PathHTMLParser(HTMLParser):
|
||||
currentPath = ""
|
||||
paths = []
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
self.currentPath += ("/" + tag)
|
||||
|
||||
def handle_endtag(self, tag):
|
||||
self.currentPath = remove_last_occurence_of_sub_string(self.currentPath, "/" + tag)
|
||||
|
||||
def handle_data(self, data):
|
||||
if str in data:
|
||||
self.paths.append(self.currentPath)
|
||||
|
||||
parser = PathHTMLParser()
|
||||
parser.feed(html)
|
||||
return parser.paths
|
||||
|
||||
|
||||
def get_XSS_data(body: str, request_URL: str, injection_point: str) -> Optional[XSSData]:
|
||||
""" Return a XSSDict if there is a XSS otherwise return None """
|
||||
def in_script(text, index, body) -> bool:
|
||||
""" Whether the Numberth occurence of the first string in the second
|
||||
string is inside a script tag """
|
||||
paths = paths_to_text(body.decode('utf-8'), text.decode("utf-8"))
|
||||
try:
|
||||
path = paths[index]
|
||||
return "script" in path
|
||||
except IndexError:
|
||||
return False
|
||||
|
||||
def in_HTML(text: bytes, index: int, body: bytes) -> bool:
|
||||
""" Whether the Numberth occurence of the first string in the second
|
||||
string is inside the HTML but not inside a script tag or part of
|
||||
a HTML attribute"""
|
||||
# if there is a < then lxml will interpret that as a tag, so only search for the stuff before it
|
||||
text = text.split(b"<")[0]
|
||||
paths = paths_to_text(body.decode('utf-8'), text.decode("utf-8"))
|
||||
try:
|
||||
path = paths[index]
|
||||
return "script" not in path
|
||||
except IndexError:
|
||||
return False
|
||||
|
||||
def inject_javascript_handler(html: str) -> bool:
|
||||
""" Whether you can inject a Javascript:alert(0) as a link """
|
||||
class injectJSHandlerHTMLParser(HTMLParser):
|
||||
injectJSHandler = False
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
for name, value in attrs:
|
||||
if name == "href" and value.startswith(FRONT_WALL.decode('utf-8')):
|
||||
self.injectJSHandler = True
|
||||
|
||||
parser = injectJSHandlerHTMLParser()
|
||||
parser.feed(html)
|
||||
return parser.injectJSHandler
|
||||
# Only convert the body to bytes if needed
|
||||
if isinstance(body, str):
|
||||
body = bytes(body, 'utf-8')
|
||||
# Regex for between 24 and 72 (aka 24*3) characters encapsulated by the walls
|
||||
regex = re.compile(b"""%s.{24,72}?%s""" % (FRONT_WALL, BACK_WALL))
|
||||
matches = regex.findall(body)
|
||||
for index, match in enumerate(matches):
|
||||
# Where the string is injected into the HTML
|
||||
in_script = in_script(match, index, body)
|
||||
in_HTML = in_HTML(match, index, body)
|
||||
in_tag = not in_script and not in_HTML
|
||||
in_single_quotes = inside_quote("'", match, index, body)
|
||||
in_double_quotes = inside_quote('"', match, index, body)
|
||||
# Whether you can inject:
|
||||
inject_open_angle = b"ao<ac" in match # open angle brackets
|
||||
inject_close_angle = b"ac>so" in match # close angle brackets
|
||||
inject_single_quotes = b"s'd" in match # single quotes
|
||||
inject_double_quotes = b'd"ao' in match # double quotes
|
||||
inject_slash = b"sl/bsl" in match # forward slashes
|
||||
inject_semi = b"se;sl" in match # semicolons
|
||||
inject_equals = b"eq=" in match # equals sign
|
||||
if in_script and inject_slash and inject_open_angle and inject_close_angle: # e.g. <script>PAYLOAD</script>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'</script><script>alert(0)</script><script>',
|
||||
match.decode('utf-8'))
|
||||
elif in_script and in_single_quotes and inject_single_quotes and inject_semi: # e.g. <script>t='PAYLOAD';</script>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
"';alert(0);g='",
|
||||
match.decode('utf-8'))
|
||||
elif in_script and in_double_quotes and inject_double_quotes and inject_semi: # e.g. <script>t="PAYLOAD";</script>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'";alert(0);g="',
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and in_single_quotes and inject_single_quotes and inject_open_angle and inject_close_angle and inject_slash:
|
||||
# e.g. <a href='PAYLOAD'>Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
"'><script>alert(0)</script>",
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and in_double_quotes and inject_double_quotes and inject_open_angle and inject_close_angle and inject_slash:
|
||||
# e.g. <a href="PAYLOAD">Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'"><script>alert(0)</script>',
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and not in_double_quotes and not in_single_quotes and inject_open_angle and inject_close_angle and inject_slash:
|
||||
# e.g. <a href=PAYLOAD>Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'><script>alert(0)</script>',
|
||||
match.decode('utf-8'))
|
||||
elif inject_javascript_handler(body.decode('utf-8')): # e.g. <html><a href=PAYLOAD>Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'Javascript:alert(0)',
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and in_double_quotes and inject_double_quotes and inject_equals: # e.g. <a href="PAYLOAD">Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'" onmouseover="alert(0)" t="',
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and in_single_quotes and inject_single_quotes and inject_equals: # e.g. <a href='PAYLOAD'>Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
"' onmouseover='alert(0)' t='",
|
||||
match.decode('utf-8'))
|
||||
elif in_tag and not in_single_quotes and not in_double_quotes and inject_equals: # e.g. <a href=PAYLOAD>Test</a>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
" onmouseover=alert(0) t=",
|
||||
match.decode('utf-8'))
|
||||
elif in_HTML and not in_script and inject_open_angle and inject_close_angle and inject_slash: # e.g. <html>PAYLOAD</html>
|
||||
return XSSData(request_URL,
|
||||
injection_point,
|
||||
'<script>alert(0)</script>',
|
||||
match.decode('utf-8'))
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
# response is mitmproxy's entry point
|
||||
def response(flow: http.HTTPFlow) -> None:
|
||||
cookiesDict = get_cookies(flow)
|
||||
# Example: http://xss.guru/unclaimedScriptTag.html
|
||||
find_unclaimed_URLs(flow.response.content, flow.request.url)
|
||||
results = test_end_of_URL_injection(flow.response.content.decode('utf-8'), flow.request.url, cookiesDict)
|
||||
log_XSS_data(results[0])
|
||||
log_SQLi_data(results[1])
|
||||
# Example: https://daviddworken.com/vulnerableReferer.php
|
||||
results = test_referer_injection(flow.response.content.decode('utf-8'), flow.request.url, cookiesDict)
|
||||
log_XSS_data(results[0])
|
||||
log_SQLi_data(results[1])
|
||||
# Example: https://daviddworken.com/vulnerableUA.php
|
||||
results = test_user_agent_injection(flow.response.content.decode('utf-8'), flow.request.url, cookiesDict)
|
||||
log_XSS_data(results[0])
|
||||
log_SQLi_data(results[1])
|
||||
if "?" in flow.request.url:
|
||||
# Example: https://daviddworken.com/vulnerable.php?name=
|
||||
results = test_query_injection(flow.response.content.decode('utf-8'), flow.request.url, cookiesDict)
|
||||
log_XSS_data(results[0])
|
||||
log_SQLi_data(results[1])
|
||||
@@ -7,7 +7,7 @@
|
||||
| filter_flows.py | This script demonstrates how to use mitmproxy's filter pattern in scripts. |
|
||||
| io_read_dumpfile.py | Read a dumpfile generated by mitmproxy. |
|
||||
| io_write_dumpfile.py | Only write selected flows into a mitmproxy dumpfile. |
|
||||
| logging.py | Use mitmproxy's logging API. |
|
||||
| log_events.py | Use mitmproxy's logging API. |
|
||||
| modify_body_inject_iframe.py | Inject configurable iframe into pages. |
|
||||
| modify_form.py | Modify HTTP form submissions. |
|
||||
| modify_querystring.py | Modify HTTP query strings. |
|
||||
@@ -15,4 +15,4 @@
|
||||
| script_arguments.py | Add arguments to a script. |
|
||||
| send_reply_from_proxy.py | Send a HTTP response directly from the proxy. |
|
||||
| upsidedownternet.py | Turn all images upside down. |
|
||||
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |
|
||||
| wsgi_flask_app.py | Embed a WSGI app into mitmproxy. |
|
||||
|
||||
@@ -1,2 +1,5 @@
|
||||
def response(flow):
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def response(flow: http.HTTPFlow) -> None:
|
||||
flow.response.headers["newheader"] = "foo"
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
class AddHeader:
|
||||
def response(self, flow):
|
||||
def response(self, flow: http.HTTPFlow) -> None:
|
||||
flow.response.headers["newheader"] = "foo"
|
||||
|
||||
|
||||
def start():
|
||||
return AddHeader()
|
||||
addons = [AddHeader()]
|
||||
|
||||
@@ -3,6 +3,10 @@ This example shows how one can add a custom contentview to mitmproxy.
|
||||
The content view API is explained in the mitmproxy.contentviews module.
|
||||
"""
|
||||
from mitmproxy import contentviews
|
||||
import typing
|
||||
|
||||
|
||||
CVIEWSWAPCASE = typing.Tuple[str, typing.Iterable[typing.List[typing.Tuple[str, typing.AnyStr]]]]
|
||||
|
||||
|
||||
class ViewSwapCase(contentviews.View):
|
||||
@@ -10,17 +14,17 @@ class ViewSwapCase(contentviews.View):
|
||||
|
||||
# We don't have a good solution for the keyboard shortcut yet -
|
||||
# you manually need to find a free letter. Contributions welcome :)
|
||||
prompt = ("swap case text", "p")
|
||||
prompt = ("swap case text", "z")
|
||||
content_types = ["text/plain"]
|
||||
|
||||
def __call__(self, data: bytes, **metadata):
|
||||
def __call__(self, data: typing.AnyStr, **metadata) -> CVIEWSWAPCASE:
|
||||
return "case-swapped text", contentviews.format_text(data.swapcase())
|
||||
|
||||
|
||||
view = ViewSwapCase()
|
||||
|
||||
|
||||
def start():
|
||||
def load(l):
|
||||
contentviews.add(view)
|
||||
|
||||
|
||||
|
||||
11
examples/simple/custom_option.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
def load(l):
|
||||
ctx.log.info("Registering option 'custom'")
|
||||
l.add_option("custom", bool, False, "A custom option")
|
||||
|
||||
|
||||
def configure(updated):
|
||||
if "custom" in updated:
|
||||
ctx.log.info("custom option value: %s" % ctx.options.custom)
|
||||
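
Usage note: the option registered above can then be set like any built-in one,
e.g. ``mitmdump -s examples/simple/custom_option.py --set custom=true`` (assuming
the ``--set`` mechanism of this mitmproxy version), which fires the ``configure``
hook and logs the new value.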
@@ -1,23 +1,26 @@
|
||||
"""
|
||||
This scripts demonstrates how to use mitmproxy's filter pattern in scripts.
|
||||
Usage:
|
||||
mitmdump -s "flowfilter.py FILTER"
|
||||
"""
|
||||
import sys
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import ctx, http
|
||||
|
||||
|
||||
class Filter:
|
||||
def __init__(self, spec):
|
||||
self.filter = flowfilter.parse(spec)
|
||||
def __init__(self):
|
||||
self.filter = None # type: flowfilter.TFilter
|
||||
|
||||
def response(self, flow):
|
||||
def configure(self, updated):
|
||||
self.filter = flowfilter.parse(ctx.options.flowfilter)
|
||||
|
||||
def load(self, l):
|
||||
l.add_option(
|
||||
"flowfilter", str, "", "Check that flow matches filter."
|
||||
)
|
||||
|
||||
def response(self, flow: http.HTTPFlow) -> None:
|
||||
if flowfilter.match(self.filter, flow):
|
||||
print("Flow matches filter:")
|
||||
print(flow)
|
||||
|
||||
|
||||
def start():
|
||||
if len(sys.argv) != 2:
|
||||
raise ValueError("Usage: -s 'filt.py FILTER'")
|
||||
return Filter(sys.argv[1])
|
||||
addons = [Filter()]
|
||||
|
||||
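
Usage note: the filter expression now arrives via the ``flowfilter`` option rather
than a positional argument, e.g.
``mitmdump -s examples/simple/filter_flows.py --set "flowfilter=~u example.com"``
(assuming ``--set``; ``~u`` is mitmproxy's URL filter operator). Matching flows
are then printed by the ``response`` hook.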
@@ -1,13 +1,15 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# type: ignore
|
||||
#
|
||||
# Simple script showing how to read a mitmproxy dump file
|
||||
#
|
||||
|
||||
from mitmproxy import io
|
||||
from mitmproxy.exceptions import FlowReadException
|
||||
import pprint
|
||||
import sys
|
||||
|
||||
|
||||
with open(sys.argv[1], "rb") as logfile:
|
||||
freader = io.FlowReader(logfile)
|
||||
pp = pprint.PrettyPrinter(indent=4)
|
||||
|
||||
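
The remainder of the script lies outside this hunk. A minimal loop over the
reader (an illustrative sketch under that assumption, not necessarily the file's
exact contents) might look like this:

    try:
        for f in freader.stream():
            # Each f is a Flow object reconstructed from the dump file.
            print(f)
            pp.pprint(f.get_state())
    except FlowReadException as e:
        print("Flow file corrupted: {}".format(e))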
@@ -7,23 +7,21 @@ to multiple files in parallel.
|
||||
"""
|
||||
import random
|
||||
import sys
|
||||
from mitmproxy import io
|
||||
from mitmproxy import io, http
|
||||
import typing # noqa
|
||||
|
||||
|
||||
class Writer:
|
||||
def __init__(self, path):
|
||||
def __init__(self, path: str) -> None:
|
||||
if path == "-":
|
||||
f = sys.stdout
|
||||
f = sys.stdout # type: typing.IO[typing.Any]
|
||||
else:
|
||||
f = open(path, "wb")
|
||||
self.w = io.FlowWriter(f)
|
||||
|
||||
def response(self, flow):
|
||||
def response(self, flow: http.HTTPFlow) -> None:
|
||||
if random.choice([True, False]):
|
||||
self.w.add(flow)
|
||||
|
||||
|
||||
def start():
|
||||
if len(sys.argv) != 2:
|
||||
raise ValueError('Usage: -s "flowriter.py filename"')
|
||||
return Writer(sys.argv[1])
|
||||
addons = [Writer(sys.argv[1])]
|
||||
|
||||
@@ -7,6 +7,6 @@ If you want to help us out: https://github.com/mitmproxy/mitmproxy/issues/1530 :
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
def start():
|
||||
def load(l):
|
||||
ctx.log.info("This is some informative text.")
|
||||
ctx.log.error("This is an error.")
|
||||
|
||||
@@ -1,29 +1,26 @@
|
||||
# Usage: mitmdump -s "iframe_injector.py url"
|
||||
# (this script works best with --anticache)
|
||||
import sys
|
||||
from bs4 import BeautifulSoup
|
||||
from mitmproxy import ctx, http
|
||||
|
||||
|
||||
class Injector:
|
||||
def __init__(self, iframe_url):
|
||||
self.iframe_url = iframe_url
|
||||
def load(self, loader):
|
||||
loader.add_option(
|
||||
"iframe", str, "", "IFrame to inject"
|
||||
)
|
||||
|
||||
def response(self, flow):
|
||||
if flow.request.host in self.iframe_url:
|
||||
return
|
||||
html = BeautifulSoup(flow.response.content, "html.parser")
|
||||
if html.body:
|
||||
iframe = html.new_tag(
|
||||
"iframe",
|
||||
src=self.iframe_url,
|
||||
frameborder=0,
|
||||
height=0,
|
||||
width=0)
|
||||
html.body.insert(0, iframe)
|
||||
flow.response.content = str(html).encode("utf8")
|
||||
def response(self, flow: http.HTTPFlow) -> None:
|
||||
if ctx.options.iframe:
|
||||
html = BeautifulSoup(flow.response.content, "html.parser")
|
||||
if html.body:
|
||||
iframe = html.new_tag(
|
||||
"iframe",
|
||||
src=ctx.options.iframe,
|
||||
frameborder=0,
|
||||
height=0,
|
||||
width=0)
|
||||
html.body.insert(0, iframe)
|
||||
flow.response.content = str(html).encode("utf8")
|
||||
|
||||
|
||||
def start():
|
||||
if len(sys.argv) != 2:
|
||||
raise ValueError('Usage: -s "iframe_injector.py url"')
|
||||
return Injector(sys.argv[1])
|
||||
addons = [Injector()]
|
||||
|
||||
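
Usage note: the injected frame URL now comes from the ``iframe`` option, e.g.
``--set iframe=http://example.org/`` (assuming the ``--set`` mechanism), instead of
a positional argument, and running with ``--anticache`` remains useful as the
header comment suggests.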
@@ -1,4 +1,7 @@
|
||||
def request(flow):
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def request(flow: http.HTTPFlow) -> None:
|
||||
if flow.request.urlencoded_form:
|
||||
# If there's already a form, one can just add items to the dict:
|
||||
flow.request.urlencoded_form["mitmproxy"] = "rocks"
|
||||
|
||||
@@ -1,2 +1,5 @@
|
||||
def request(flow):
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def request(flow: http.HTTPFlow) -> None:
|
||||
flow.request.query["mitmproxy"] = "rocks"
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
"""
|
||||
This example shows two ways to redirect flows to another server.
|
||||
"""
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def request(flow):
|
||||
def request(flow: http.HTTPFlow) -> None:
|
||||
# pretty_host takes the "Host" header of the request into account,
|
||||
# which is useful in transparent mode where we usually only have the IP
|
||||
# otherwise.
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
import argparse
|
||||
|
||||
|
||||
class Replacer:
|
||||
def __init__(self, src, dst):
|
||||
self.src, self.dst = src, dst
|
||||
|
||||
def response(self, flow):
|
||||
flow.response.replace(self.src, self.dst)
|
||||
|
||||
|
||||
def start():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("src", type=str)
|
||||
parser.add_argument("dst", type=str)
|
||||
args = parser.parse_args()
|
||||
return Replacer(args.src, args.dst)
|
||||
@@ -5,7 +5,7 @@ without sending any data to the remote server.
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def request(flow):
|
||||
def request(flow: http.HTTPFlow) -> None:
|
||||
# pretty_url takes the "Host" header of the request into account, which
|
||||
# is useful in transparent mode where we usually only have the IP otherwise.
|
||||
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
This script rotates all images passing through the proxy by 180 degrees.
|
||||
"""
|
||||
import io
|
||||
|
||||
from PIL import Image
|
||||
from mitmproxy import http
|
||||
|
||||
|
||||
def response(flow):
|
||||
def response(flow: http.HTTPFlow) -> None:
|
||||
if flow.response.headers.get("content-type", "").startswith("image"):
|
||||
s = io.BytesIO(flow.response.content)
|
||||
img = Image.open(s).rotate(180)
|
||||
|
||||
@@ -10,14 +10,14 @@ app = Flask("proxapp")
|
||||
|
||||
|
||||
@app.route('/')
|
||||
def hello_world():
|
||||
def hello_world() -> str:
|
||||
return 'Hello World!'
|
||||
|
||||
|
||||
def start():
|
||||
# Host app at the magic domain "proxapp" on port 80. Requests to this
|
||||
def load(l):
|
||||
# Host app at the magic domain "proxapp.local" on port 80. Requests to this
|
||||
# domain and port combination will now be routed to the WSGI app instance.
|
||||
return wsgiapp.WSGIApp(app, "proxapp", 80)
|
||||
return wsgiapp.WSGIApp(app, "proxapp.local", 80)
|
||||
|
||||
# SSL works too, but the magic domain needs to be resolvable from the mitmproxy machine due to mitmproxy's design.
|
||||
# mitmproxy will connect to said domain and use its certificate (unless --no-upstream-cert is set)
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
|
||||
|
||||
<!--
|
||||
Cut and paste the output of "mitmdump --sysinfo".
|
||||
Cut and paste the output of "mitmproxy --version".
|
||||
|
||||
If you're using an older version of mitmproxy, please specify the version
|
||||
and OS.
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
import typing
|
||||
import traceback
|
||||
import contextlib
|
||||
import sys
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import controller
|
||||
from mitmproxy import flow
|
||||
from . import ctx
|
||||
import pprint
|
||||
|
||||
|
||||
@@ -6,18 +15,115 @@ def _get_name(itm):
|
||||
return getattr(itm, "name", itm.__class__.__name__.lower())
|
||||
|
||||
|
||||
def cut_traceback(tb, func_name):
|
||||
"""
|
||||
Cut off a traceback at the function with the given name.
|
||||
The func_name's frame is excluded.
|
||||
|
||||
Args:
|
||||
tb: traceback object, as returned by sys.exc_info()[2]
|
||||
func_name: function name
|
||||
|
||||
Returns:
|
||||
Reduced traceback.
|
||||
"""
|
||||
tb_orig = tb
|
||||
for _, _, fname, _ in traceback.extract_tb(tb):
|
||||
tb = tb.tb_next
|
||||
if fname == func_name:
|
||||
break
|
||||
return tb or tb_orig
|
||||
|
||||
|
||||
class StreamLog:
|
||||
"""
|
||||
A class for redirecting output using contextlib.
|
||||
"""
|
||||
def __init__(self, log):
|
||||
self.log = log
|
||||
|
||||
def write(self, buf):
|
||||
if buf.strip():
|
||||
self.log(buf)
|
||||
|
||||
def flush(self): # pragma: no cover
|
||||
# Click uses flush sometimes, so we dummy it up
|
||||
pass
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def safecall():
|
||||
stdout_replacement = StreamLog(ctx.log.warn)
|
||||
try:
|
||||
with contextlib.redirect_stdout(stdout_replacement):
|
||||
yield
|
||||
except (exceptions.AddonHalt, exceptions.OptionsError):
|
||||
raise
|
||||
except Exception as e:
|
||||
etype, value, tb = sys.exc_info()
|
||||
tb = cut_traceback(tb, "invoke_addon").tb_next
|
||||
ctx.log.error(
|
||||
"Addon error: %s" % "".join(
|
||||
traceback.format_exception(etype, value, tb)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class Loader:
|
||||
"""
|
||||
A loader object is passed to the load() event when addons start up.
|
||||
"""
|
||||
def __init__(self, master):
|
||||
self.master = master
|
||||
|
||||
def add_option(
|
||||
self,
|
||||
name: str,
|
||||
typespec: type,
|
||||
default: typing.Any,
|
||||
help: str,
|
||||
choices: typing.Optional[typing.Sequence[str]] = None
|
||||
) -> None:
|
||||
if name in self.master.options:
|
||||
ctx.log.warn("Over-riding existing option %s" % name)
|
||||
self.master.options.add_option(
|
||||
name,
|
||||
typespec,
|
||||
default,
|
||||
help,
|
||||
choices
|
||||
)
|
||||
|
||||
def add_command(self, path: str, func: typing.Callable) -> None:
|
||||
self.master.commands.add(path, func)
|
||||
|
||||
|
||||
def traverse(chain):
|
||||
"""
|
||||
Recursively traverse an addon chain.
|
||||
"""
|
||||
for a in chain:
|
||||
yield a
|
||||
if hasattr(a, "addons"):
|
||||
yield from traverse(a.addons)
|
||||
|
||||
|
||||
class AddonManager:
|
||||
def __init__(self, master):
|
||||
self.lookup = {}
|
||||
self.chain = []
|
||||
self.master = master
|
||||
master.options.changed.connect(self._options_update)
|
||||
master.options.changed.connect(self._configure_all)
|
||||
|
||||
def _configure_all(self, options, updated):
|
||||
self.trigger("configure", updated)
|
||||
|
||||
def clear(self):
|
||||
"""
|
||||
Remove all addons.
|
||||
"""
|
||||
self.done()
|
||||
self.chain = []
|
||||
for i in self.chain:
|
||||
self.remove(i)
|
||||
|
||||
def get(self, name):
|
||||
"""
|
||||
@@ -25,45 +131,58 @@ class AddonManager:
|
||||
attribute on the instance, or the lower case class name if that
|
||||
does not exist.
|
||||
"""
|
||||
for i in self.chain:
|
||||
if name == _get_name(i):
|
||||
return i
|
||||
return self.lookup.get(name, None)
|
||||
|
||||
def _options_update(self, options, updated):
|
||||
for i in self.chain:
|
||||
with self.master.handlecontext():
|
||||
self.invoke_with_context(i, "configure", options, updated)
|
||||
def register(self, addon):
|
||||
"""
|
||||
Register an addon, call its load event, and then register all its
|
||||
sub-addons. This should be used by addons that dynamically manage
|
||||
addons.
|
||||
|
||||
def startup(self, s):
|
||||
If the calling addon is already running, it should follow with
|
||||
running and configure events. Must be called within a current
|
||||
context.
|
||||
"""
|
||||
Run startup events on addon.
|
||||
"""
|
||||
self.invoke_with_context(s, "start")
|
||||
self.invoke_with_context(
|
||||
s,
|
||||
"configure",
|
||||
self.master.options,
|
||||
self.master.options.keys()
|
||||
)
|
||||
for a in traverse([addon]):
|
||||
name = _get_name(a)
|
||||
if name in self.lookup:
|
||||
raise exceptions.AddonManagerError(
|
||||
"An addon called '%s' already exists." % name
|
||||
)
|
||||
l = Loader(self.master)
|
||||
self.invoke_addon(addon, "load", l)
|
||||
for a in traverse([addon]):
|
||||
name = _get_name(a)
|
||||
self.lookup[name] = a
|
||||
for a in traverse([addon]):
|
||||
self.master.commands.collect_commands(a)
|
||||
return addon
|
||||
|
||||
def add(self, *addons):
|
||||
"""
|
||||
Add addons to the end of the chain, and run their startup events.
|
||||
Add addons to the end of the chain, and run their load event.
|
||||
If any addon has sub-addons, they are registered.
|
||||
"""
|
||||
self.chain.extend(addons)
|
||||
for i in addons:
|
||||
self.startup(i)
|
||||
with self.master.handlecontext():
|
||||
for i in addons:
|
||||
self.chain.append(self.register(i))
|
||||
|
||||
def remove(self, addon):
|
||||
"""
|
||||
Remove an addon from the chain, and run its done events.
|
||||
"""
|
||||
self.chain = [i for i in self.chain if i is not addon]
|
||||
self.invoke_with_context(addon, "done")
|
||||
Remove an addon and all its sub-addons.
|
||||
|
||||
def done(self):
|
||||
for i in self.chain:
|
||||
self.invoke_with_context(i, "done")
|
||||
If the addon is not in the chain - that is, if it's managed by a
|
||||
parent addon - it's the parent's responsibility to remove it from
|
||||
its own addons attribute.
|
||||
"""
|
||||
for a in traverse([addon]):
|
||||
n = _get_name(a)
|
||||
if n not in self.lookup:
|
||||
raise exceptions.AddonManagerError("No such addon: %s" % n)
|
||||
self.chain = [i for i in self.chain if i is not a]
|
||||
del self.lookup[_get_name(a)]
|
||||
with self.master.handlecontext():
|
||||
self.invoke_addon(a, "done")
|
||||
|
||||
def __len__(self):
|
||||
return len(self.chain)
|
||||
@@ -71,22 +190,59 @@ class AddonManager:
|
||||
def __str__(self):
|
||||
return pprint.pformat([str(i) for i in self.chain])
|
||||
|
||||
def invoke_with_context(self, addon, name, *args, **kwargs):
|
||||
def handle_lifecycle(self, name, message):
|
||||
"""
|
||||
Handle a lifecycle event.
|
||||
"""
|
||||
if not hasattr(message, "reply"): # pragma: no cover
|
||||
raise exceptions.ControlException(
|
||||
"Message %s has no reply attribute" % message
|
||||
)
|
||||
|
||||
# We can use DummyReply objects multiple times. We only clear them up on
|
||||
# the next handler so that we can access value and state in the
|
||||
# meantime.
|
||||
if isinstance(message.reply, controller.DummyReply):
|
||||
message.reply.reset()
|
||||
|
||||
self.trigger(name, message)
|
||||
|
||||
if message.reply.state != "taken":
|
||||
message.reply.take()
|
||||
if not message.reply.has_message:
|
||||
message.reply.ack()
|
||||
message.reply.commit()
|
||||
|
||||
if isinstance(message.reply, controller.DummyReply):
|
||||
message.reply.mark_reset()
|
||||
|
||||
if isinstance(message, flow.Flow):
|
||||
self.trigger("update", [message])
|
||||
|
||||
def invoke_addon(self, addon, name, *args, **kwargs):
|
||||
"""
|
||||
Invoke an event on an addon and all its children. This method must
|
||||
run within an established handler context.
|
||||
"""
|
||||
if name not in eventsequence.Events:
|
||||
name = "event_" + name
|
||||
for a in traverse([addon]):
|
||||
func = getattr(a, name, None)
|
||||
if func:
|
||||
if not callable(func):
|
||||
raise exceptions.AddonManagerError(
|
||||
"Addon handler %s not callable" % name
|
||||
)
|
||||
func(*args, **kwargs)
|
||||
|
||||
def trigger(self, name, *args, **kwargs):
|
||||
"""
|
||||
Establish a handler context and trigger an event across all addons
|
||||
"""
|
||||
with self.master.handlecontext():
|
||||
self.invoke(addon, name, *args, **kwargs)
|
||||
|
||||
def invoke(self, addon, name, *args, **kwargs):
|
||||
func = getattr(addon, name, None)
|
||||
if func:
|
||||
if not callable(func):
|
||||
raise exceptions.AddonError(
|
||||
"Addon handler %s not callable" % name
|
||||
)
|
||||
func(*args, **kwargs)
|
||||
|
||||
def __call__(self, name, *args, **kwargs):
|
||||
for i in self.chain:
|
||||
try:
|
||||
self.invoke(i, name, *args, **kwargs)
|
||||
except exceptions.AddonHalt:
|
||||
return
|
||||
for i in self.chain:
|
||||
try:
|
||||
with safecall():
|
||||
self.invoke_addon(i, name, *args, **kwargs)
|
||||
except exceptions.AddonHalt:
|
||||
return
|
||||
|
||||
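
To illustrate what ``traverse`` and ``register`` provide (an illustrative sketch,
not code from this change set): an addon may expose children through an ``addons``
attribute, and the manager registers them and delivers events to them as well:

    class Child:
        def response(self, flow):
            flow.response.headers["x-child"] = "seen"


    class Parent:
        def __init__(self):
            # Sub-addons are picked up by traverse() when Parent is registered.
            self.addons = [Child()]


    addons = [Parent()]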
@@ -1,35 +1,47 @@
|
||||
from mitmproxy.addons import anticache
|
||||
from mitmproxy.addons import anticomp
|
||||
from mitmproxy.addons import check_alpn
|
||||
from mitmproxy.addons import check_ca
|
||||
from mitmproxy.addons import clientplayback
|
||||
from mitmproxy.addons import streamfile
|
||||
from mitmproxy.addons import core_option_validation
|
||||
from mitmproxy.addons import core
|
||||
from mitmproxy.addons import cut
|
||||
from mitmproxy.addons import disable_h2c
|
||||
from mitmproxy.addons import export
|
||||
from mitmproxy.addons import onboarding
|
||||
from mitmproxy.addons import proxyauth
|
||||
from mitmproxy.addons import replace
|
||||
from mitmproxy.addons import script
|
||||
from mitmproxy.addons import setheaders
|
||||
from mitmproxy.addons import serverplayback
|
||||
from mitmproxy.addons import setheaders
|
||||
from mitmproxy.addons import stickyauth
|
||||
from mitmproxy.addons import stickycookie
|
||||
from mitmproxy.addons import streambodies
|
||||
from mitmproxy.addons import save
|
||||
from mitmproxy.addons import upstream_auth
|
||||
from mitmproxy.addons import disable_h2c_upgrade
|
||||
|
||||
|
||||
def default_addons():
|
||||
return [
|
||||
onboarding.Onboarding(),
|
||||
proxyauth.ProxyAuth(),
|
||||
core.Core(),
|
||||
core_option_validation.CoreOptionValidation(),
|
||||
anticache.AntiCache(),
|
||||
anticomp.AntiComp(),
|
||||
check_alpn.CheckALPN(),
|
||||
check_ca.CheckCA(),
|
||||
clientplayback.ClientPlayback(),
|
||||
cut.Cut(),
|
||||
disable_h2c.DisableH2C(),
|
||||
export.Export(),
|
||||
onboarding.Onboarding(),
|
||||
proxyauth.ProxyAuth(),
|
||||
replace.Replace(),
|
||||
script.ScriptLoader(),
|
||||
serverplayback.ServerPlayback(),
|
||||
setheaders.SetHeaders(),
|
||||
stickyauth.StickyAuth(),
|
||||
stickycookie.StickyCookie(),
|
||||
script.ScriptLoader(),
|
||||
streamfile.StreamFile(),
|
||||
streambodies.StreamBodies(),
|
||||
replace.Replace(),
|
||||
setheaders.SetHeaders(),
|
||||
serverplayback.ServerPlayback(),
|
||||
clientplayback.ClientPlayback(),
|
||||
save.Save(),
|
||||
upstream_auth.UpstreamAuth(),
|
||||
disable_h2c_upgrade.DisableH2CleartextUpgrade(),
|
||||
]
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class AntiCache:
|
||||
def __init__(self):
|
||||
self.enabled = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.enabled = options.anticache
|
||||
|
||||
def request(self, flow):
|
||||
if self.enabled:
|
||||
if ctx.options.anticache:
|
||||
flow.request.anticache()
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class AntiComp:
|
||||
def __init__(self):
|
||||
self.enabled = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.enabled = options.anticomp
|
||||
|
||||
def request(self, flow):
|
||||
if self.enabled:
|
||||
if ctx.options.anticomp:
|
||||
flow.request.anticomp()
|
||||
|
||||
17
mitmproxy/addons/check_alpn.py
Normal file
@@ -0,0 +1,17 @@
|
||||
import mitmproxy
|
||||
from mitmproxy.net import tcp
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class CheckALPN:
|
||||
def __init__(self):
|
||||
self.failed = False
|
||||
|
||||
def configure(self, updated):
|
||||
self.failed = mitmproxy.ctx.master.options.http2 and not tcp.HAS_ALPN
|
||||
if self.failed:
|
||||
ctx.log.warn(
|
||||
"HTTP/2 is disabled because ALPN support missing!\n"
|
||||
"OpenSSL 1.0.2+ required to support HTTP/2 connections.\n"
|
||||
"Use --no-http2 to silence this warning."
|
||||
)
|
||||
24
mitmproxy/addons/check_ca.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import mitmproxy
|
||||
|
||||
|
||||
class CheckCA:
|
||||
def __init__(self):
|
||||
self.failed = False
|
||||
|
||||
def configure(self, updated):
|
||||
has_ca = (
|
||||
mitmproxy.ctx.master.server and
|
||||
mitmproxy.ctx.master.server.config and
|
||||
mitmproxy.ctx.master.server.config.certstore and
|
||||
mitmproxy.ctx.master.server.config.certstore.default_ca
|
||||
)
|
||||
if has_ca:
|
||||
self.failed = mitmproxy.ctx.master.server.config.certstore.default_ca.has_expired()
|
||||
if self.failed:
|
||||
mitmproxy.ctx.master.add_log(
|
||||
"The mitmproxy certificate authority has expired!\n"
|
||||
"Please delete all CA-related files in your ~/.mitmproxy folder.\n"
|
||||
"The CA will be regenerated automatically after restarting mitmproxy.\n"
|
||||
"Then make sure all your clients have the new CA installed.",
|
||||
"warn",
|
||||
)
|
||||
@@ -2,6 +2,7 @@ from mitmproxy import exceptions
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import io
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import command
|
||||
|
||||
import typing
|
||||
|
||||
@@ -10,36 +11,56 @@ class ClientPlayback:
|
||||
def __init__(self):
|
||||
self.flows = None
|
||||
self.current_thread = None
|
||||
self.keepserving = False
|
||||
self.has_replayed = False
|
||||
self.configured = False
|
||||
|
||||
def count(self) -> int:
|
||||
if self.flows:
|
||||
return len(self.flows)
|
||||
return 0
|
||||
|
||||
def load(self, flows: typing.Sequence[flow.Flow]):
|
||||
@command.command("replay.client.stop")
|
||||
def stop_replay(self) -> None:
|
||||
"""
|
||||
Stop client replay.
|
||||
"""
|
||||
self.flows = []
|
||||
ctx.master.addons.trigger("update", [])
|
||||
|
||||
@command.command("replay.client")
|
||||
def start_replay(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Replay requests from flows.
|
||||
"""
|
||||
self.flows = flows
|
||||
ctx.master.addons.trigger("update", [])
|
||||
|
||||
@command.command("replay.client.file")
|
||||
def load_file(self, path: str) -> None:
|
||||
try:
|
||||
flows = io.read_flows_from_paths([path])
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.CommandError(str(e))
|
||||
self.flows = flows
|
||||
|
||||
def configure(self, options, updated):
|
||||
if "client_replay" in updated:
|
||||
if options.client_replay:
|
||||
ctx.log.info("Client Replay: {}".format(options.client_replay))
|
||||
try:
|
||||
flows = io.read_flows_from_paths(options.client_replay)
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.OptionsError(str(e))
|
||||
self.load(flows)
|
||||
else:
|
||||
self.flows = None
|
||||
self.keepserving = options.keepserving
|
||||
def configure(self, updated):
|
||||
if not self.configured and ctx.options.client_replay:
|
||||
self.configured = True
|
||||
ctx.log.info("Client Replay: {}".format(ctx.options.client_replay))
|
||||
try:
|
||||
flows = io.read_flows_from_paths(ctx.options.client_replay)
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.OptionsError(str(e))
|
||||
self.start_replay(flows)
|
||||
|
||||
def tick(self):
|
||||
if self.current_thread and not self.current_thread.is_alive():
|
||||
self.current_thread = None
|
||||
if self.flows and not self.current_thread:
|
||||
self.current_thread = ctx.master.replay_request(self.flows.pop(0))
|
||||
f = self.flows.pop(0)
|
||||
self.current_thread = ctx.master.replay_request(f)
|
||||
ctx.master.addons.trigger("update", [f])
|
||||
self.has_replayed = True
|
||||
if self.has_replayed:
|
||||
if not self.flows and not self.current_thread and not self.keepserving:
|
||||
ctx.master.shutdown()
|
||||
if not self.flows and not self.current_thread:
|
||||
ctx.master.addons.trigger("processing_complete")
|
||||
|
||||
259
mitmproxy/addons/core.py
Normal file
259
mitmproxy/addons/core.py
Normal file
@@ -0,0 +1,259 @@
|
||||
import typing
|
||||
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import command
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import optmanager
|
||||
from mitmproxy.net.http import status_codes
|
||||
|
||||
|
||||
class Core:
|
||||
@command.command("set")
|
||||
def set(self, spec: str) -> None:
|
||||
"""
|
||||
Set an option of the form "key[=value]". When the value is omitted,
|
||||
booleans are set to true, strings and integers are set to None (if
|
||||
permitted), and sequences are emptied. Boolean values can be true,
|
||||
false or toggle.
|
||||
"""
|
||||
try:
|
||||
ctx.options.set(spec)
|
||||
except exceptions.OptionsError as e:
|
||||
raise exceptions.CommandError(e) from e
|
||||
|
||||
@command.command("flow.resume")
|
||||
def resume(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Resume flows if they are intercepted.
|
||||
"""
|
||||
intercepted = [i for i in flows if i.intercepted]
|
||||
for f in intercepted:
|
||||
f.resume()
|
||||
ctx.master.addons.trigger("update", intercepted)
|
||||
|
||||
# FIXME: this will become view.mark later
|
||||
@command.command("flow.mark")
|
||||
def mark(self, flows: typing.Sequence[flow.Flow], val: bool) -> None:
|
||||
"""
|
||||
Mark flows.
|
||||
"""
|
||||
updated = []
|
||||
for i in flows:
|
||||
if i.marked != val:
|
||||
i.marked = val
|
||||
updated.append(i)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
|
||||
# FIXME: this will become view.mark.toggle later
|
||||
@command.command("flow.mark.toggle")
|
||||
def mark_toggle(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Toggle mark for flows.
|
||||
"""
|
||||
for i in flows:
|
||||
i.marked = not i.marked
|
||||
ctx.master.addons.trigger("update", flows)
|
||||
|
||||
@command.command("flow.kill")
|
||||
def kill(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Kill running flows.
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
if f.killable:
|
||||
f.kill()
|
||||
updated.append(f)
|
||||
ctx.log.alert("Killed %s flows." % len(updated))
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
|
||||
# FIXME: this will become view.revert later
|
||||
@command.command("flow.revert")
|
||||
def revert(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Revert flow changes.
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
if f.modified():
|
||||
f.revert()
|
||||
updated.append(f)
|
||||
ctx.log.alert("Reverted %s flows." % len(updated))
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
|
||||
@command.command("flow.set.options")
|
||||
def flow_set_options(self) -> typing.Sequence[str]:
|
||||
return [
|
||||
"host",
|
||||
"status_code",
|
||||
"method",
|
||||
"path",
|
||||
"url",
|
||||
"reason",
|
||||
]
|
||||
|
||||
@command.command("flow.set")
|
||||
def flow_set(
|
||||
self,
|
||||
flows: typing.Sequence[flow.Flow], spec: str, sval: str
|
||||
) -> None:
|
||||
"""
|
||||
Quickly set a number of common values on flows.
|
||||
"""
|
||||
opts = self.flow_set_options()
|
||||
if spec not in opts:
|
||||
raise exceptions.CommandError(
|
||||
"Set spec must be one of: %s." % ", ".join(opts)
|
||||
)
|
||||
|
||||
val = sval # type: typing.Union[int, str]
|
||||
if spec == "status_code":
|
||||
try:
|
||||
val = int(val)
|
||||
except ValueError as v:
|
||||
raise exceptions.CommandError(
|
||||
"Status code is not an integer: %s" % val
|
||||
) from v
|
||||
|
||||
updated = []
|
||||
for f in flows:
|
||||
req = getattr(f, "request", None)
|
||||
rupdate = True
|
||||
if req:
|
||||
if spec == "method":
|
||||
req.method = val
|
||||
elif spec == "host":
|
||||
req.host = val
|
||||
elif spec == "path":
|
||||
req.path = val
|
||||
elif spec == "url":
|
||||
try:
|
||||
req.url = val
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError(
|
||||
"URL %s is invalid: %s" % (repr(val), e)
|
||||
) from e
|
||||
else:
|
||||
self.rupdate = False
|
||||
|
||||
resp = getattr(f, "response", None)
|
||||
supdate = True
|
||||
if resp:
|
||||
if spec == "status_code":
|
||||
resp.status_code = val
|
||||
if val in status_codes.RESPONSES:
|
||||
resp.reason = status_codes.RESPONSES[int(val)]
|
||||
elif spec == "reason":
|
||||
resp.reason = val
|
||||
else:
|
||||
supdate = False
|
||||
|
||||
if rupdate or supdate:
|
||||
updated.append(f)
|
||||
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
ctx.log.alert("Set %s on %s flows." % (spec, len(updated)))
|
||||
|
||||
@command.command("flow.decode")
|
||||
def decode(self, flows: typing.Sequence[flow.Flow], part: str) -> None:
|
||||
"""
|
||||
Decode flows.
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
p = getattr(f, part, None)
|
||||
if p:
|
||||
p.decode()
|
||||
updated.append(f)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
ctx.log.alert("Decoded %s flows." % len(updated))
|
||||
|
||||
@command.command("flow.encode.toggle")
|
||||
def encode_toggle(self, flows: typing.Sequence[flow.Flow], part: str) -> None:
|
||||
"""
|
||||
Toggle flow encoding on and off, using deflate for encoding.
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
p = getattr(f, part, None)
|
||||
if p:
|
||||
current_enc = p.headers.get("content-encoding", "identity")
|
||||
if current_enc == "identity":
|
||||
p.encode("deflate")
|
||||
else:
|
||||
p.decode()
|
||||
updated.append(f)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
ctx.log.alert("Toggled encoding on %s flows." % len(updated))
|
||||
|
||||
@command.command("flow.encode")
|
||||
def encode(self, flows: typing.Sequence[flow.Flow], part: str, enc: str) -> None:
|
||||
"""
|
||||
Encode flows with a specified encoding.
|
||||
"""
|
||||
if enc not in self.encode_options():
|
||||
raise exceptions.CommandError("Invalid encoding format: %s" % enc)
|
||||
|
||||
updated = []
|
||||
for f in flows:
|
||||
p = getattr(f, part, None)
|
||||
if p:
|
||||
current_enc = p.headers.get("content-encoding", "identity")
|
||||
if current_enc == "identity":
|
||||
p.encode(enc)
|
||||
updated.append(f)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
ctx.log.alert("Encoded %s flows." % len(updated))
|
||||
|
||||
@command.command("flow.encode.options")
|
||||
def encode_options(self) -> typing.Sequence[str]:
|
||||
"""
|
||||
The possible values for an encoding specification.
|
||||
|
||||
"""
|
||||
return ["gzip", "deflate", "br"]
|
||||
|
||||
@command.command("options.load")
|
||||
def options_load(self, path: str) -> None:
|
||||
"""
|
||||
Load options from a file.
|
||||
"""
|
||||
try:
|
||||
optmanager.load_paths(ctx.options, path)
|
||||
except (OSError, exceptions.OptionsError) as e:
|
||||
raise exceptions.CommandError(
|
||||
"Could not load options - %s" % e
|
||||
) from e
|
||||
|
||||
@command.command("options.save")
|
||||
def options_save(self, path: str) -> None:
|
||||
"""
|
||||
Save options to a file.
|
||||
"""
|
||||
try:
|
||||
optmanager.save(ctx.options, path)
|
||||
except OSError as e:
|
||||
raise exceptions.CommandError(
|
||||
"Could not save options - %s" % e
|
||||
) from e
|
||||
|
||||
@command.command("options.reset")
|
||||
def options_reset(self) -> None:
|
||||
"""
|
||||
Reset all options to defaults.
|
||||
"""
|
||||
ctx.options.reset()
|
||||
|
||||
@command.command("options.reset.one")
|
||||
def options_reset_one(self, name: str) -> None:
|
||||
"""
|
||||
Reset one option to its default value.
|
||||
"""
|
||||
if name not in ctx.options:
|
||||
raise exceptions.CommandError("No such option: %s" % name)
|
||||
setattr(
|
||||
ctx.options,
|
||||
name,
|
||||
ctx.options.default(name),
|
||||
)
|
||||
47
mitmproxy/addons/core_option_validation.py
Normal file
47
mitmproxy/addons/core_option_validation.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""
|
||||
The core addon is responsible for verifying core settings that are not
|
||||
checked by other addons.
|
||||
"""
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import platform
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.net import server_spec
|
||||
from mitmproxy.utils import human
|
||||
|
||||
|
||||
class CoreOptionValidation:
|
||||
def configure(self, updated):
|
||||
opts = ctx.options
|
||||
if opts.add_upstream_certs_to_client_chain and not opts.upstream_cert:
|
||||
raise exceptions.OptionsError(
|
||||
"The no-upstream-cert and add-upstream-certs-to-client-chain "
|
||||
"options are mutually exclusive. If no-upstream-cert is enabled "
|
||||
"then the upstream certificate is not retrieved before generating "
|
||||
"the client certificate chain."
|
||||
)
|
||||
if "body_size_limit" in updated and opts.body_size_limit:
|
||||
try:
|
||||
opts._processed["body_size_limit"] = human.parse_size(
|
||||
opts.body_size_limit
|
||||
)
|
||||
except ValueError as e:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid body size limit specification: %s" %
|
||||
opts.body_size_limit
|
||||
)
|
||||
if "mode" in updated:
|
||||
mode = opts.mode
|
||||
if mode.startswith("reverse:") or mode.startswith("upstream:"):
|
||||
try:
|
||||
server_spec.parse_with_mode(mode)
|
||||
except ValueError as e:
|
||||
raise exceptions.OptionsError(str(e)) from e
|
||||
elif mode == "transparent":
|
||||
if not platform.original_addr:
|
||||
raise exceptions.OptionsError(
|
||||
"Transparent mode not supported on this platform."
|
||||
)
|
||||
elif mode not in ["regular", "socks5"]:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid mode specification: %s" % mode
|
||||
)
|
||||
151
mitmproxy/addons/cut.py
Normal file
151
mitmproxy/addons/cut.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import io
|
||||
import csv
|
||||
import typing
|
||||
from mitmproxy import command
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import certs
|
||||
from mitmproxy.utils import strutils
|
||||
|
||||
import pyperclip
|
||||
|
||||
|
||||
def headername(spec: str):
|
||||
if not (spec.startswith("header[") and spec.endswith("]")):
|
||||
raise exceptions.CommandError("Invalid header spec: %s" % spec)
|
||||
return spec[len("header["):-1].strip()
|
||||
|
||||
|
||||
flow_shortcuts = {
|
||||
"q": "request",
|
||||
"s": "response",
|
||||
"cc": "client_conn",
|
||||
"sc": "server_conn",
|
||||
}
|
||||
|
||||
|
||||
def is_addr(v):
|
||||
return isinstance(v, tuple) and len(v) > 1
|
||||
|
||||
|
||||
def extract(cut: str, f: flow.Flow) -> typing.Union[str, bytes]:
|
||||
path = cut.split(".")
|
||||
current = f # type: typing.Any
|
||||
for i, spec in enumerate(path):
|
||||
if spec.startswith("_"):
|
||||
raise exceptions.CommandError("Can't access internal attribute %s" % spec)
|
||||
if isinstance(current, flow.Flow):
|
||||
spec = flow_shortcuts.get(spec, spec)
|
||||
|
||||
part = getattr(current, spec, None)
|
||||
if i == len(path) - 1:
|
||||
if spec == "port" and is_addr(current):
|
||||
return str(current[1])
|
||||
if spec == "host" and is_addr(current):
|
||||
return str(current[0])
|
||||
elif spec.startswith("header["):
|
||||
return current.headers.get(headername(spec), "")
|
||||
elif isinstance(part, bytes):
|
||||
return part
|
||||
elif isinstance(part, bool):
|
||||
return "true" if part else "false"
|
||||
elif isinstance(part, certs.SSLCert):
|
||||
return part.to_pem().decode("ascii")
|
||||
current = part
|
||||
return str(current or "")
|
||||
|
||||
|
||||
def parse_cutspec(s: str) -> typing.Tuple[str, typing.Sequence[str]]:
|
||||
"""
|
||||
Returns (flowspec, [cuts]).
|
||||
|
||||
Raises exceptions.CommandError if input is invalid.
|
||||
"""
|
||||
parts = s.split("|", maxsplit=1)
|
||||
flowspec = "@all"
|
||||
if len(parts) == 2:
|
||||
flowspec = parts[1].strip()
|
||||
cuts = parts[0]
|
||||
cutparts = [i.strip() for i in cuts.split(",") if i.strip()]
|
||||
if len(cutparts) == 0:
|
||||
raise exceptions.CommandError("Invalid cut specification.")
|
||||
return flowspec, cutparts
|
||||
|
||||
|
||||
class Cut:
|
||||
@command.command("cut")
|
||||
def cut(self, cutspec: str) -> command.Cuts:
|
||||
"""
|
||||
Resolve a cut specification of the form "cuts|flowspec". The cuts
|
||||
are a comma-separated list of cut snippets. Cut snippets are
|
||||
attribute paths from the base of the flow object, with a few
|
||||
conveniences - "q", "s", "cc" and "sc" are shortcuts for request,
|
||||
response, client_conn and server_conn, "port" and "host" retrieve
|
||||
parts of an address tuple, ".header[key]" retrieves a header value.
|
||||
Return values converted sensibly: SSL certicates are converted to PEM
|
||||
format, bools are "true" or "false", "bytes" are preserved, and all
|
||||
other values are converted to strings. The flowspec is optional, and
|
||||
if it is not specified, it is assumed to be @all.
|
||||
"""
|
||||
flowspec, cuts = parse_cutspec(cutspec)
|
||||
flows = ctx.master.commands.call_args("view.resolve", [flowspec])
|
||||
ret = []
|
||||
for f in flows:
|
||||
ret.append([extract(c, f) for c in cuts])
|
||||
return ret
|
||||
|
||||
@command.command("cut.save")
|
||||
def save(self, cuts: command.Cuts, path: str) -> None:
|
||||
"""
|
||||
Save cuts to file. If there are multiple rows or columns, the format
|
||||
is UTF-8 encoded CSV. If there is exactly one row and one column,
|
||||
the data is written to file as-is, with raw bytes preserved. If the
|
||||
path is prefixed with a "+", values are appended if there is an
|
||||
existing file.
|
||||
"""
|
||||
append = False
|
||||
if path.startswith("+"):
|
||||
append = True
|
||||
path = path[1:]
|
||||
if len(cuts) == 1 and len(cuts[0]) == 1:
|
||||
with open(path, "ab" if append else "wb") as fp:
|
||||
if fp.tell() > 0:
|
||||
# We're appending to a file that already exists and has content
|
||||
fp.write(b"\n")
|
||||
v = cuts[0][0]
|
||||
if isinstance(v, bytes):
|
||||
fp.write(v)
|
||||
else:
|
||||
fp.write(v.encode("utf8"))
|
||||
ctx.log.alert("Saved single cut.")
|
||||
else:
|
||||
with open(path, "a" if append else "w", newline='', encoding="utf8") as fp:
|
||||
writer = csv.writer(fp)
|
||||
for r in cuts:
|
||||
writer.writerow(
|
||||
[strutils.always_str(c) or "" for c in r] # type: ignore
|
||||
)
|
||||
ctx.log.alert("Saved %s cuts as CSV." % len(cuts))
|
||||
|
||||
@command.command("cut.clip")
|
||||
def clip(self, cuts: command.Cuts) -> None:
|
||||
"""
|
||||
Send cuts to the system clipboard.
|
||||
"""
|
||||
fp = io.StringIO(newline="")
|
||||
if len(cuts) == 1 and len(cuts[0]) == 1:
|
||||
v = cuts[0][0]
|
||||
if isinstance(v, bytes):
|
||||
fp.write(strutils.always_str(v))
|
||||
else:
|
||||
fp.write("utf8")
|
||||
ctx.log.alert("Clipped single cut.")
|
||||
else:
|
||||
writer = csv.writer(fp)
|
||||
for r in cuts:
|
||||
writer.writerow(
|
||||
[strutils.always_str(c) or "" for c in r] # type: ignore
|
||||
)
|
||||
ctx.log.alert("Clipped %s cuts as CSV." % len(cuts))
|
||||
pyperclip.copy(fp.getvalue())
|
||||
38
mitmproxy/addons/disable_h2c.py
Normal file
38
mitmproxy/addons/disable_h2c.py
Normal file
@@ -0,0 +1,38 @@
|
||||
import mitmproxy
|
||||
|
||||
|
||||
class DisableH2C:
|
||||
|
||||
"""
|
||||
We currently only support HTTP/2 over a TLS connection.
|
||||
|
||||
Some clients try to upgrade a connection from HTTP/1.1 to h2c. We need to
|
||||
remove those headers to avoid protocol errors if one endpoints suddenly
|
||||
starts sending HTTP/2 frames.
|
||||
|
||||
Some clients might use HTTP/2 Prior Knowledge to directly initiate a session
|
||||
by sending the connection preface. We just kill those flows.
|
||||
"""
|
||||
|
||||
def process_flow(self, f):
|
||||
if f.request.headers.get('upgrade', '') == 'h2c':
|
||||
mitmproxy.ctx.log.warn("HTTP/2 cleartext connections (h2c upgrade requests) are currently not supported.")
|
||||
del f.request.headers['upgrade']
|
||||
if 'connection' in f.request.headers:
|
||||
del f.request.headers['connection']
|
||||
if 'http2-settings' in f.request.headers:
|
||||
del f.request.headers['http2-settings']
|
||||
|
||||
is_connection_preface = (
|
||||
f.request.method == 'PRI' and
|
||||
f.request.path == '*' and
|
||||
f.request.http_version == 'HTTP/2.0'
|
||||
)
|
||||
if is_connection_preface:
|
||||
f.kill()
|
||||
mitmproxy.ctx.log.warn("Initiating HTTP/2 connections with prior knowledge are currently not supported.")
|
||||
|
||||
# Handlers
|
||||
|
||||
def request(self, f):
|
||||
self.process_flow(f)
|
||||
@@ -1,21 +0,0 @@
|
||||
class DisableH2CleartextUpgrade:
|
||||
|
||||
"""
|
||||
We currently only support HTTP/2 over a TLS connection. Some clients try
|
||||
to upgrade a connection from HTTP/1.1 to h2c, so we need to remove those
|
||||
headers to avoid protocol errors if one endpoints suddenly starts sending
|
||||
HTTP/2 frames.
|
||||
"""
|
||||
|
||||
def process_flow(self, f):
|
||||
if f.request.headers.get('upgrade', '') == 'h2c':
|
||||
del f.request.headers['upgrade']
|
||||
if 'connection' in f.request.headers:
|
||||
del f.request.headers['connection']
|
||||
if 'http2-settings' in f.request.headers:
|
||||
del f.request.headers['http2-settings']
|
||||
|
||||
# Handlers
|
||||
|
||||
def request(self, f):
|
||||
self.process_flow(f)
|
||||
@@ -2,6 +2,7 @@ import itertools
|
||||
import sys
|
||||
|
||||
import click
|
||||
import shutil
|
||||
|
||||
import typing # noqa
|
||||
|
||||
@@ -28,24 +29,18 @@ def colorful(line, styles):
|
||||
class Dumper:
|
||||
def __init__(self, outfile=sys.stdout):
|
||||
self.filter = None # type: flowfilter.TFilter
|
||||
self.flow_detail = None # type: int
|
||||
self.outfp = outfile # type: typing.io.TextIO
|
||||
self.showhost = None # type: bool
|
||||
self.default_contentview = "auto" # type: str
|
||||
|
||||
def configure(self, options, updated):
|
||||
if "filtstr" in updated:
|
||||
if options.filtstr:
|
||||
self.filter = flowfilter.parse(options.filtstr)
|
||||
def configure(self, updated):
|
||||
if "view_filter" in updated:
|
||||
if ctx.options.view_filter:
|
||||
self.filter = flowfilter.parse(ctx.options.view_filter)
|
||||
if not self.filter:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid filter expression: %s" % options.filtstr
|
||||
"Invalid filter expression: %s" % ctx.options.view_filter
|
||||
)
|
||||
else:
|
||||
self.filter = None
|
||||
self.flow_detail = options.flow_detail
|
||||
self.showhost = options.showhost
|
||||
self.default_contentview = options.default_contentview
|
||||
|
||||
def echo(self, text, ident=None, **style):
|
||||
if ident:
|
||||
@@ -66,13 +61,13 @@ class Dumper:
|
||||
|
||||
def _echo_message(self, message):
|
||||
_, lines, error = contentviews.get_message_content_view(
|
||||
self.default_contentview,
|
||||
ctx.options.default_contentview,
|
||||
message
|
||||
)
|
||||
if error:
|
||||
ctx.log.debug(error)
|
||||
|
||||
if self.flow_detail == 3:
|
||||
if ctx.options.flow_detail == 3:
|
||||
lines_to_echo = itertools.islice(lines, 70)
|
||||
else:
|
||||
lines_to_echo = lines
|
||||
@@ -94,14 +89,14 @@ class Dumper:
|
||||
if next(lines, None):
|
||||
self.echo("(cut off)", ident=4, dim=True)
|
||||
|
||||
if self.flow_detail >= 2:
|
||||
if ctx.options.flow_detail >= 2:
|
||||
self.echo("")
|
||||
|
||||
def _echo_request_line(self, flow):
|
||||
if flow.client_conn:
|
||||
client = click.style(
|
||||
strutils.escape_control_characters(
|
||||
repr(flow.client_conn.address)
|
||||
human.format_address(flow.client_conn.address)
|
||||
)
|
||||
)
|
||||
elif flow.request.is_replay:
|
||||
@@ -120,10 +115,13 @@ class Dumper:
|
||||
fg=method_color,
|
||||
bold=True
|
||||
)
|
||||
if self.showhost:
|
||||
if ctx.options.showhost:
|
||||
url = flow.request.pretty_url
|
||||
else:
|
||||
url = flow.request.url
|
||||
terminalWidthLimit = max(shutil.get_terminal_size()[0] - 25, 50)
|
||||
if ctx.options.flow_detail < 1 and len(url) > terminalWidthLimit:
|
||||
url = url[:terminalWidthLimit] + "…"
|
||||
url = click.style(strutils.escape_control_characters(url), bold=True)
|
||||
|
||||
http_version = ""
|
||||
@@ -172,7 +170,7 @@ class Dumper:
|
||||
size = click.style(size, bold=True)
|
||||
|
||||
arrows = click.style(" <<", bold=True)
|
||||
if self.flow_detail == 1:
|
||||
if ctx.options.flow_detail == 1:
|
||||
# This aligns the HTTP response code with the HTTP request method:
|
||||
# 127.0.0.1:59519: GET http://example.com/
|
||||
# << 304 Not Modified 0b
|
||||
@@ -190,16 +188,16 @@ class Dumper:
|
||||
def echo_flow(self, f):
|
||||
if f.request:
|
||||
self._echo_request_line(f)
|
||||
if self.flow_detail >= 2:
|
||||
if ctx.options.flow_detail >= 2:
|
||||
self._echo_headers(f.request.headers)
|
||||
if self.flow_detail >= 3:
|
||||
if ctx.options.flow_detail >= 3:
|
||||
self._echo_message(f.request)
|
||||
|
||||
if f.response:
|
||||
self._echo_response_line(f)
|
||||
if self.flow_detail >= 2:
|
||||
if ctx.options.flow_detail >= 2:
|
||||
self._echo_headers(f.response.headers)
|
||||
if self.flow_detail >= 3:
|
||||
if ctx.options.flow_detail >= 3:
|
||||
self._echo_message(f.response)
|
||||
|
||||
if f.error:
|
||||
@@ -207,7 +205,7 @@ class Dumper:
|
||||
self.echo(" << {}".format(msg), bold=True, fg="red")
|
||||
|
||||
def match(self, f):
|
||||
if self.flow_detail == 0:
|
||||
if ctx.options.flow_detail == 0:
|
||||
return False
|
||||
if not self.filter:
|
||||
return True
|
||||
@@ -234,8 +232,8 @@ class Dumper:
|
||||
def websocket_message(self, f):
|
||||
if self.match(f):
|
||||
message = f.messages[-1]
|
||||
self.echo(message.info)
|
||||
if self.flow_detail >= 3:
|
||||
self.echo(f.message_info(message))
|
||||
if ctx.options.flow_detail >= 3:
|
||||
self._echo_message(message)
|
||||
|
||||
def websocket_end(self, f):
|
||||
@@ -263,5 +261,5 @@ class Dumper:
|
||||
server=repr(f.server_conn.address),
|
||||
direction=direction,
|
||||
))
|
||||
if self.flow_detail >= 3:
|
||||
if ctx.options.flow_detail >= 3:
|
||||
self._echo_message(message)
|
||||
|
||||
75
mitmproxy/addons/export.py
Normal file
75
mitmproxy/addons/export.py
Normal file
@@ -0,0 +1,75 @@
|
||||
import typing
|
||||
|
||||
from mitmproxy import command
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy.utils import strutils
|
||||
from mitmproxy.net.http.http1 import assemble
|
||||
|
||||
import pyperclip
|
||||
|
||||
|
||||
def curl_command(f: flow.Flow) -> str:
|
||||
if not hasattr(f, "request"):
|
||||
raise exceptions.CommandError("Can't export flow with no request.")
|
||||
data = "curl "
|
||||
request = f.request.copy() # type: ignore
|
||||
request.decode(strict=False)
|
||||
for k, v in request.headers.items(multi=True):
|
||||
data += "-H '%s:%s' " % (k, v)
|
||||
if request.method != "GET":
|
||||
data += "-X %s " % request.method
|
||||
data += "'%s'" % request.url
|
||||
if request.content:
|
||||
data += " --data-binary '%s'" % strutils.bytes_to_escaped_str(
|
||||
request.content,
|
||||
escape_single_quotes=True
|
||||
)
|
||||
return data
|
||||
|
||||
|
||||
def raw(f: flow.Flow) -> bytes:
|
||||
if not hasattr(f, "request"):
|
||||
raise exceptions.CommandError("Can't export flow with no request.")
|
||||
return assemble.assemble_request(f.request) # type: ignore
|
||||
|
||||
|
||||
formats = dict(
|
||||
curl = curl_command,
|
||||
raw = raw,
|
||||
)
|
||||
|
||||
|
||||
class Export():
|
||||
@command.command("export.formats")
|
||||
def formats(self) -> typing.Sequence[str]:
|
||||
"""
|
||||
Return a list of the supported export formats.
|
||||
"""
|
||||
return list(sorted(formats.keys()))
|
||||
|
||||
@command.command("export.file")
|
||||
def file(self, fmt: str, f: flow.Flow, path: str) -> None:
|
||||
"""
|
||||
Export a flow to path.
|
||||
"""
|
||||
if fmt not in formats:
|
||||
raise exceptions.CommandError("No such export format: %s" % fmt)
|
||||
func = formats[fmt] # type: typing.Any
|
||||
v = func(f)
|
||||
with open(path, "wb") as fp:
|
||||
if isinstance(v, bytes):
|
||||
fp.write(v)
|
||||
else:
|
||||
fp.write(v.encode("utf-8"))
|
||||
|
||||
@command.command("export.clip")
|
||||
def clip(self, fmt: str, f: flow.Flow) -> None:
|
||||
"""
|
||||
Export a flow to the system clipboard.
|
||||
"""
|
||||
if fmt not in formats:
|
||||
raise exceptions.CommandError("No such export format: %s" % fmt)
|
||||
func = formats[fmt] # type: typing.Any
|
||||
v = strutils.always_str(func(f))
|
||||
pyperclip.copy(v)
|
||||
@@ -1,20 +1,21 @@
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class Intercept:
|
||||
def __init__(self):
|
||||
self.filt = None
|
||||
|
||||
def configure(self, opts, updated):
|
||||
def configure(self, updated):
|
||||
if "intercept" in updated:
|
||||
if not opts.intercept:
|
||||
if not ctx.options.intercept:
|
||||
self.filt = None
|
||||
return
|
||||
self.filt = flowfilter.parse(opts.intercept)
|
||||
self.filt = flowfilter.parse(ctx.options.intercept)
|
||||
if not self.filt:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid interception filter: %s" % opts.intercept
|
||||
"Invalid interception filter: %s" % ctx.options.intercept
|
||||
)
|
||||
|
||||
def process_flow(self, f):
|
||||
|
||||
7
mitmproxy/addons/keepserving.py
Normal file
7
mitmproxy/addons/keepserving.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class KeepServing:
|
||||
def event_processing_complete(self):
|
||||
if not ctx.master.options.keepserving:
|
||||
ctx.master.shutdown()
|
||||
@@ -1,17 +1,18 @@
|
||||
from mitmproxy.addons import wsgiapp
|
||||
from mitmproxy.addons.onboardingapp import app
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class Onboarding(wsgiapp.WSGIApp):
|
||||
name = "onboarding"
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(app.Adapter(app.application), None, None)
|
||||
self.enabled = False
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.host = options.app_host
|
||||
self.port = options.app_port
|
||||
self.enabled = options.app
|
||||
def configure(self, updated):
|
||||
self.host = ctx.options.onboarding_host
|
||||
self.port = ctx.options.onboarding_port
|
||||
|
||||
def request(self, f):
|
||||
if self.enabled:
|
||||
if ctx.options.onboarding:
|
||||
super().request(f)
|
||||
|
||||
@@ -1,35 +1,45 @@
|
||||
import binascii
|
||||
import weakref
|
||||
import ldap3
|
||||
from typing import Optional
|
||||
from typing import MutableMapping # noqa
|
||||
from typing import Tuple
|
||||
|
||||
import passlib.apache
|
||||
|
||||
import mitmproxy.net.http
|
||||
from mitmproxy import connections # noqa
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import http
|
||||
import mitmproxy.net.http
|
||||
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.net.http import status_codes
|
||||
|
||||
REALM = "mitmproxy"
|
||||
|
||||
|
||||
def mkauth(username, password, scheme="basic"):
|
||||
def mkauth(username: str, password: str, scheme: str = "basic") -> str:
|
||||
"""
|
||||
Craft a basic auth string
|
||||
"""
|
||||
v = binascii.b2a_base64(
|
||||
(username + ":" + password).encode("utf8")
|
||||
).decode("ascii")
|
||||
return scheme + " " + v
|
||||
|
||||
|
||||
def parse_http_basic_auth(s):
|
||||
words = s.split()
|
||||
if len(words) != 2:
|
||||
return None
|
||||
scheme = words[0]
|
||||
def parse_http_basic_auth(s: str) -> Tuple[str, str, str]:
|
||||
"""
|
||||
Parse a basic auth header.
|
||||
Raises a ValueError if the input is invalid.
|
||||
"""
|
||||
scheme, authinfo = s.split()
|
||||
if scheme.lower() != "basic":
|
||||
raise ValueError("Unknown scheme")
|
||||
try:
|
||||
user = binascii.a2b_base64(words[1]).decode("utf8", "replace")
|
||||
except binascii.Error:
|
||||
return None
|
||||
parts = user.split(':')
|
||||
if len(parts) != 2:
|
||||
return None
|
||||
return scheme, parts[0], parts[1]
|
||||
user, password = binascii.a2b_base64(authinfo.encode()).decode("utf8", "replace").split(":")
|
||||
except binascii.Error as e:
|
||||
raise ValueError(str(e))
|
||||
return scheme, user, password
|
||||
|
||||
|
||||
class ProxyAuth:
|
||||
@@ -37,112 +47,147 @@ class ProxyAuth:
|
||||
self.nonanonymous = False
|
||||
self.htpasswd = None
|
||||
self.singleuser = None
|
||||
self.ldapserver = None
|
||||
self.authenticated = weakref.WeakKeyDictionary() # type: MutableMapping[connections.ClientConnection, Tuple[str, str]]
|
||||
"""Contains all connections that are permanently authenticated after an HTTP CONNECT"""
|
||||
|
||||
def enabled(self):
|
||||
return any([self.nonanonymous, self.htpasswd, self.singleuser])
|
||||
def enabled(self) -> bool:
|
||||
return any([self.nonanonymous, self.htpasswd, self.singleuser, self.ldapserver])
|
||||
|
||||
def which_auth_header(self, f):
|
||||
if f.mode == "regular":
|
||||
def is_proxy_auth(self) -> bool:
|
||||
"""
|
||||
Returns:
|
||||
- True, if authentication is done as if mitmproxy is a proxy
|
||||
- False, if authentication is done as if mitmproxy is a HTTP server
|
||||
"""
|
||||
return ctx.options.mode in ("regular", "upstream")
|
||||
|
||||
def which_auth_header(self) -> str:
|
||||
if self.is_proxy_auth():
|
||||
return 'Proxy-Authorization'
|
||||
else:
|
||||
return 'Authorization'
|
||||
|
||||
def auth_required_response(self, f):
|
||||
if f.mode == "regular":
|
||||
hdrname = 'Proxy-Authenticate'
|
||||
else:
|
||||
hdrname = 'WWW-Authenticate'
|
||||
|
||||
headers = mitmproxy.net.http.Headers()
|
||||
headers[hdrname] = 'Basic realm="%s"' % REALM
|
||||
|
||||
if f.mode == "transparent":
|
||||
def auth_required_response(self) -> http.HTTPResponse:
|
||||
if self.is_proxy_auth():
|
||||
return http.make_error_response(
|
||||
401,
|
||||
"Authentication Required",
|
||||
headers
|
||||
status_codes.PROXY_AUTH_REQUIRED,
|
||||
headers=mitmproxy.net.http.Headers(Proxy_Authenticate='Basic realm="{}"'.format(REALM)),
|
||||
)
|
||||
else:
|
||||
return http.make_error_response(
|
||||
407,
|
||||
"Proxy Authentication Required",
|
||||
headers,
|
||||
status_codes.UNAUTHORIZED,
|
||||
headers=mitmproxy.net.http.Headers(WWW_Authenticate='Basic realm="{}"'.format(REALM)),
|
||||
)
|
||||
|
||||
def check(self, f):
|
||||
auth_value = f.request.headers.get(self.which_auth_header(f), None)
|
||||
if not auth_value:
|
||||
return False
|
||||
parts = parse_http_basic_auth(auth_value)
|
||||
if not parts:
|
||||
return False
|
||||
scheme, username, password = parts
|
||||
if scheme.lower() != 'basic':
|
||||
return False
|
||||
def check(self, f: http.HTTPFlow) -> Optional[Tuple[str, str]]:
|
||||
"""
|
||||
Check if a request is correctly authenticated.
|
||||
Returns:
|
||||
- a (username, password) tuple if successful,
|
||||
- None, otherwise.
|
||||
"""
|
||||
auth_value = f.request.headers.get(self.which_auth_header(), "")
|
||||
try:
|
||||
scheme, username, password = parse_http_basic_auth(auth_value)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
if self.nonanonymous:
|
||||
pass
|
||||
return username, password
|
||||
elif self.singleuser:
|
||||
if [username, password] != self.singleuser:
|
||||
return False
|
||||
if self.singleuser == [username, password]:
|
||||
return username, password
|
||||
elif self.htpasswd:
|
||||
if not self.htpasswd.check_password(username, password):
|
||||
return False
|
||||
else:
|
||||
raise NotImplementedError("Should never happen.")
|
||||
if self.htpasswd.check_password(username, password):
|
||||
return username, password
|
||||
elif self.ldapserver:
|
||||
if not username or not password:
|
||||
return None
|
||||
dn = ctx.options.proxyauth.split(":")[2]
|
||||
parts = dn.split("?")
|
||||
conn = ldap3.Connection(
|
||||
self.ldapserver,
|
||||
parts[0] + username + parts[1],
|
||||
password,
|
||||
auto_bind=True)
|
||||
if conn:
|
||||
conn.search(parts[1][1:], '(' + parts[0] + username + ')', attributes=['objectclass'])
|
||||
if ctx.options.proxyauth.split(":")[3] in conn.entries[0]['objectclass']:
|
||||
return username, password
|
||||
return None
|
||||
|
||||
return True
|
||||
|
||||
def authenticate(self, f):
|
||||
if self.check(f):
|
||||
del f.request.headers[self.which_auth_header(f)]
|
||||
def authenticate(self, f: http.HTTPFlow) -> bool:
|
||||
valid_credentials = self.check(f)
|
||||
if valid_credentials:
|
||||
f.metadata["proxyauth"] = valid_credentials
|
||||
del f.request.headers[self.which_auth_header()]
|
||||
return True
|
||||
else:
|
||||
f.response = self.auth_required_response(f)
|
||||
f.response = self.auth_required_response()
|
||||
return False
|
||||
|
||||
# Handlers
|
||||
def configure(self, options, updated):
|
||||
if "auth_nonanonymous" in updated:
|
||||
self.nonanonymous = options.auth_nonanonymous
|
||||
if "auth_singleuser" in updated:
|
||||
if options.auth_singleuser:
|
||||
parts = options.auth_singleuser.split(':')
|
||||
if len(parts) != 2:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid single-user auth specification."
|
||||
)
|
||||
self.singleuser = parts
|
||||
else:
|
||||
self.singleuser = None
|
||||
if "auth_htpasswd" in updated:
|
||||
if options.auth_htpasswd:
|
||||
try:
|
||||
self.htpasswd = passlib.apache.HtpasswdFile(
|
||||
options.auth_htpasswd
|
||||
)
|
||||
except (ValueError, OSError) as v:
|
||||
raise exceptions.OptionsError(
|
||||
"Could not open htpasswd file: %s" % v
|
||||
)
|
||||
else:
|
||||
self.htpasswd = None
|
||||
def configure(self, updated):
|
||||
if "proxyauth" in updated:
|
||||
self.nonanonymous = False
|
||||
self.singleuser = None
|
||||
self.htpasswd = None
|
||||
self.ldapserver = None
|
||||
if ctx.options.proxyauth:
|
||||
if ctx.options.proxyauth == "any":
|
||||
self.nonanonymous = True
|
||||
elif ctx.options.proxyauth.startswith("@"):
|
||||
p = ctx.options.proxyauth[1:]
|
||||
try:
|
||||
self.htpasswd = passlib.apache.HtpasswdFile(p)
|
||||
except (ValueError, OSError) as v:
|
||||
raise exceptions.OptionsError(
|
||||
"Could not open htpasswd file: %s" % p
|
||||
)
|
||||
elif ctx.options.proxyauth.startswith("ldap"):
|
||||
parts = ctx.options.proxyauth.split(":")
|
||||
if len(parts) != 4:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid ldap specification"
|
||||
)
|
||||
if parts[0] == "ldaps":
|
||||
server = ldap3.Server(parts[1], use_ssl=True)
|
||||
elif parts[0] == "ldap":
|
||||
server = ldap3.Server(parts[1])
|
||||
else:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid ldap specfication on the first part"
|
||||
)
|
||||
self.ldapserver = server
|
||||
else:
|
||||
parts = ctx.options.proxyauth.split(':')
|
||||
if len(parts) != 2:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid single-user auth specification."
|
||||
)
|
||||
self.singleuser = parts
|
||||
if self.enabled():
|
||||
if options.mode == "transparent":
|
||||
if ctx.options.mode == "transparent":
|
||||
raise exceptions.OptionsError(
|
||||
"Proxy Authentication not supported in transparent mode."
|
||||
)
|
||||
elif options.mode == "socks5":
|
||||
if ctx.options.mode == "socks5":
|
||||
raise exceptions.OptionsError(
|
||||
"Proxy Authentication not supported in SOCKS mode. "
|
||||
"https://github.com/mitmproxy/mitmproxy/issues/738"
|
||||
)
|
||||
# TODO: check for multiple auth options
|
||||
# TODO: check for multiple auth options
|
||||
|
||||
def http_connect(self, f):
|
||||
if self.enabled() and f.mode == "regular":
|
||||
self.authenticate(f)
|
||||
|
||||
def requestheaders(self, f):
|
||||
def http_connect(self, f: http.HTTPFlow) -> None:
|
||||
if self.enabled():
|
||||
# Are we already authenticated in CONNECT?
|
||||
if not (f.mode == "regular" and f.server_conn.via):
|
||||
self.authenticate(f)
|
||||
if self.authenticate(f):
|
||||
self.authenticated[f.client_conn] = f.metadata["proxyauth"]
|
||||
|
||||
def requestheaders(self, f: http.HTTPFlow) -> None:
|
||||
if self.enabled():
|
||||
# Is this connection authenticated by a previous HTTP CONNECT?
|
||||
if f.client_conn in self.authenticated:
|
||||
f.metadata["proxyauth"] = self.authenticated[f.client_conn]
|
||||
return
|
||||
self.authenticate(f)
|
||||
|
||||
56
mitmproxy/addons/readfile.py
Normal file
56
mitmproxy/addons/readfile.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import os.path
|
||||
import sys
|
||||
import typing
|
||||
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import io
|
||||
|
||||
|
||||
class ReadFile:
|
||||
"""
|
||||
An addon that handles reading from file on startup.
|
||||
"""
|
||||
|
||||
def load_flows(self, fo: typing.IO[bytes]) -> int:
|
||||
cnt = 0
|
||||
freader = io.FlowReader(fo)
|
||||
try:
|
||||
for flow in freader.stream():
|
||||
ctx.master.load_flow(flow)
|
||||
cnt += 1
|
||||
except (IOError, exceptions.FlowReadException) as e:
|
||||
if cnt:
|
||||
ctx.log.warn("Flow file corrupted - loaded %i flows." % cnt)
|
||||
else:
|
||||
ctx.log.error("Flow file corrupted.")
|
||||
raise exceptions.FlowReadException(str(e)) from e
|
||||
else:
|
||||
return cnt
|
||||
|
||||
def load_flows_from_path(self, path: str) -> int:
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
return self.load_flows(f)
|
||||
except IOError as e:
|
||||
ctx.log.error("Cannot load flows: {}".format(e))
|
||||
raise exceptions.FlowReadException(str(e)) from e
|
||||
|
||||
def running(self):
|
||||
if ctx.options.rfile:
|
||||
try:
|
||||
self.load_flows_from_path(ctx.options.rfile)
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.OptionsError(e) from e
|
||||
finally:
|
||||
ctx.master.addons.trigger("processing_complete")
|
||||
|
||||
|
||||
class ReadFileStdin(ReadFile):
|
||||
"""Support the special case of "-" for reading from stdin"""
|
||||
def load_flows_from_path(self, path: str) -> int:
|
||||
if path == "-":
|
||||
return self.load_flows(sys.stdin.buffer)
|
||||
else:
|
||||
return super().load_flows_from_path(path)
|
||||
@@ -1,35 +1,81 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
def parse_hook(s):
|
||||
"""
|
||||
Returns a (pattern, regex, replacement) tuple.
|
||||
|
||||
The general form for a replacement hook is as follows:
|
||||
|
||||
/patt/regex/replacement
|
||||
|
||||
The first character specifies the separator. Example:
|
||||
|
||||
:~q:foo:bar
|
||||
|
||||
If only two clauses are specified, the pattern is set to match
|
||||
universally (i.e. ".*"). Example:
|
||||
|
||||
/foo/bar/
|
||||
|
||||
Clauses are parsed from left to right. Extra separators are taken to be
|
||||
part of the final clause. For instance, the replacement clause below is
|
||||
"foo/bar/":
|
||||
|
||||
/one/two/foo/bar/
|
||||
"""
|
||||
sep, rem = s[0], s[1:]
|
||||
parts = rem.split(sep, 2)
|
||||
if len(parts) == 2:
|
||||
patt = ".*"
|
||||
a, b = parts
|
||||
elif len(parts) == 3:
|
||||
patt, a, b = parts
|
||||
else:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid replacement specifier: %s" % s
|
||||
)
|
||||
return patt, a, b
|
||||
|
||||
|
||||
class Replace:
|
||||
def __init__(self):
|
||||
self.lst = []
|
||||
|
||||
def configure(self, options, updated):
|
||||
def configure(self, updated):
|
||||
"""
|
||||
.replacements is a list of tuples (fpat, rex, s):
|
||||
|
||||
fpatt: a string specifying a filter pattern.
|
||||
rex: a regular expression, as bytes.
|
||||
s: the replacement string, as bytes
|
||||
rex: a regular expression, as string.
|
||||
s: the replacement string
|
||||
"""
|
||||
if "replacements" in updated:
|
||||
lst = []
|
||||
for fpatt, rex, s in options.replacements:
|
||||
for rep in ctx.options.replacements:
|
||||
fpatt, rex, s = parse_hook(rep)
|
||||
|
||||
flt = flowfilter.parse(fpatt)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid filter pattern: %s" % fpatt
|
||||
)
|
||||
try:
|
||||
# We should ideally escape here before trying to compile
|
||||
re.compile(rex)
|
||||
except re.error as e:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid regular expression: %s - %s" % (rex, str(e))
|
||||
)
|
||||
if s.startswith("@") and not os.path.isfile(s[1:]):
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid file path: {}".format(s[1:])
|
||||
)
|
||||
lst.append((rex, s, flt))
|
||||
self.lst = lst
|
||||
|
||||
@@ -37,9 +83,9 @@ class Replace:
|
||||
for rex, s, flt in self.lst:
|
||||
if flt(f):
|
||||
if f.response:
|
||||
f.response.replace(rex, s, flags=re.DOTALL)
|
||||
self.replace(f.response, rex, s)
|
||||
else:
|
||||
f.request.replace(rex, s, flags=re.DOTALL)
|
||||
self.replace(f.request, rex, s)
|
||||
|
||||
def request(self, flow):
|
||||
if not flow.reply.has_message:
|
||||
@@ -48,3 +94,14 @@ class Replace:
|
||||
def response(self, flow):
|
||||
if not flow.reply.has_message:
|
||||
self.execute(flow)
|
||||
|
||||
def replace(self, obj, rex, s):
|
||||
if s.startswith("@"):
|
||||
s = os.path.expanduser(s[1:])
|
||||
try:
|
||||
with open(s, "rb") as f:
|
||||
s = f.read()
|
||||
except IOError:
|
||||
ctx.log.warn("Could not read replacement file: %s" % s)
|
||||
return
|
||||
obj.replace(rex, s, flags=re.DOTALL)
|
||||
|
||||
93
mitmproxy/addons/save.py
Normal file
93
mitmproxy/addons/save.py
Normal file
@@ -0,0 +1,93 @@
|
||||
import os.path
|
||||
import typing
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import io
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import flow
|
||||
|
||||
|
||||
class Save:
|
||||
def __init__(self):
|
||||
self.stream = None
|
||||
self.filt = None
|
||||
self.active_flows = set() # type: Set[flow.Flow]
|
||||
|
||||
def open_file(self, path):
|
||||
if path.startswith("+"):
|
||||
path = path[1:]
|
||||
mode = "ab"
|
||||
else:
|
||||
mode = "wb"
|
||||
path = os.path.expanduser(path)
|
||||
return open(path, mode)
|
||||
|
||||
def start_stream_to_path(self, path, flt):
|
||||
try:
|
||||
f = self.open_file(path)
|
||||
except IOError as v:
|
||||
raise exceptions.OptionsError(str(v))
|
||||
self.stream = io.FilteredFlowWriter(f, flt)
|
||||
self.active_flows = set()
|
||||
|
||||
def configure(self, updated):
|
||||
# We're already streaming - stop the previous stream and restart
|
||||
if "save_stream_filter" in updated:
|
||||
if ctx.options.save_stream_filter:
|
||||
self.filt = flowfilter.parse(ctx.options.save_stream_filter)
|
||||
if not self.filt:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid filter specification: %s" % ctx.options.save_stream_filter
|
||||
)
|
||||
else:
|
||||
self.filt = None
|
||||
if "save_stream_file" in updated:
|
||||
if self.stream:
|
||||
self.done()
|
||||
if ctx.options.save_stream_file:
|
||||
self.start_stream_to_path(ctx.options.save_stream_file, self.filt)
|
||||
|
||||
def save(self, flows: typing.Sequence[flow.Flow], path: str) -> None:
|
||||
"""
|
||||
Save flows to a file. If the path starts with a +, flows are
|
||||
appended to the file, otherwise it is over-written.
|
||||
"""
|
||||
try:
|
||||
f = self.open_file(path)
|
||||
except IOError as v:
|
||||
raise exceptions.CommandError(v) from v
|
||||
stream = io.FlowWriter(f)
|
||||
for i in flows:
|
||||
stream.add(i)
|
||||
f.close()
|
||||
ctx.log.alert("Saved %s flows." % len(flows))
|
||||
|
||||
def load(self, l):
|
||||
l.add_command("save.file", self.save)
|
||||
|
||||
def tcp_start(self, flow):
|
||||
if self.stream:
|
||||
self.active_flows.add(flow)
|
||||
|
||||
def tcp_end(self, flow):
|
||||
if self.stream:
|
||||
self.stream.add(flow)
|
||||
self.active_flows.discard(flow)
|
||||
|
||||
def response(self, flow):
|
||||
if self.stream:
|
||||
self.stream.add(flow)
|
||||
self.active_flows.discard(flow)
|
||||
|
||||
def request(self, flow):
|
||||
if self.stream:
|
||||
self.active_flows.add(flow)
|
||||
|
||||
def done(self):
|
||||
if self.stream:
|
||||
for f in self.active_flows:
|
||||
self.stream.add(f)
|
||||
self.active_flows = set([])
|
||||
self.stream.fo.close()
|
||||
self.stream = None
|
||||
@@ -1,258 +1,145 @@
|
||||
import contextlib
|
||||
import os
|
||||
import shlex
|
||||
import importlib
|
||||
import time
|
||||
import sys
|
||||
import threading
|
||||
import traceback
|
||||
import types
|
||||
import typing
|
||||
|
||||
from mitmproxy import addonmanager
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import command
|
||||
from mitmproxy import eventsequence
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import events
|
||||
|
||||
|
||||
import watchdog.events
|
||||
from watchdog.observers import polling
|
||||
|
||||
|
||||
def parse_command(command):
|
||||
"""
|
||||
Returns a (path, args) tuple.
|
||||
"""
|
||||
if not command or not command.strip():
|
||||
raise exceptions.OptionsError("Empty script command.")
|
||||
# Windows: escape all backslashes in the path.
|
||||
if os.name == "nt": # pragma: no cover
|
||||
backslashes = shlex.split(command, posix=False)[0].count("\\")
|
||||
command = command.replace("\\", "\\\\", backslashes)
|
||||
args = shlex.split(command) # pragma: no cover
|
||||
args[0] = os.path.expanduser(args[0])
|
||||
if not os.path.exists(args[0]):
|
||||
raise exceptions.OptionsError(
|
||||
("Script file not found: %s.\r\n"
|
||||
"If your script path contains spaces, "
|
||||
"make sure to wrap it in additional quotes, e.g. -s \"'./foo bar/baz.py' --args\".") %
|
||||
args[0])
|
||||
elif os.path.isdir(args[0]):
|
||||
raise exceptions.OptionsError("Not a file: %s" % args[0])
|
||||
return args[0], args[1:]
|
||||
|
||||
|
||||
def cut_traceback(tb, func_name):
|
||||
"""
|
||||
Cut off a traceback at the function with the given name.
|
||||
The func_name's frame is excluded.
|
||||
|
||||
Args:
|
||||
tb: traceback object, as returned by sys.exc_info()[2]
|
||||
func_name: function name
|
||||
|
||||
Returns:
|
||||
Reduced traceback.
|
||||
"""
|
||||
tb_orig = tb
|
||||
|
||||
for _, _, fname, _ in traceback.extract_tb(tb):
|
||||
tb = tb.tb_next
|
||||
if fname == func_name:
|
||||
break
|
||||
|
||||
if tb is None:
|
||||
# We could not find the method, take the full stack trace.
|
||||
# This may happen on some Python interpreters/flavors (e.g. PyInstaller).
|
||||
return tb_orig
|
||||
else:
|
||||
return tb
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def scriptenv(path, args):
|
||||
oldargs = sys.argv
|
||||
sys.argv = [path] + args
|
||||
script_dir = os.path.dirname(os.path.abspath(path))
|
||||
sys.path.append(script_dir)
|
||||
def load_script(actx, path):
|
||||
if not os.path.exists(path):
|
||||
ctx.log.info("No such file: %s" % path)
|
||||
return
|
||||
loader = importlib.machinery.SourceFileLoader(os.path.basename(path), path)
|
||||
try:
|
||||
yield
|
||||
except SystemExit as v:
|
||||
ctx.log.error("Script exited with code %s" % v.code)
|
||||
except Exception:
|
||||
etype, value, tb = sys.exc_info()
|
||||
tb = cut_traceback(tb, "scriptenv").tb_next
|
||||
ctx.log.error(
|
||||
"Script error: %s" % "".join(
|
||||
traceback.format_exception(etype, value, tb)
|
||||
)
|
||||
)
|
||||
oldpath = sys.path
|
||||
sys.path.insert(0, os.path.dirname(path))
|
||||
with addonmanager.safecall():
|
||||
m = loader.load_module()
|
||||
if not getattr(m, "name", None):
|
||||
m.name = path
|
||||
return m
|
||||
finally:
|
||||
sys.argv = oldargs
|
||||
sys.path.pop()
|
||||
|
||||
|
||||
def load_script(path, args):
|
||||
with open(path, "rb") as f:
|
||||
try:
|
||||
code = compile(f.read(), path, 'exec')
|
||||
except SyntaxError as e:
|
||||
ctx.log.error(
|
||||
"Script error: %s line %s: %s" % (
|
||||
e.filename, e.lineno, e.msg
|
||||
)
|
||||
)
|
||||
return
|
||||
ns = {'__file__': os.path.abspath(path)}
|
||||
with scriptenv(path, args):
|
||||
exec(code, ns)
|
||||
return types.SimpleNamespace(**ns)
|
||||
|
||||
|
||||
class ReloadHandler(watchdog.events.FileSystemEventHandler):
|
||||
def __init__(self, callback):
|
||||
self.callback = callback
|
||||
|
||||
def filter(self, event):
|
||||
if event.is_directory:
|
||||
return False
|
||||
if os.path.basename(event.src_path).startswith("."):
|
||||
return False
|
||||
return True
|
||||
|
||||
def on_modified(self, event):
|
||||
if self.filter(event):
|
||||
self.callback()
|
||||
|
||||
def on_created(self, event):
|
||||
if self.filter(event):
|
||||
self.callback()
|
||||
sys.path[:] = oldpath
|
||||
|
||||
|
||||
class Script:
|
||||
"""
|
||||
An addon that manages a single script.
|
||||
"""
|
||||
def __init__(self, command):
|
||||
self.name = command
|
||||
ReloadInterval = 2
|
||||
|
||||
self.command = command
|
||||
self.path, self.args = parse_command(command)
|
||||
def __init__(self, path):
|
||||
self.name = "scriptmanager:" + path
|
||||
self.path = path
|
||||
self.fullpath = os.path.expanduser(path)
|
||||
self.ns = None
|
||||
self.observer = None
|
||||
self.dead = False
|
||||
|
||||
self.last_options = None
|
||||
self.should_reload = threading.Event()
|
||||
self.last_load = 0
|
||||
self.last_mtime = 0
|
||||
if not os.path.isfile(self.fullpath):
|
||||
raise exceptions.OptionsError("No such script: %s" % path)
|
||||
|
||||
for i in events.Events:
|
||||
if not hasattr(self, i):
|
||||
def mkprox():
|
||||
evt = i
|
||||
|
||||
def prox(*args, **kwargs):
|
||||
self.run(evt, *args, **kwargs)
|
||||
return prox
|
||||
setattr(self, i, mkprox())
|
||||
|
||||
def run(self, name, *args, **kwargs):
|
||||
# It's possible for ns to be un-initialised if we failed during
|
||||
# configure
|
||||
if self.ns is not None and not self.dead:
|
||||
func = getattr(self.ns, name, None)
|
||||
if func:
|
||||
with scriptenv(self.path, self.args):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
def reload(self):
|
||||
self.should_reload.set()
|
||||
|
||||
def load_script(self):
|
||||
self.ns = load_script(self.path, self.args)
|
||||
ret = self.run("start")
|
||||
if ret:
|
||||
self.ns = ret
|
||||
self.run("start")
|
||||
@property
|
||||
def addons(self):
|
||||
return [self.ns] if self.ns else []
|
||||
|
||||
def tick(self):
|
||||
if self.should_reload.is_set():
|
||||
self.should_reload.clear()
|
||||
ctx.log.info("Reloading script: %s" % self.name)
|
||||
self.ns = load_script(self.path, self.args)
|
||||
self.start()
|
||||
self.configure(self.last_options, self.last_options.keys())
|
||||
else:
|
||||
self.run("tick")
|
||||
|
||||
def start(self):
|
||||
self.load_script()
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.last_options = options
|
||||
if not self.observer:
|
||||
self.observer = polling.PollingObserver()
|
||||
# Bind the handler to the real underlying master object
|
||||
self.observer.schedule(
|
||||
ReloadHandler(self.reload),
|
||||
os.path.dirname(self.path) or "."
|
||||
)
|
||||
self.observer.start()
|
||||
self.run("configure", options, updated)
|
||||
|
||||
def done(self):
|
||||
self.run("done")
|
||||
self.dead = True
|
||||
if time.time() - self.last_load > self.ReloadInterval:
|
||||
mtime = os.stat(self.fullpath).st_mtime
|
||||
if mtime > self.last_mtime:
|
||||
ctx.log.info("Loading script: %s" % self.path)
|
||||
if self.ns:
|
||||
ctx.master.addons.remove(self.ns)
|
||||
self.ns = load_script(ctx, self.fullpath)
|
||||
if self.ns:
|
||||
# We're already running, so we have to explicitly register and
|
||||
# configure the addon
|
||||
ctx.master.addons.register(self.ns)
|
||||
ctx.master.addons.invoke_addon(self.ns, "running")
|
||||
ctx.master.addons.invoke_addon(
|
||||
self.ns,
|
||||
"configure",
|
||||
ctx.options.keys()
|
||||
)
|
||||
self.last_load = time.time()
|
||||
self.last_mtime = mtime
|
||||
|
||||
|
||||
class ScriptLoader:
|
||||
"""
|
||||
An addon that manages loading scripts from options.
|
||||
"""
|
||||
def run_once(self, command, flows):
|
||||
sc = Script(command)
|
||||
sc.load_script()
|
||||
for f in flows:
|
||||
for evt, o in events.event_sequence(f):
|
||||
sc.run(evt, o)
|
||||
sc.done()
|
||||
return sc
|
||||
def __init__(self):
|
||||
self.is_running = False
|
||||
self.addons = []
|
||||
|
||||
def configure(self, options, updated):
|
||||
def running(self):
|
||||
self.is_running = True
|
||||
|
||||
@command.command("script.run")
|
||||
def script_run(self, flows: typing.Sequence[flow.Flow], path: str) -> None:
|
||||
"""
|
||||
Run a script on the specified flows. The script is loaded with
|
||||
default options, and all lifecycle events for each flow are
|
||||
simulated.
|
||||
"""
|
||||
try:
|
||||
s = Script(path)
|
||||
l = addonmanager.Loader(ctx.master)
|
||||
ctx.master.addons.invoke_addon(s, "load", l)
|
||||
ctx.master.addons.invoke_addon(s, "configure", ctx.options.keys())
|
||||
# Script is loaded on the first tick
|
||||
ctx.master.addons.invoke_addon(s, "tick")
|
||||
for f in flows:
|
||||
for evt, arg in eventsequence.iterate(f):
|
||||
ctx.master.addons.invoke_addon(s, evt, arg)
|
||||
except exceptions.OptionsError as e:
|
||||
raise exceptions.CommandError("Error running script: %s" % e) from e
|
||||
|
||||
def configure(self, updated):
|
||||
if "scripts" in updated:
|
||||
for s in options.scripts:
|
||||
if options.scripts.count(s) > 1:
|
||||
for s in ctx.options.scripts:
|
||||
if ctx.options.scripts.count(s) > 1:
|
||||
raise exceptions.OptionsError("Duplicate script: %s" % s)
|
||||
|
||||
for a in ctx.master.addons.chain[:]:
|
||||
if isinstance(a, Script) and a.name not in options.scripts:
|
||||
for a in self.addons[:]:
|
||||
if a.path not in ctx.options.scripts:
|
||||
ctx.log.info("Un-loading script: %s" % a.name)
|
||||
ctx.master.addons.remove(a)
|
||||
self.addons.remove(a)
|
||||
|
||||
# The machinations below are to ensure that:
|
||||
# - Scripts remain in the same order
|
||||
# - Scripts are listed directly after the script addon. This is
|
||||
# needed to ensure that interactions with, for instance, flow
|
||||
# serialization remains correct.
|
||||
# - Scripts are not initialized un-necessarily. If only a
|
||||
# script's order in the script list has changed, it should simply
|
||||
# be moved.
|
||||
# script's order in the script list has changed, it is just
|
||||
# moved.
|
||||
|
||||
current = {}
|
||||
for a in ctx.master.addons.chain[:]:
|
||||
if isinstance(a, Script):
|
||||
current[a.name] = a
|
||||
ctx.master.addons.chain.remove(a)
|
||||
for a in self.addons:
|
||||
current[a.path] = a
|
||||
|
||||
ordered = []
|
||||
newscripts = []
|
||||
for s in options.scripts:
|
||||
for s in ctx.options.scripts:
|
||||
if s in current:
|
||||
ordered.append(current[s])
|
||||
else:
|
||||
ctx.log.info("Loading script: %s" % s)
|
||||
sc = Script(s)
|
||||
ordered.append(sc)
|
||||
newscripts.append(sc)
|
||||
|
||||
ochain = ctx.master.addons.chain
|
||||
pos = ochain.index(self)
|
||||
ctx.master.addons.chain = ochain[:pos + 1] + ordered + ochain[pos + 1:]
|
||||
self.addons = ordered
|
||||
|
||||
for s in newscripts:
|
||||
ctx.master.addons.startup(s)
|
||||
ctx.master.addons.register(s)
|
||||
if self.is_running:
|
||||
# If we're already running, we configure and tell the addon
|
||||
# we're up and running.
|
||||
ctx.master.addons.invoke_addon(s, "running")
|
||||
|
||||
@@ -1,29 +1,50 @@
|
||||
import hashlib
|
||||
import urllib
|
||||
import typing
|
||||
from typing import Any # noqa
|
||||
from typing import List # noqa
|
||||
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import flow
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import io
|
||||
from mitmproxy import command
|
||||
|
||||
|
||||
class ServerPlayback:
|
||||
def __init__(self):
|
||||
self.options = None
|
||||
|
||||
self.flowmap = {}
|
||||
self.stop = False
|
||||
self.final_flow = None
|
||||
self.configured = False
|
||||
|
||||
def load(self, flows):
|
||||
@command.command("replay.server")
|
||||
def load_flows(self, flows: typing.Sequence[flow.Flow]) -> None:
|
||||
"""
|
||||
Replay server responses from flows.
|
||||
"""
|
||||
self.flowmap = {}
|
||||
for i in flows:
|
||||
if i.response:
|
||||
if i.response: # type: ignore
|
||||
l = self.flowmap.setdefault(self._hash(i), [])
|
||||
l.append(i)
|
||||
ctx.master.addons.trigger("update", [])
|
||||
|
||||
def clear(self):
|
||||
@command.command("replay.server.file")
|
||||
def load_file(self, path: str) -> None:
|
||||
try:
|
||||
flows = io.read_flows_from_paths([path])
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.CommandError(str(e))
|
||||
self.load_flows(flows)
|
||||
|
||||
@command.command("replay.server.stop")
|
||||
def clear(self) -> None:
|
||||
"""
|
||||
Stop server replay.
|
||||
"""
|
||||
self.flowmap = {}
|
||||
ctx.master.addons.trigger("update", [])
|
||||
|
||||
def count(self):
|
||||
return sum([len(i) for i in self.flowmap.values()])
|
||||
@@ -38,27 +59,27 @@ class ServerPlayback:
|
||||
queriesArray = urllib.parse.parse_qsl(query, keep_blank_values=True)
|
||||
|
||||
key = [str(r.port), str(r.scheme), str(r.method), str(path)] # type: List[Any]
|
||||
if not self.options.server_replay_ignore_content:
|
||||
if self.options.server_replay_ignore_payload_params and r.multipart_form:
|
||||
if not ctx.options.server_replay_ignore_content:
|
||||
if ctx.options.server_replay_ignore_payload_params and r.multipart_form:
|
||||
key.extend(
|
||||
(k, v)
|
||||
for k, v in r.multipart_form.items(multi=True)
|
||||
if k.decode(errors="replace") not in self.options.server_replay_ignore_payload_params
|
||||
if k.decode(errors="replace") not in ctx.options.server_replay_ignore_payload_params
|
||||
)
|
||||
elif self.options.server_replay_ignore_payload_params and r.urlencoded_form:
|
||||
elif ctx.options.server_replay_ignore_payload_params and r.urlencoded_form:
|
||||
key.extend(
|
||||
(k, v)
|
||||
for k, v in r.urlencoded_form.items(multi=True)
|
||||
if k not in self.options.server_replay_ignore_payload_params
|
||||
if k not in ctx.options.server_replay_ignore_payload_params
|
||||
)
|
||||
else:
|
||||
key.append(str(r.raw_content))
|
||||
|
||||
if not self.options.server_replay_ignore_host:
|
||||
if not ctx.options.server_replay_ignore_host:
|
||||
key.append(r.host)
|
||||
|
||||
filtered = []
|
||||
ignore_params = self.options.server_replay_ignore_params or []
|
||||
ignore_params = ctx.options.server_replay_ignore_params or []
|
||||
for p in queriesArray:
|
||||
if p[0] not in ignore_params:
|
||||
filtered.append(p)
|
||||
@@ -66,9 +87,9 @@ class ServerPlayback:
|
||||
key.append(p[0])
|
||||
key.append(p[1])
|
||||
|
||||
if self.options.server_replay_use_headers:
|
||||
if ctx.options.server_replay_use_headers:
|
||||
headers = []
|
||||
for i in self.options.server_replay_use_headers:
|
||||
for i in ctx.options.server_replay_use_headers:
|
||||
v = r.headers.get(i)
|
||||
headers.append((i, v))
|
||||
key.append(headers)
|
||||
@@ -83,7 +104,7 @@ class ServerPlayback:
|
||||
"""
|
||||
hsh = self._hash(request)
|
||||
if hsh in self.flowmap:
|
||||
if self.options.server_replay_nopop:
|
||||
if ctx.options.server_replay_nopop:
|
||||
return self.flowmap[hsh][0]
|
||||
else:
|
||||
ret = self.flowmap[hsh].pop(0)
|
||||
@@ -91,20 +112,18 @@ class ServerPlayback:
|
||||
del self.flowmap[hsh]
|
||||
return ret
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.options = options
|
||||
if "server_replay" in updated:
|
||||
self.clear()
|
||||
if options.server_replay:
|
||||
try:
|
||||
flows = io.read_flows_from_paths(options.server_replay)
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.OptionsError(str(e))
|
||||
self.load(flows)
|
||||
def configure(self, updated):
|
||||
if not self.configured and ctx.options.server_replay:
|
||||
self.configured = True
|
||||
try:
|
||||
flows = io.read_flows_from_paths(ctx.options.server_replay)
|
||||
except exceptions.FlowReadException as e:
|
||||
raise exceptions.OptionsError(str(e))
|
||||
self.load_flows(flows)
|
||||
|
||||
def tick(self):
|
||||
if self.stop and not self.final_flow.live:
|
||||
ctx.master.shutdown()
|
||||
ctx.master.addons.trigger("processing_complete")
|
||||
|
||||
def request(self, f):
|
||||
if self.flowmap:
|
||||
@@ -112,13 +131,13 @@ class ServerPlayback:
|
||||
if rflow:
|
||||
response = rflow.response.copy()
|
||||
response.is_replay = True
|
||||
if self.options.refresh_server_playback:
|
||||
if ctx.options.refresh_server_playback:
|
||||
response.refresh()
|
||||
f.response = response
|
||||
if not self.flowmap and not self.options.keepserving:
|
||||
if not self.flowmap:
|
||||
self.final_flow = f
|
||||
self.stop = True
|
||||
elif self.options.replay_kill_extra:
|
||||
elif ctx.options.replay_kill_extra:
|
||||
ctx.log.warn(
|
||||
"server_playback: killed non-replay request {}".format(
|
||||
f.request.url
|
||||
|
||||
@@ -1,22 +1,55 @@
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
def parse_setheader(s):
    """
        Returns a (pattern, regex, replacement) tuple.

        The general form for a replacement hook is as follows:

            /patt/regex/replacement

        The first character specifies the separator. Example:

            :~q:foo:bar

        If only two clauses are specified, the pattern is set to match
        universally (i.e. ".*"). Example:

            /foo/bar/

        Clauses are parsed from left to right. Extra separators are taken to be
        part of the final clause. For instance, the replacement clause below is
        "foo/bar/":

            /one/two/foo/bar/
    """
    sep, rem = s[0], s[1:]
    parts = rem.split(sep, 2)
    if len(parts) == 2:
        patt = ".*"
        a, b = parts
    elif len(parts) == 3:
        patt, a, b = parts
    else:
        raise exceptions.OptionsError(
            "Invalid replacement specifier: %s" % s
        )
    return patt, a, b
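To make the clause rules above concrete, a few illustrative inputs and the tuples they parse to (examples made up here, not taken from the diff):

    parse_setheader("/~q/Host/example.org")  # -> ("~q", "Host", "example.org")
    parse_setheader(":foo:bar")              # -> (".*", "foo", "bar"); only two clauses, so the pattern defaults to ".*"
    parse_setheader("/one/two/foo/bar/")     # -> ("one", "two", "foo/bar/"); extra separators stay in the final clause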
|
||||
|
||||
|
||||
class SetHeaders:
|
||||
def __init__(self):
|
||||
self.lst = []
|
||||
|
||||
def configure(self, options, updated):
|
||||
"""
|
||||
options.setheaders is a tuple of (fpatt, header, value)
|
||||
|
||||
fpatt: String specifying a filter pattern.
|
||||
header: Header name.
|
||||
value: Header value string
|
||||
"""
|
||||
def configure(self, updated):
|
||||
if "setheaders" in updated:
|
||||
self.lst = []
|
||||
for fpatt, header, value in options.setheaders:
|
||||
for shead in ctx.options.setheaders:
|
||||
fpatt, header, value = parse_setheader(shead)
|
||||
|
||||
flt = flowfilter.parse(fpatt)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import ctx
|
||||
|
||||
|
||||
class StickyAuth:
|
||||
@@ -7,13 +8,13 @@ class StickyAuth:
|
||||
self.flt = None
|
||||
self.hosts = {}
|
||||
|
||||
def configure(self, options, updated):
|
||||
def configure(self, updated):
|
||||
if "stickyauth" in updated:
|
||||
if options.stickyauth:
|
||||
flt = flowfilter.parse(options.stickyauth)
|
||||
if ctx.options.stickyauth:
|
||||
flt = flowfilter.parse(ctx.options.stickyauth)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
"stickyauth: invalid filter expression: %s" % options.stickyauth
|
||||
"stickyauth: invalid filter expression: %s" % ctx.options.stickyauth
|
||||
)
|
||||
self.flt = flt
|
||||
else:
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
import collections
|
||||
from http import cookiejar
|
||||
from typing import List, Tuple, Dict, Optional # noqa
|
||||
|
||||
from mitmproxy import http, flowfilter, ctx, exceptions
|
||||
from mitmproxy.net.http import cookies
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
TOrigin = Tuple[str, int, str]
|
||||
|
||||
|
||||
def ckey(attrs, f):
|
||||
def ckey(attrs: Dict[str, str], f: http.HTTPFlow) -> TOrigin:
|
||||
"""
|
||||
Returns a (domain, port, path) tuple.
|
||||
"""
|
||||
@@ -20,32 +21,32 @@ def ckey(attrs, f):
|
||||
return (domain, f.request.port, path)
|
||||
|
||||
|
||||
def domain_match(a, b):
|
||||
if cookiejar.domain_match(a, b):
|
||||
def domain_match(a: str, b: str) -> bool:
|
||||
if cookiejar.domain_match(a, b): # type: ignore
|
||||
return True
|
||||
elif cookiejar.domain_match(a, b.strip(".")):
|
||||
elif cookiejar.domain_match(a, b.strip(".")): # type: ignore
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class StickyCookie:
|
||||
def __init__(self):
|
||||
self.jar = collections.defaultdict(dict)
|
||||
self.flt = None
|
||||
self.jar = collections.defaultdict(dict) # type: Dict[TOrigin, Dict[str, str]]
|
||||
self.flt = None # type: Optional[flowfilter.TFilter]
|
||||
|
||||
def configure(self, options, updated):
|
||||
def configure(self, updated):
|
||||
if "stickycookie" in updated:
|
||||
if options.stickycookie:
|
||||
flt = flowfilter.parse(options.stickycookie)
|
||||
if ctx.options.stickycookie:
|
||||
flt = flowfilter.parse(ctx.options.stickycookie)
|
||||
if not flt:
|
||||
raise exceptions.OptionsError(
|
||||
"stickycookie: invalid filter expression: %s" % options.stickycookie
|
||||
"stickycookie: invalid filter expression: %s" % ctx.options.stickycookie
|
||||
)
|
||||
self.flt = flt
|
||||
else:
|
||||
self.flt = None
|
||||
|
||||
def response(self, flow):
|
||||
def response(self, flow: http.HTTPFlow):
|
||||
if self.flt:
|
||||
for name, (value, attrs) in flow.response.cookies.items(multi=True):
|
||||
# FIXME: We now know that Cookie.py screws up some cookies with
|
||||
@@ -62,24 +63,21 @@ class StickyCookie:
|
||||
if not self.jar[dom_port_path]:
|
||||
self.jar.pop(dom_port_path, None)
|
||||
else:
|
||||
b = attrs.copy()
|
||||
b.insert(0, name, value)
|
||||
self.jar[dom_port_path][name] = b
|
||||
self.jar[dom_port_path][name] = value
|
||||
|
||||
def request(self, flow):
|
||||
def request(self, flow: http.HTTPFlow):
|
||||
if self.flt:
|
||||
l = []
|
||||
cookie_list = [] # type: List[Tuple[str,str]]
|
||||
if flowfilter.match(self.flt, flow):
|
||||
for domain, port, path in self.jar.keys():
|
||||
for (domain, port, path), c in self.jar.items():
|
||||
match = [
|
||||
domain_match(flow.request.host, domain),
|
||||
flow.request.port == port,
|
||||
flow.request.path.startswith(path)
|
||||
]
|
||||
if all(match):
|
||||
c = self.jar[(domain, port, path)]
|
||||
l.extend([cookies.format_cookie_header(c[name].items(multi=True)) for name in c.keys()])
|
||||
if l:
|
||||
cookie_list.extend(c.items())
|
||||
if cookie_list:
|
||||
# FIXME: we need to formalise this...
|
||||
flow.request.stickycookie = True
|
||||
flow.request.headers["cookie"] = "; ".join(l)
|
||||
flow.metadata["stickycookie"] = True
|
||||
flow.request.headers["cookie"] = cookies.format_cookie_header(cookie_list)
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
from mitmproxy.net.http import http1
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.utils import human
|
||||
|
||||
|
||||
class StreamBodies:
|
||||
def __init__(self):
|
||||
self.max_size = None
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.max_size = options.stream_large_bodies
|
||||
def configure(self, updated):
|
||||
if "stream_large_bodies" in updated and ctx.options.stream_large_bodies:
|
||||
try:
|
||||
self.max_size = human.parse_size(ctx.options.stream_large_bodies)
|
||||
except ValueError as e:
|
||||
raise exceptions.OptionsError(e)
|
||||
|
||||
def run(self, f, is_request):
|
||||
if self.max_size:
|
||||
|
||||
@@ -1,68 +0,0 @@
|
||||
import os.path
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import io
|
||||
|
||||
|
||||
class StreamFile:
|
||||
def __init__(self):
|
||||
self.stream = None
|
||||
self.filt = None
|
||||
self.active_flows = set() # type: Set[flow.Flow]
|
||||
|
||||
def start_stream_to_path(self, path, mode, flt):
|
||||
path = os.path.expanduser(path)
|
||||
try:
|
||||
f = open(path, mode)
|
||||
except IOError as v:
|
||||
raise exceptions.OptionsError(str(v))
|
||||
self.stream = io.FilteredFlowWriter(f, flt)
|
||||
self.active_flows = set()
|
||||
|
||||
def configure(self, options, updated):
|
||||
# We're already streaming - stop the previous stream and restart
|
||||
if "filtstr" in updated:
|
||||
if options.filtstr:
|
||||
self.filt = flowfilter.parse(options.filtstr)
|
||||
if not self.filt:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid filter specification: %s" % options.filtstr
|
||||
)
|
||||
else:
|
||||
self.filt = None
|
||||
if "streamfile" in updated:
|
||||
if self.stream:
|
||||
self.done()
|
||||
if options.streamfile:
|
||||
if options.streamfile_append:
|
||||
mode = "ab"
|
||||
else:
|
||||
mode = "wb"
|
||||
self.start_stream_to_path(options.streamfile, mode, self.filt)
|
||||
|
||||
def tcp_start(self, flow):
|
||||
if self.stream:
|
||||
self.active_flows.add(flow)
|
||||
|
||||
def tcp_end(self, flow):
|
||||
if self.stream:
|
||||
self.stream.add(flow)
|
||||
self.active_flows.discard(flow)
|
||||
|
||||
def response(self, flow):
|
||||
if self.stream:
|
||||
self.stream.add(flow)
|
||||
self.active_flows.discard(flow)
|
||||
|
||||
def request(self, flow):
|
||||
if self.stream:
|
||||
self.active_flows.add(flow)
|
||||
|
||||
def done(self):
|
||||
if self.stream:
|
||||
for flow in self.active_flows:
|
||||
self.stream.add(flow)
|
||||
self.active_flows = set([])
|
||||
self.stream.fo.close()
|
||||
self.stream = None
|
||||
@@ -2,21 +2,28 @@ import sys
|
||||
import click
|
||||
|
||||
from mitmproxy import log
|
||||
from mitmproxy import ctx
|
||||
|
||||
# These get over-ridden by the save execution context. Keep them around so we
|
||||
# can log directly.
|
||||
realstdout = sys.stdout
|
||||
realstderr = sys.stderr
|
||||
|
||||
|
||||
class TermLog:
|
||||
def __init__(self, outfile=sys.stdout):
|
||||
self.options = None
|
||||
def __init__(self, outfile=None):
|
||||
self.outfile = outfile
|
||||
|
||||
def configure(self, options, updated):
|
||||
self.options = options
|
||||
|
||||
def log(self, e):
|
||||
if self.options.verbosity >= log.log_tier(e.level):
|
||||
if log.log_tier(e.level) == log.log_tier("error"):
|
||||
outfile = self.outfile or realstderr
|
||||
else:
|
||||
outfile = self.outfile or realstdout
|
||||
|
||||
if ctx.options.verbosity >= log.log_tier(e.level):
|
||||
click.secho(
|
||||
e.msg,
|
||||
file=self.outfile,
|
||||
file=outfile,
|
||||
fg=dict(error="red", warn="yellow").get(e.level),
|
||||
dim=(e.level == "debug"),
|
||||
err=(e.level == "error")
|
||||
|
||||
17 mitmproxy/addons/termstatus.py Normal file
@@ -0,0 +1,17 @@
from mitmproxy import ctx
from mitmproxy.utils import human

"""
A tiny addon to print the proxy status to terminal. Eventually this could
also print some stats on exit.
"""


class TermStatus:
    def running(self):
        if ctx.options.server:
            ctx.log.info(
                "Proxy server listening at http://{}".format(
                    human.format_address(ctx.master.server.address)
                )
            )
||||
@@ -2,6 +2,7 @@ import re
|
||||
import base64
|
||||
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy.utils import strutils
|
||||
|
||||
|
||||
@@ -26,20 +27,17 @@ class UpstreamAuth():
|
||||
"""
|
||||
def __init__(self):
|
||||
self.auth = None
|
||||
self.root_mode = None
|
||||
|
||||
def configure(self, options, updated):
|
||||
def configure(self, updated):
|
||||
# FIXME: We're doing this because our proxy core is terminally confused
|
||||
# at the moment. Ideally, we should be able to check if we're in
|
||||
# reverse proxy mode at the HTTP layer, so that scripts can put the
|
||||
# proxy in reverse proxy mode for specific requests.
|
||||
if "mode" in updated:
|
||||
self.root_mode = options.mode
|
||||
if "upstream_auth" in updated:
|
||||
if options.upstream_auth is None:
|
||||
if ctx.options.upstream_auth is None:
|
||||
self.auth = None
|
||||
else:
|
||||
self.auth = parse_upstream_auth(options.upstream_auth)
|
||||
self.auth = parse_upstream_auth(ctx.options.upstream_auth)
|
||||
|
||||
def http_connect(self, f):
|
||||
if self.auth and f.mode == "upstream":
|
||||
@@ -49,5 +47,5 @@ class UpstreamAuth():
|
||||
if self.auth:
|
||||
if f.mode == "upstream" and not f.server_conn.via:
|
||||
f.request.headers["Proxy-Authorization"] = self.auth
|
||||
elif self.root_mode == "reverse":
|
||||
elif ctx.options.mode == "reverse":
|
||||
f.request.headers["Proxy-Authorization"] = self.auth
|
||||
|
||||
@@ -18,6 +18,11 @@ import sortedcontainers
|
||||
import mitmproxy.flow
|
||||
from mitmproxy import flowfilter
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import command
|
||||
from mitmproxy import connections
|
||||
from mitmproxy import ctx
|
||||
from mitmproxy import io
|
||||
from mitmproxy import http # noqa
|
||||
|
||||
# The underlying sorted list implementation expects the sort key to be stable
|
||||
# for the lifetime of the object. However, if we sort by size, for instance,
|
||||
@@ -34,7 +39,7 @@ class _OrderKey:
|
||||
def __init__(self, view):
|
||||
self.view = view
|
||||
|
||||
def generate(self, f: mitmproxy.flow.Flow) -> typing.Any: # pragma: no cover
|
||||
def generate(self, f: http.HTTPFlow) -> typing.Any: # pragma: no cover
|
||||
pass
|
||||
|
||||
def refresh(self, f):
|
||||
@@ -64,22 +69,22 @@ class _OrderKey:
|
||||
|
||||
|
||||
class OrderRequestStart(_OrderKey):
|
||||
def generate(self, f: mitmproxy.flow.Flow) -> datetime.datetime:
|
||||
def generate(self, f: http.HTTPFlow) -> datetime.datetime:
|
||||
return f.request.timestamp_start or 0
|
||||
|
||||
|
||||
class OrderRequestMethod(_OrderKey):
|
||||
def generate(self, f: mitmproxy.flow.Flow) -> str:
|
||||
def generate(self, f: http.HTTPFlow) -> str:
|
||||
return f.request.method
|
||||
|
||||
|
||||
class OrderRequestURL(_OrderKey):
|
||||
def generate(self, f: mitmproxy.flow.Flow) -> str:
|
||||
def generate(self, f: http.HTTPFlow) -> str:
|
||||
return f.request.url
|
||||
|
||||
|
||||
class OrderKeySize(_OrderKey):
|
||||
def generate(self, f: mitmproxy.flow.Flow) -> int:
|
||||
def generate(self, f: http.HTTPFlow) -> int:
|
||||
s = 0
|
||||
if f.request.raw_content:
|
||||
s += len(f.request.raw_content)
|
||||
@@ -102,23 +107,23 @@ orders = [
|
||||
class View(collections.Sequence):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._store = {}
|
||||
self._store = collections.OrderedDict()
|
||||
self.filter = matchall
|
||||
# Should we show only marked flows?
|
||||
self.show_marked = False
|
||||
|
||||
self.default_order = OrderRequestStart(self)
|
||||
self.orders = dict(
|
||||
time = self.default_order,
|
||||
method = OrderRequestMethod(self),
|
||||
url = OrderRequestURL(self),
|
||||
size = OrderKeySize(self),
|
||||
time = OrderRequestStart(self), method = OrderRequestMethod(self),
|
||||
url = OrderRequestURL(self), size = OrderKeySize(self),
|
||||
)
|
||||
self.order_key = self.default_order
|
||||
self.order_reversed = False
|
||||
self.focus_follow = False
|
||||
|
||||
self._view = sortedcontainers.SortedListWithKey(key = self.order_key)
|
||||
self._view = sortedcontainers.SortedListWithKey(
|
||||
key = self.order_key
|
||||
)
|
||||
|
||||
# The sig_view* signals broadcast events that affect the view. That is,
|
||||
# an update to a flow in the store but not in the view does not trigger
|
||||
@@ -165,7 +170,7 @@ class View(collections.Sequence):
|
||||
def __len__(self):
|
||||
return len(self._view)
|
||||
|
||||
def __getitem__(self, offset) -> mitmproxy.flow.Flow:
|
||||
def __getitem__(self, offset) -> typing.Any:
|
||||
return self._view[self._rev(offset)]
|
||||
|
||||
# Reflect some methods to the efficient underlying implementation
|
||||
@@ -177,7 +182,7 @@ class View(collections.Sequence):
|
||||
def index(self, f: mitmproxy.flow.Flow, start: int = 0, stop: typing.Optional[int] = None) -> int:
|
||||
return self._rev(self._view.index(f, start, stop))
|
||||
|
||||
def __contains__(self, f: mitmproxy.flow.Flow) -> bool:
|
||||
def __contains__(self, f: typing.Any) -> bool:
|
||||
return self._view.__contains__(f)
|
||||
|
||||
def _order_key_name(self):
|
||||
@@ -197,7 +202,36 @@ class View(collections.Sequence):
|
||||
self.sig_view_refresh.send(self)
|
||||
|
||||
# API
|
||||
def toggle_marked(self):
|
||||
@command.command("view.focus.next")
|
||||
def focus_next(self) -> None:
|
||||
"""
|
||||
Set focus to the next flow.
|
||||
"""
|
||||
idx = self.focus.index + 1
|
||||
if self.inbounds(idx):
|
||||
self.focus.flow = self[idx]
|
||||
|
||||
@command.command("view.focus.prev")
|
||||
def focus_prev(self) -> None:
|
||||
"""
|
||||
Set focus to the previous flow.
|
||||
"""
|
||||
idx = self.focus.index - 1
|
||||
if self.inbounds(idx):
|
||||
self.focus.flow = self[idx]
|
||||
|
||||
@command.command("view.order.options")
|
||||
def order_options(self) -> typing.Sequence[str]:
|
||||
"""
|
||||
A list of all the orders we support.
|
||||
"""
|
||||
return list(sorted(self.orders.keys()))
|
||||
|
||||
@command.command("view.marked.toggle")
|
||||
def toggle_marked(self) -> None:
|
||||
"""
|
||||
Toggle whether to show marked views only.
|
||||
"""
|
||||
self.show_marked = not self.show_marked
|
||||
self._refilter()
|
||||
|
||||
@@ -221,7 +255,7 @@ class View(collections.Sequence):
|
||||
self.filter = flt or matchall
|
||||
self._refilter()
|
||||
|
||||
def clear(self):
|
||||
def clear(self) -> None:
|
||||
"""
|
||||
Clears both the store and view.
|
||||
"""
|
||||
@@ -230,55 +264,30 @@ class View(collections.Sequence):
|
||||
self.sig_view_refresh.send(self)
|
||||
self.sig_store_refresh.send(self)
|
||||
|
||||
def add(self, f: mitmproxy.flow.Flow) -> bool:
|
||||
def clear_not_marked(self):
|
||||
"""
|
||||
Clears only the unmarked flows.
|
||||
"""
|
||||
for flow in self._store.copy().values():
|
||||
if not flow.marked:
|
||||
self._store.pop(flow.id)
|
||||
|
||||
self._refilter()
|
||||
self.sig_store_refresh.send(self)
|
||||
|
||||
def add(self, flows: typing.Sequence[mitmproxy.flow.Flow]) -> None:
|
||||
"""
|
||||
Adds a flow to the state. If the flow already exists, it is
|
||||
ignored.
|
||||
"""
|
||||
if f.id not in self._store:
|
||||
self._store[f.id] = f
|
||||
if self.filter(f):
|
||||
self._base_add(f)
|
||||
if self.focus_follow:
|
||||
self.focus.flow = f
|
||||
self.sig_view_add.send(self, flow=f)
|
||||
|
||||
def remove(self, f: mitmproxy.flow.Flow):
|
||||
"""
|
||||
Removes the flow from the underlying store and the view.
|
||||
"""
|
||||
if f.id in self._store:
|
||||
if f in self._view:
|
||||
self._view.remove(f)
|
||||
self.sig_view_remove.send(self, flow=f)
|
||||
del self._store[f.id]
|
||||
self.sig_store_remove.send(self, flow=f)
|
||||
|
||||
def update(self, f: mitmproxy.flow.Flow):
|
||||
"""
|
||||
Updates a flow. If the flow is not in the state, it's ignored.
|
||||
"""
|
||||
if f.id in self._store:
|
||||
if self.filter(f):
|
||||
if f not in self._view:
|
||||
for f in flows:
|
||||
if f.id not in self._store:
|
||||
self._store[f.id] = f
|
||||
if self.filter(f):
|
||||
self._base_add(f)
|
||||
if self.focus_follow:
|
||||
self.focus.flow = f
|
||||
self.sig_view_add.send(self, flow=f)
|
||||
else:
|
||||
# This is a tad complicated. The sortedcontainers
|
||||
# implementation assumes that the order key is stable. If
|
||||
# it changes mid-way Very Bad Things happen. We detect when
|
||||
# this happens, and re-fresh the item.
|
||||
self.order_key.refresh(f)
|
||||
self.sig_view_update.send(self, flow=f)
|
||||
else:
|
||||
try:
|
||||
self._view.remove(f)
|
||||
self.sig_view_remove.send(self, flow=f)
|
||||
except ValueError:
|
||||
# The value was not in the view
|
||||
pass
|
||||
|
||||
def get_by_id(self, flow_id: str) -> typing.Optional[mitmproxy.flow.Flow]:
|
||||
"""
|
||||
@@ -287,48 +296,199 @@ class View(collections.Sequence):
|
||||
"""
|
||||
return self._store.get(flow_id)
|
||||
|
||||
@command.command("view.getval")
|
||||
def getvalue(self, f: mitmproxy.flow.Flow, key: str, default: str) -> str:
|
||||
"""
|
||||
Get a value from the settings store for the specified flow.
|
||||
"""
|
||||
return self.settings[f].get(key, default)
|
||||
|
||||
@command.command("view.setval.toggle")
|
||||
def setvalue_toggle(
|
||||
self,
|
||||
flows: typing.Sequence[mitmproxy.flow.Flow],
|
||||
key: str
|
||||
) -> None:
|
||||
"""
|
||||
Toggle a boolean value in the settings store, setting the value to
|
||||
the string "true" or "false".
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
current = self.settings[f].get("key", "false")
|
||||
self.settings[f][key] = "false" if current == "true" else "true"
|
||||
updated.append(f)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
|
||||
@command.command("view.setval")
|
||||
def setvalue(
|
||||
self,
|
||||
flows: typing.Sequence[mitmproxy.flow.Flow],
|
||||
key: str, value: str
|
||||
) -> None:
|
||||
"""
|
||||
Set a value in the settings store for the specified flows.
|
||||
"""
|
||||
updated = []
|
||||
for f in flows:
|
||||
self.settings[f][key] = value
|
||||
updated.append(f)
|
||||
ctx.master.addons.trigger("update", updated)
|
||||
|
||||
@command.command("view.load")
|
||||
def load_file(self, path: str) -> None:
|
||||
"""
|
||||
Load flows into the view, without processing them with addons.
|
||||
"""
|
||||
for i in io.FlowReader(open(path, "rb")).stream():
|
||||
# Do this to get a new ID, so we can load the same file N times and
|
||||
# get new flows each time. It would be more efficient to just have a
|
||||
# .newid() method or something.
|
||||
self.add([i.copy()])
|
||||
|
||||
@command.command("view.go")
|
||||
def go(self, dst: int) -> None:
|
||||
"""
|
||||
Go to a specified offset. Positive offsets are from the beginning of
|
||||
the view, negative from the end of the view, so that 0 is the first
|
||||
flow, -1 is the last flow.
|
||||
"""
|
||||
if len(self) == 0:
|
||||
return
|
||||
if dst < 0:
|
||||
dst = len(self) + dst
|
||||
if dst < 0:
|
||||
dst = 0
|
||||
if dst > len(self) - 1:
|
||||
dst = len(self) - 1
|
||||
self.focus.flow = self[dst]
|
||||
|
||||
@command.command("view.duplicate")
|
||||
def duplicate(self, flows: typing.Sequence[mitmproxy.flow.Flow]) -> None:
|
||||
"""
|
||||
Duplicates the specified flows, and sets the focus to the first
|
||||
duplicate.
|
||||
"""
|
||||
dups = [f.copy() for f in flows]
|
||||
if dups:
|
||||
self.add(dups)
|
||||
self.focus.flow = dups[0]
|
||||
ctx.log.alert("Duplicated %s flows" % len(dups))
|
||||
|
||||
@command.command("view.remove")
|
||||
def remove(self, flows: typing.Sequence[mitmproxy.flow.Flow]) -> None:
|
||||
"""
|
||||
Removes the flow from the underlying store and the view.
|
||||
"""
|
||||
for f in flows:
|
||||
if f.id in self._store:
|
||||
if f.killable:
|
||||
f.kill()
|
||||
if f in self._view:
|
||||
self._view.remove(f)
|
||||
self.sig_view_remove.send(self, flow=f)
|
||||
del self._store[f.id]
|
||||
self.sig_store_remove.send(self, flow=f)
|
||||
|
||||
@command.command("view.resolve")
|
||||
def resolve(self, spec: str) -> typing.Sequence[mitmproxy.flow.Flow]:
|
||||
"""
|
||||
Resolve a flow list specification to an actual list of flows.
|
||||
"""
|
||||
if spec == "@all":
|
||||
return [i for i in self._store.values()]
|
||||
if spec == "@focus":
|
||||
return [self.focus.flow] if self.focus.flow else []
|
||||
elif spec == "@shown":
|
||||
return [i for i in self]
|
||||
elif spec == "@hidden":
|
||||
return [i for i in self._store.values() if i not in self._view]
|
||||
elif spec == "@marked":
|
||||
return [i for i in self._store.values() if i.marked]
|
||||
elif spec == "@unmarked":
|
||||
return [i for i in self._store.values() if not i.marked]
|
||||
else:
|
||||
filt = flowfilter.parse(spec)
|
||||
if not filt:
|
||||
raise exceptions.CommandError("Invalid flow filter: %s" % spec)
|
||||
return [i for i in self._store.values() if filt(i)]
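A few illustrative specifications this resolver accepts (invented examples, not from the diff):

    view.resolve("@focus")    # the focused flow, or an empty list
    view.resolve("@marked")   # every marked flow in the store
    view.resolve("~m POST")   # anything else is parsed as a flow filter, here matching POST requests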
|
||||
|
||||
@command.command("view.create")
|
||||
def create(self, method: str, url: str) -> None:
|
||||
req = http.HTTPRequest.make(method.upper(), url)
|
||||
c = connections.ClientConnection.make_dummy(("", 0))
|
||||
s = connections.ServerConnection.make_dummy((req.host, req.port))
|
||||
f = http.HTTPFlow(c, s)
|
||||
f.request = req
|
||||
f.request.headers["Host"] = req.host
|
||||
self.add([f])
|
||||
|
||||
# Event handlers
|
||||
def configure(self, opts, updated):
|
||||
if "filter" in updated:
|
||||
def configure(self, updated):
|
||||
if "view_filter" in updated:
|
||||
filt = None
|
||||
if opts.filter:
|
||||
filt = flowfilter.parse(opts.filter)
|
||||
if ctx.options.view_filter:
|
||||
filt = flowfilter.parse(ctx.options.view_filter)
|
||||
if not filt:
|
||||
raise exceptions.OptionsError(
|
||||
"Invalid interception filter: %s" % opts.filter
|
||||
"Invalid interception filter: %s" % ctx.options.view_filter
|
||||
)
|
||||
self.set_filter(filt)
|
||||
if "order" in updated:
|
||||
if opts.order is None:
|
||||
self.set_order(self.default_order)
|
||||
else:
|
||||
if opts.order not in self.orders:
|
||||
raise exceptions.OptionsError(
|
||||
"Unknown flow order: %s" % opts.order
|
||||
)
|
||||
self.set_order(self.orders[opts.order])
|
||||
if "order_reversed" in updated:
|
||||
self.set_reversed(opts.order_reversed)
|
||||
if "focus_follow" in updated:
|
||||
self.focus_follow = opts.focus_follow
|
||||
if "console_order" in updated:
|
||||
if ctx.options.console_order not in self.orders:
|
||||
raise exceptions.OptionsError(
|
||||
"Unknown flow order: %s" % ctx.options.console_order
|
||||
)
|
||||
self.set_order(self.orders[ctx.options.console_order])
|
||||
if "console_order_reversed" in updated:
|
||||
self.set_reversed(ctx.options.console_order_reversed)
|
||||
if "console_focus_follow" in updated:
|
||||
self.focus_follow = ctx.options.console_focus_follow
|
||||
|
||||
def request(self, f):
|
||||
self.add(f)
|
||||
self.add([f])
|
||||
|
||||
def error(self, f):
|
||||
self.update(f)
|
||||
self.update([f])
|
||||
|
||||
def response(self, f):
|
||||
self.update(f)
|
||||
self.update([f])
|
||||
|
||||
def intercept(self, f):
|
||||
self.update(f)
|
||||
self.update([f])
|
||||
|
||||
def resume(self, f):
|
||||
self.update(f)
|
||||
self.update([f])
|
||||
|
||||
def kill(self, f):
|
||||
self.update(f)
|
||||
self.update([f])
|
||||
|
||||
def update(self, flows: typing.Sequence[mitmproxy.flow.Flow]) -> None:
|
||||
"""
|
||||
Updates a list of flows. If flow is not in the state, it's ignored.
|
||||
"""
|
||||
for f in flows:
|
||||
if f.id in self._store:
|
||||
if self.filter(f):
|
||||
if f not in self._view:
|
||||
self._base_add(f)
|
||||
if self.focus_follow:
|
||||
self.focus.flow = f
|
||||
self.sig_view_add.send(self, flow=f)
|
||||
else:
|
||||
# This is a tad complicated. The sortedcontainers
|
||||
# implementation assumes that the order key is stable. If
|
||||
# it changes mid-way Very Bad Things happen. We detect when
|
||||
# this happens, and re-fresh the item.
|
||||
self.order_key.refresh(f)
|
||||
self.sig_view_update.send(self, flow=f)
|
||||
else:
|
||||
try:
|
||||
self._view.remove(f)
|
||||
self.sig_view_remove.send(self, flow=f)
|
||||
except ValueError:
|
||||
# The value was not in the view
|
||||
pass
|
||||
|
||||
|
||||
class Focus:
|
||||
@@ -360,6 +520,7 @@ class Focus:
|
||||
def index(self) -> typing.Optional[int]:
|
||||
if self.flow:
|
||||
return self.view.index(self.flow)
|
||||
return None
|
||||
|
||||
@index.setter
|
||||
def index(self, idx):
|
||||
@@ -393,7 +554,7 @@ class Focus:
|
||||
class Settings(collections.Mapping):
|
||||
def __init__(self, view: View) -> None:
|
||||
self.view = view
|
||||
self._values = {} # type: typing.MutableMapping[str, mitmproxy.flow.Flow]
|
||||
self._values = {} # type: typing.MutableMapping[str, typing.Dict]
|
||||
view.sig_store_remove.connect(self._sig_store_remove)
|
||||
view.sig_store_refresh.connect(self._sig_store_refresh)
|
||||
|
||||
|
||||
@@ -13,6 +13,10 @@ class WSGIApp:
|
||||
def __init__(self, app, host, port):
|
||||
self.app, self.host, self.port = app, host, port
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return "wsgiapp:%s:%s" % (self.host, self.port)
|
||||
|
||||
def serve(self, app, flow):
|
||||
"""
|
||||
Serves app on flow, and prevents further handling of the flow.
|
||||
|
||||
@@ -3,8 +3,8 @@ import ssl
|
||||
import time
|
||||
import datetime
|
||||
import ipaddress
|
||||
|
||||
import sys
|
||||
|
||||
from pyasn1.type import univ, constraint, char, namedtype, tag
|
||||
from pyasn1.codec.der.decoder import decode
|
||||
from pyasn1.error import PyAsn1Error
|
||||
@@ -13,8 +13,8 @@ import OpenSSL
|
||||
from mitmproxy.types import serializable
|
||||
|
||||
# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
|
||||
|
||||
DEFAULT_EXP = 94608000 # = 24 * 60 * 60 * 365 * 3
|
||||
|
||||
# Generated with "openssl dhparam". It's too slow to generate this on startup.
|
||||
DEFAULT_DHPARAM = b"""
|
||||
-----BEGIN DH PARAMETERS-----
|
||||
@@ -93,9 +93,9 @@ def dummy_cert(privkey, cacert, commonname, sans):
|
||||
try:
|
||||
ipaddress.ip_address(i.decode("ascii"))
|
||||
except ValueError:
|
||||
ss.append(b"DNS: %s" % i)
|
||||
ss.append(b"DNS:%s" % i)
|
||||
else:
|
||||
ss.append(b"IP: %s" % i)
|
||||
ss.append(b"IP:%s" % i)
|
||||
ss = b", ".join(ss)
|
||||
|
||||
cert = OpenSSL.crypto.X509()
|
||||
@@ -356,14 +356,14 @@ class CertStore:
|
||||
|
||||
|
||||
class _GeneralName(univ.Choice):
|
||||
# We are only interested in dNSNames. We use a default handler to ignore
|
||||
# other types.
|
||||
# TODO: We should also handle iPAddresses.
|
||||
# We only care about dNSName and iPAddress
|
||||
componentType = namedtype.NamedTypes(
|
||||
namedtype.NamedType('dNSName', char.IA5String().subtype(
|
||||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)
|
||||
)
|
||||
),
|
||||
)),
|
||||
namedtype.NamedType('iPAddress', univ.OctetString().subtype(
|
||||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7)
|
||||
)),
|
||||
)
|
||||
|
||||
|
||||
@@ -384,9 +384,6 @@ class SSLCert(serializable.Serializable):
|
||||
def __eq__(self, other):
|
||||
return self.digest("sha256") == other.digest("sha256")
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def get_state(self):
|
||||
return self.to_pem()
|
||||
|
||||
@@ -477,5 +474,10 @@ class SSLCert(serializable.Serializable):
|
||||
except PyAsn1Error:
|
||||
continue
|
||||
for i in dec[0]:
|
||||
altnames.append(i[0].asOctets())
|
||||
if i[0] is None and isinstance(i[1], univ.OctetString) and not isinstance(i[1], char.IA5String):
|
||||
# This would give back the IP address: b'.'.join([str(e).encode() for e in i[1].asNumbers()])
|
||||
continue
|
||||
else:
|
||||
e = i[0].asOctets()
|
||||
altnames.append(e)
|
||||
return altnames
|
||||
|
||||
195 mitmproxy/command.py Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
This module manages and invokes typed commands.
|
||||
"""
|
||||
import inspect
|
||||
import typing
|
||||
import shlex
|
||||
import textwrap
|
||||
import functools
|
||||
import sys
|
||||
|
||||
from mitmproxy.utils import typecheck
|
||||
from mitmproxy import exceptions
|
||||
from mitmproxy import flow
|
||||
|
||||
|
||||
Cuts = typing.Sequence[
|
||||
typing.Sequence[typing.Union[str, bytes]]
|
||||
]
|
||||
|
||||
|
||||
def typename(t: type, ret: bool) -> str:
|
||||
"""
|
||||
Translates a type to an explanatory string. If ret is True, we're
|
||||
looking at a return type, else we're looking at a parameter type.
|
||||
"""
|
||||
if issubclass(t, (str, int, bool)):
|
||||
return t.__name__
|
||||
elif t == typing.Sequence[flow.Flow]:
|
||||
return "[flow]" if ret else "flowspec"
|
||||
elif t == typing.Sequence[str]:
|
||||
return "[str]"
|
||||
elif t == Cuts:
|
||||
return "[cuts]" if ret else "cutspec"
|
||||
elif t == flow.Flow:
|
||||
return "flow"
|
||||
else: # pragma: no cover
|
||||
raise NotImplementedError(t)
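For instance (illustrative only, not part of the diff):

    typename(int, ret=False)                      # -> "int"
    typename(typing.Sequence[flow.Flow], False)   # -> "flowspec", i.e. a flow specification argument
    typename(typing.Sequence[flow.Flow], True)    # -> "[flow]", i.e. a list of flows is returned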
|
||||
|
||||
|
||||
class Command:
|
||||
def __init__(self, manager, path, func) -> None:
|
||||
self.path = path
|
||||
self.manager = manager
|
||||
self.func = func
|
||||
sig = inspect.signature(self.func)
|
||||
self.help = None
|
||||
if func.__doc__:
|
||||
txt = func.__doc__.strip()
|
||||
self.help = "\n".join(textwrap.wrap(txt))
|
||||
|
||||
self.has_positional = False
|
||||
for i in sig.parameters.values():
|
||||
# This is the kind for *args parameters
|
||||
if i.kind == i.VAR_POSITIONAL:
|
||||
self.has_positional = True
|
||||
self.paramtypes = [v.annotation for v in sig.parameters.values()]
|
||||
self.returntype = sig.return_annotation
|
||||
|
||||
def paramnames(self) -> typing.Sequence[str]:
|
||||
v = [typename(i, False) for i in self.paramtypes]
|
||||
if self.has_positional:
|
||||
v[-1] = "*" + v[-1][1:-1]
|
||||
return v
|
||||
|
||||
def retname(self) -> str:
|
||||
return typename(self.returntype, True) if self.returntype else ""
|
||||
|
||||
def signature_help(self) -> str:
|
||||
params = " ".join(self.paramnames())
|
||||
ret = self.retname()
|
||||
if ret:
|
||||
ret = " -> " + ret
|
||||
return "%s %s%s" % (self.path, params, ret)
|
||||
|
||||
def call(self, args: typing.Sequence[str]):
|
||||
"""
|
||||
Call the command with a set of arguments. At this point, all arguments are strings.
|
||||
"""
|
||||
if not self.has_positional and (len(self.paramtypes) != len(args)):
|
||||
raise exceptions.CommandError("Usage: %s" % self.signature_help())
|
||||
|
||||
remainder = [] # type: typing.Sequence[str]
|
||||
if self.has_positional:
|
||||
remainder = args[len(self.paramtypes) - 1:]
|
||||
args = args[:len(self.paramtypes) - 1]
|
||||
|
||||
pargs = []
|
||||
for i in range(len(args)):
|
||||
if typecheck.check_command_type(args[i], self.paramtypes[i]):
|
||||
pargs.append(args[i])
|
||||
else:
|
||||
pargs.append(parsearg(self.manager, args[i], self.paramtypes[i]))
|
||||
|
||||
if remainder:
|
||||
if typecheck.check_command_type(remainder, self.paramtypes[-1]):
|
||||
pargs.extend(remainder)
|
||||
else:
|
||||
raise exceptions.CommandError("Invalid value type.")
|
||||
|
||||
with self.manager.master.handlecontext():
|
||||
ret = self.func(*pargs)
|
||||
|
||||
if not typecheck.check_command_type(ret, self.returntype):
|
||||
raise exceptions.CommandError("Command returned unexpected data")
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class CommandManager:
|
||||
def __init__(self, master):
|
||||
self.master = master
|
||||
self.commands = {}
|
||||
|
||||
def collect_commands(self, addon):
|
||||
for i in dir(addon):
|
||||
if not i.startswith("__"):
|
||||
o = getattr(addon, i)
|
||||
if hasattr(o, "command_path"):
|
||||
self.add(o.command_path, o)
|
||||
|
||||
def add(self, path: str, func: typing.Callable):
|
||||
self.commands[path] = Command(self, path, func)
|
||||
|
||||
def call_args(self, path, args):
|
||||
"""
|
||||
Call a command using a list of string arguments. May raise CommandError.
|
||||
"""
|
||||
if path not in self.commands:
|
||||
raise exceptions.CommandError("Unknown command: %s" % path)
|
||||
return self.commands[path].call(args)
|
||||
|
||||
def call(self, cmdstr: str):
|
||||
"""
|
||||
Call a command using a string. May raise CommandError.
|
||||
"""
|
||||
parts = shlex.split(cmdstr)
|
||||
if not len(parts) >= 1:
|
||||
raise exceptions.CommandError("Invalid command: %s" % cmdstr)
|
||||
return self.call_args(parts[0], parts[1:])
|
||||
|
||||
def dump(self, out=sys.stdout) -> None:
|
||||
cmds = list(self.commands.values())
|
||||
cmds.sort(key=lambda x: x.signature_help())
|
||||
for c in cmds:
|
||||
for hl in (c.help or "").splitlines():
|
||||
print("# " + hl, file=out)
|
||||
print(c.signature_help(), file=out)
|
||||
print(file=out)
|
||||
|
||||
|
||||
def parsearg(manager: CommandManager, spec: str, argtype: type) -> typing.Any:
|
||||
"""
|
||||
Convert a string to an argument of the appropriate type.
|
||||
"""
|
||||
if issubclass(argtype, str):
|
||||
return spec
|
||||
elif argtype == bool:
|
||||
if spec == "true":
|
||||
return True
|
||||
elif spec == "false":
|
||||
return False
|
||||
else:
|
||||
raise exceptions.CommandError(
|
||||
"Booleans are 'true' or 'false', got %s" % spec
|
||||
)
|
||||
elif issubclass(argtype, int):
|
||||
try:
|
||||
return int(spec)
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError("Expected an integer, got %s." % spec)
|
||||
elif argtype == typing.Sequence[flow.Flow]:
|
||||
return manager.call_args("view.resolve", [spec])
|
||||
elif argtype == Cuts:
|
||||
return manager.call_args("cut", [spec])
|
||||
elif argtype == flow.Flow:
|
||||
flows = manager.call_args("view.resolve", [spec])
|
||||
if len(flows) != 1:
|
||||
raise exceptions.CommandError(
|
||||
"Command requires one flow, specification matched %s." % len(flows)
|
||||
)
|
||||
return flows[0]
|
||||
elif argtype == typing.Sequence[str]:
|
||||
return [i.strip() for i in spec.split(",")]
|
||||
else:
|
||||
raise exceptions.CommandError("Unsupported argument type: %s" % argtype)
|
||||
|
||||
|
||||
def command(path):
    def decorator(function):
        @functools.wraps(function)
        def wrapper(*args, **kwargs):
            return function(*args, **kwargs)
        wrapper.__dict__["command_path"] = path
        return wrapper
    return decorator
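A minimal sketch of how an addon would expose a command through this decorator and the CommandManager above; the addon class and command name are invented for illustration:

    class Greeter:
        @command.command("greet.hello")
        def hello(self, name: str) -> str:
            # A toy command returning a greeting.
            return "hello, " + name

    # CommandManager.collect_commands() picks the method up via its command_path
    # attribute; a command string is then split with shlex and dispatched:
    #   manager.call("greet.hello world")  # -> "hello, world"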
|
||||
@@ -1,7 +1,7 @@
|
||||
import time
|
||||
|
||||
import copy
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from mitmproxy import stateobject
|
||||
from mitmproxy import certs
|
||||
@@ -18,6 +18,7 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
address: Remote address
|
||||
ssl_established: True if TLS is established, False otherwise
|
||||
clientcert: The TLS client certificate
|
||||
mitmcert: The MITM'ed TLS server certificate presented to the client
|
||||
timestamp_start: Connection start timestamp
|
||||
timestamp_ssl_setup: TLS established timestamp
|
||||
timestamp_end: Connection end timestamp
|
||||
@@ -41,6 +42,8 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
self.clientcert = None
|
||||
self.ssl_established = None
|
||||
|
||||
self.id = str(uuid.uuid4())
|
||||
self.mitmcert = None
|
||||
self.timestamp_start = time.time()
|
||||
self.timestamp_end = None
|
||||
self.timestamp_ssl_setup = None
|
||||
@@ -53,26 +56,47 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
return bool(self.connection) and not self.finished
|
||||
|
||||
def __repr__(self):
|
||||
if self.ssl_established:
|
||||
tls = "[{}] ".format(self.tls_version)
|
||||
else:
|
||||
tls = ""
|
||||
|
||||
if self.alpn_proto_negotiated:
|
||||
alpn = "[ALPN: {}] ".format(
|
||||
strutils.bytes_to_escaped_str(self.alpn_proto_negotiated)
|
||||
)
|
||||
else:
|
||||
alpn = ""
|
||||
return "<ClientConnection: {ssl}{alpn}{address}>".format(
|
||||
ssl="[ssl] " if self.ssl_established else "",
|
||||
|
||||
return "<ClientConnection: {tls}{alpn}{host}:{port}>".format(
|
||||
tls=tls,
|
||||
alpn=alpn,
|
||||
address=repr(self.address)
|
||||
host=self.address[0],
|
||||
port=self.address[1],
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, ClientConnection):
|
||||
return self.id == other.id
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.id)
|
||||
|
||||
@property
|
||||
def tls_established(self):
|
||||
return self.ssl_established
|
||||
|
||||
@tls_established.setter
|
||||
def tls_established(self, value):
|
||||
self.ssl_established = value
|
||||
|
||||
_stateobject_attributes = dict(
|
||||
address=tcp.Address,
|
||||
id=str,
|
||||
address=tuple,
|
||||
ssl_established=bool,
|
||||
clientcert=certs.SSLCert,
|
||||
mitmcert=certs.SSLCert,
|
||||
timestamp_start=float,
|
||||
timestamp_ssl_setup=float,
|
||||
timestamp_end=float,
|
||||
@@ -82,9 +106,6 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
tls_version=str,
|
||||
)
|
||||
|
||||
def copy(self):
|
||||
return copy.copy(self)
|
||||
|
||||
def send(self, message):
|
||||
if isinstance(message, list):
|
||||
message = b''.join(message)
|
||||
@@ -100,8 +121,10 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
@classmethod
|
||||
def make_dummy(cls, address):
|
||||
return cls.from_state(dict(
|
||||
address=dict(address=address, use_ipv6=False),
|
||||
id=str(uuid.uuid4()),
|
||||
address=address,
|
||||
clientcert=None,
|
||||
mitmcert=None,
|
||||
ssl_established=False,
|
||||
timestamp_start=None,
|
||||
timestamp_end=None,
|
||||
@@ -112,9 +135,10 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
|
||||
tls_version=None,
|
||||
))
|
||||
|
||||
def convert_to_ssl(self, *args, **kwargs):
|
||||
super().convert_to_ssl(*args, **kwargs)
|
||||
def convert_to_ssl(self, cert, *args, **kwargs):
|
||||
super().convert_to_ssl(cert, *args, **kwargs)
|
||||
self.timestamp_ssl_setup = time.time()
|
||||
self.mitmcert = cert
|
||||
sni = self.connection.get_servername()
|
||||
if sni:
|
||||
self.sni = sni.decode("idna")
|
||||
@@ -142,6 +166,7 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
cert: The certificate presented by the remote during the TLS handshake
|
||||
sni: Server Name Indication sent by the proxy during the TLS handshake
|
||||
alpn_proto_negotiated: The negotiated application protocol
|
||||
tls_version: TLS version
|
||||
via: The underlying server connection (e.g. the connection to the upstream proxy in upstream proxy mode)
|
||||
timestamp_start: Connection start timestamp
|
||||
timestamp_tcp_setup: TCP ACK received timestamp
|
||||
@@ -152,7 +177,9 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
def __init__(self, address, source_address=None, spoof_source_address=None):
|
||||
tcp.TCPClient.__init__(self, address, source_address, spoof_source_address)
|
||||
|
||||
self.id = str(uuid.uuid4())
|
||||
self.alpn_proto_negotiated = None
|
||||
self.tls_version = None
|
||||
self.via = None
|
||||
self.timestamp_start = None
|
||||
self.timestamp_end = None
|
||||
@@ -164,35 +191,50 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
|
||||
def __repr__(self):
|
||||
if self.ssl_established and self.sni:
|
||||
ssl = "[ssl: {0}] ".format(self.sni)
|
||||
tls = "[{}: {}] ".format(self.tls_version or "TLS", self.sni)
|
||||
elif self.ssl_established:
|
||||
ssl = "[ssl] "
|
||||
tls = "[{}] ".format(self.tls_version or "TLS")
|
||||
else:
|
||||
ssl = ""
|
||||
tls = ""
|
||||
if self.alpn_proto_negotiated:
|
||||
alpn = "[ALPN: {}] ".format(
|
||||
strutils.bytes_to_escaped_str(self.alpn_proto_negotiated)
|
||||
)
|
||||
else:
|
||||
alpn = ""
|
||||
return "<ServerConnection: {ssl}{alpn}{address}>".format(
|
||||
ssl=ssl,
|
||||
return "<ServerConnection: {tls}{alpn}{host}:{port}>".format(
|
||||
tls=tls,
|
||||
alpn=alpn,
|
||||
address=repr(self.address)
|
||||
host=self.address[0],
|
||||
port=self.address[1],
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, ServerConnection):
|
||||
return self.id == other.id
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.id)
|
||||
|
||||
@property
|
||||
def tls_established(self):
|
||||
return self.ssl_established
|
||||
|
||||
@tls_established.setter
|
||||
def tls_established(self, value):
|
||||
self.ssl_established = value
|
||||
|
||||
_stateobject_attributes = dict(
|
||||
address=tcp.Address,
|
||||
ip_address=tcp.Address,
|
||||
source_address=tcp.Address,
|
||||
id=str,
|
||||
address=tuple,
|
||||
ip_address=tuple,
|
||||
source_address=tuple,
|
||||
ssl_established=bool,
|
||||
cert=certs.SSLCert,
|
||||
sni=str,
|
||||
alpn_proto_negotiated=bytes,
|
||||
tls_version=str,
|
||||
timestamp_start=float,
|
||||
timestamp_tcp_setup=float,
|
||||
timestamp_ssl_setup=float,
|
||||
@@ -208,12 +250,14 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
@classmethod
|
||||
def make_dummy(cls, address):
|
||||
return cls.from_state(dict(
|
||||
address=dict(address=address, use_ipv6=False),
|
||||
ip_address=dict(address=address, use_ipv6=False),
|
||||
id=str(uuid.uuid4()),
|
||||
address=address,
|
||||
ip_address=address,
|
||||
cert=None,
|
||||
sni=None,
|
||||
alpn_proto_negotiated=None,
|
||||
source_address=dict(address=('', 0), use_ipv6=False),
|
||||
tls_version=None,
|
||||
source_address=('', 0),
|
||||
ssl_established=False,
|
||||
timestamp_start=None,
|
||||
timestamp_tcp_setup=None,
|
||||
@@ -222,9 +266,6 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
via=None
|
||||
))
|
||||
|
||||
def copy(self):
|
||||
return copy.copy(self)
|
||||
|
||||
def connect(self):
|
||||
self.timestamp_start = time.time()
|
||||
tcp.TCPClient.connect(self)
|
||||
@@ -246,13 +287,14 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
|
||||
else:
|
||||
path = os.path.join(
|
||||
clientcerts,
|
||||
self.address.host.encode("idna").decode()) + ".pem"
|
||||
self.address[0].encode("idna").decode()) + ".pem"
|
||||
if os.path.exists(path):
|
||||
clientcert = path
|
||||
|
||||
self.convert_to_ssl(cert=clientcert, sni=sni, **kwargs)
|
||||
self.sni = sni
|
||||
self.alpn_proto_negotiated = self.get_alpn_proto_negotiated()
|
||||
self.tls_version = self.connection.get_protocol_version_name()
|
||||
self.timestamp_ssl_setup = time.time()
|
||||
|
||||
def finish(self):
|
||||
|
||||
@@ -36,12 +36,14 @@ def get(name: str) -> Optional[View]:
|
||||
for i in views:
|
||||
if i.name.lower() == name.lower():
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
def get_by_shortcut(c: str) -> Optional[View]:
|
||||
for i in views:
|
||||
if i.prompt[1] == c:
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
def add(view: View) -> None:
|
||||
@@ -159,6 +161,7 @@ def get_content_view(viewmode: View, data: bytes, **metadata):
|
||||
return desc, safe_to_print(content), error
|
||||
|
||||
|
||||
# The order in which ContentViews are added is important!
|
||||
add(auto.ViewAuto())
|
||||
add(raw.ViewRaw())
|
||||
add(hex.ViewHex())
|
||||
@@ -172,9 +175,7 @@ add(urlencoded.ViewURLEncoded())
|
||||
add(multipart.ViewMultipart())
|
||||
add(image.ViewImage())
|
||||
add(query.ViewQuery())
|
||||
|
||||
if protobuf.ViewProtobuf.is_available():
|
||||
add(protobuf.ViewProtobuf())
|
||||
add(protobuf.ViewProtobuf())
|
||||
|
||||
__all__ = [
|
||||
"View", "VIEW_CUTOFF", "KEY_MAX", "format_text", "format_dict",
|
||||
|
||||
@@ -18,6 +18,8 @@ class ViewAuto(base.View):
|
||||
return contentviews.content_types_map[ct][0](data, **metadata)
|
||||
elif strutils.is_xml(data):
|
||||
return contentviews.get("XML/HTML")(data, **metadata)
|
||||
elif ct.startswith("image/"):
|
||||
return contentviews.get("Image")(data, **metadata)
|
||||
if metadata.get("query"):
|
||||
return contentviews.get("Query")(data, **metadata)
|
||||
if data and strutils.is_mostly_bin(data):
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
import io
|
||||
|
||||
from PIL import ExifTags
|
||||
from PIL import Image
|
||||
|
||||
from mitmproxy.types import multidict
|
||||
from . import base
|
||||
|
||||
|
||||
class ViewImage(base.View):
|
||||
name = "Image"
|
||||
prompt = ("image", "i")
|
||||
content_types = [
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/gif",
|
||||
"image/vnd.microsoft.icon",
|
||||
"image/x-icon",
|
||||
]
|
||||
|
||||
def __call__(self, data, **metadata):
|
||||
try:
|
||||
img = Image.open(io.BytesIO(data))
|
||||
except IOError:
|
||||
return None
|
||||
parts = [
|
||||
("Format", str(img.format_description)),
|
||||
("Size", "%s x %s px" % img.size),
|
||||
("Mode", str(img.mode)),
|
||||
]
|
||||
for i in sorted(img.info.keys()):
|
||||
if i != "exif":
|
||||
parts.append(
|
||||
(str(i), str(img.info[i]))
|
||||
)
|
||||
if hasattr(img, "_getexif"):
|
||||
ex = img._getexif()
|
||||
if ex:
|
||||
for i in sorted(ex.keys()):
|
||||
tag = ExifTags.TAGS.get(i, i)
|
||||
parts.append(
|
||||
(str(tag), str(ex[i]))
|
||||
)
|
||||
fmt = base.format_dict(multidict.MultiDict(parts))
|
||||
return "%s image" % img.format, fmt
|
||||
3 mitmproxy/contentviews/image/__init__.py Normal file
@@ -0,0 +1,3 @@
from .view import ViewImage

__all__ = ["ViewImage"]
Some files were not shown because too many files have changed in this diff.