Compare commits
469 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4accef00fb | ||
|
|
d779525500 | ||
|
|
65a7706f77 | ||
|
|
5e12abbb9b | ||
|
|
e0fe2b97be | ||
|
|
bd33863f9f | ||
|
|
a011139894 | ||
|
|
36866f1d36 | ||
|
|
407531bcb1 | ||
|
|
3adbb2ff41 | ||
|
|
499ae1c7a1 | ||
|
|
438ea6ccb0 | ||
|
|
598a29a733 | ||
|
|
6d102fc826 | ||
|
|
fca07fbb62 | ||
|
|
cdedcc24b8 | ||
|
|
60d5f27140 | ||
|
|
cb413bae49 | ||
|
|
e9f78ea70c | ||
|
|
6858cb066f | ||
|
|
4be0d426f4 | ||
|
|
7d7d5d6c3c | ||
|
|
0422387e90 | ||
|
|
2ed5fd9ac4 | ||
|
|
2beb2acc24 | ||
|
|
56ce591908 | ||
|
|
b190e676b4 | ||
|
|
19520b2ec9 | ||
|
|
eeb96ae8b5 | ||
|
|
cddedd37d5 | ||
|
|
4d6626b099 | ||
|
|
7a55833bb2 | ||
|
|
7e4702cf09 | ||
|
|
685f08697a | ||
|
|
a255db706d | ||
|
|
9d76902710 | ||
|
|
62ee7f6980 | ||
|
|
2f6707825a | ||
|
|
7dda77dcb4 | ||
|
|
ddec22d04c | ||
|
|
32e90859f4 | ||
|
|
8b8970c787 | ||
|
|
03d35ba799 | ||
|
|
c035d7d88a | ||
|
|
46f9e9efff | ||
|
|
4fa8d7ed79 | ||
|
|
cd71b505a9 | ||
|
|
c7db08ed3e | ||
|
|
3582a1004c | ||
|
|
22cbd2dbb5 | ||
|
|
c87af9e85c | ||
|
|
6c202effa4 | ||
|
|
632f52af22 | ||
|
|
46e59529a4 | ||
|
|
bdf060236a | ||
|
|
d9d2a09282 | ||
|
|
b020fd4ad2 | ||
|
|
4ef3526354 | ||
|
|
20ddeb6e1b | ||
|
|
d27f110498 | ||
|
|
910797ccb6 | ||
|
|
7de9d15aef | ||
|
|
6a9ffe7e06 | ||
|
|
12dcea4f70 | ||
|
|
b3b39bd8f1 | ||
|
|
c7caecf77c | ||
|
|
1fe30363c7 | ||
|
|
54a7256c8d | ||
|
|
8e8e4ff132 | ||
|
|
1dace72092 | ||
|
|
3a5c1d9faf | ||
|
|
f38c754301 | ||
|
|
fff38f484d | ||
|
|
95390b655f | ||
|
|
5967c421ca | ||
|
|
b8b5214f44 | ||
|
|
cdd3b67a5c | ||
|
|
28c9de3f6a | ||
|
|
f3b9bfc114 | ||
|
|
c9eba39edd | ||
|
|
40a1c7116e | ||
|
|
c03af9cfcc | ||
|
|
c4cbc32cc5 | ||
|
|
1231ce199e | ||
|
|
e0cac6fd99 | ||
|
|
d9db1534b1 | ||
|
|
6a0aaaf069 | ||
|
|
4c04798aa5 | ||
|
|
3f84b0a015 | ||
|
|
917380ddbb | ||
|
|
d9ae067e52 | ||
|
|
b2e8bf6e89 | ||
|
|
170cbe98c5 | ||
|
|
c94f662095 | ||
|
|
0987dcfb1c | ||
|
|
6920c01d4a | ||
|
|
cc0cc8cdf0 | ||
|
|
fb13969798 | ||
|
|
278258ee9f | ||
|
|
9e542cf86b | ||
|
|
244e952f79 | ||
|
|
aa2a8fa223 | ||
|
|
467acb47bf | ||
|
|
0c0d6b2bfc | ||
|
|
ce0e5be406 | ||
|
|
65ce4c90fa | ||
|
|
9897a08d09 | ||
|
|
f5753ba720 | ||
|
|
fcf32a935b | ||
|
|
ec50788987 | ||
|
|
ac0a2da3b5 | ||
|
|
9f84dc42fe | ||
|
|
21f9304235 | ||
|
|
5cedd22bbd | ||
|
|
c0dacbc4dd | ||
|
|
dd6e9ea70c | ||
|
|
87598dcd7f | ||
|
|
3bb7b677f8 | ||
|
|
988a7223f4 | ||
|
|
7f044372fa | ||
|
|
552897abbc | ||
|
|
946a8c5baa | ||
|
|
888b31aa92 | ||
|
|
e2dec2510f | ||
|
|
da5ad2ab9f | ||
|
|
eaa4b04a22 | ||
|
|
3051b13108 | ||
|
|
4c4e48bab7 | ||
|
|
01a3eb29cb | ||
|
|
73f7249c5f | ||
|
|
18c6559199 | ||
|
|
e66ece993f | ||
|
|
0686860624 | ||
|
|
24ce46b380 | ||
|
|
a49bf81ff2 | ||
|
|
64501fd7f1 | ||
|
|
db3c0b0907 | ||
|
|
edda117a7a | ||
|
|
cdface0dd5 | ||
|
|
be6afe2d3a | ||
|
|
9163780000 | ||
|
|
d7aa7dfe64 | ||
|
|
f1decb531d | ||
|
|
99399c698b | ||
|
|
1f5f42f216 | ||
|
|
9082c4702f | ||
|
|
6cedcfbf77 | ||
|
|
8a631f045e | ||
|
|
a6a2ee5b6b | ||
|
|
016708276c | ||
|
|
4cfdc4c513 | ||
|
|
0f257c9308 | ||
|
|
c8104b6e78 | ||
|
|
1a1d731043 | ||
|
|
c5a000d2ae | ||
|
|
94d1924fa9 | ||
|
|
6c1cf68bca | ||
|
|
395af051bd | ||
|
|
42fd66675e | ||
|
|
21a3f3699b | ||
|
|
d168b2acac | ||
|
|
2ce8233921 | ||
|
|
697a4fa8a4 | ||
|
|
2f83c6c7d1 | ||
|
|
127f414e9c | ||
|
|
33c4ccffab | ||
|
|
bafe7f5a09 | ||
|
|
baf41112d1 | ||
|
|
a90dde94e1 | ||
|
|
7dfbfc7227 | ||
|
|
b10843d051 | ||
|
|
520ac8f4dc | ||
|
|
537a6e50e9 | ||
|
|
2d0cbdf1a8 | ||
|
|
5afb562aa3 | ||
|
|
db069c3d4a | ||
|
|
fae40c7e2f | ||
|
|
0c43b592dc | ||
|
|
2ab8924e2d | ||
|
|
0e31cfa784 | ||
|
|
8f7ffcf350 | ||
|
|
9c8507a0fd | ||
|
|
e9b2cab088 | ||
|
|
d3ccacccb1 | ||
|
|
df386c8fbc | ||
|
|
4d15dd6e17 | ||
|
|
56a0499636 | ||
|
|
10fc4768e8 | ||
|
|
2b63d7d10d | ||
|
|
1f177528c1 | ||
|
|
fc3bbb70a3 | ||
|
|
ce3cab0295 | ||
|
|
c784e5285e | ||
|
|
2bf9055cae | ||
|
|
8aba5aed4f | ||
|
|
0ce7cf5e10 | ||
|
|
96edcbccd7 | ||
|
|
4603afb6de | ||
|
|
56317b00af | ||
|
|
cacec9c1f3 | ||
|
|
44ee07f0b2 | ||
|
|
6a8d5e1731 | ||
|
|
d9962f65b3 | ||
|
|
119e88d87b | ||
|
|
71d9e010d9 | ||
|
|
5718caa957 | ||
|
|
efd8a32ed6 | ||
|
|
b22d700e16 | ||
|
|
ccdacea0c4 | ||
|
|
4bdcbc1cb5 | ||
|
|
833c6cf2ec | ||
|
|
dd6dbdd90a | ||
|
|
63013cc565 | ||
|
|
912402364a | ||
|
|
159f51b12b | ||
|
|
7678a91b0e | ||
|
|
b13899c63d | ||
|
|
3a0d882c5e | ||
|
|
cb81f0ad6d | ||
|
|
518bacf628 | ||
|
|
ca63b03e55 | ||
|
|
cecef88d6b | ||
|
|
7ffd805a03 | ||
|
|
a7e2a0c981 | ||
|
|
2a570bb4ca | ||
|
|
5ca8f0706d | ||
|
|
a9b4436cdc | ||
|
|
5f91999512 | ||
|
|
9f000beeaf | ||
|
|
ff0a71f212 | ||
|
|
22dfc6ec24 | ||
|
|
48147c079e | ||
|
|
d715479ef6 | ||
|
|
fc8298c468 | ||
|
|
e94ca5dc91 | ||
|
|
114b71b751 | ||
|
|
b2770a2087 | ||
|
|
cba1878bb2 | ||
|
|
a2e037d6af | ||
|
|
65a2b6a223 | ||
|
|
9ed799e803 | ||
|
|
c1c0ecca13 | ||
|
|
ee62836383 | ||
|
|
705f598b1a | ||
|
|
414de88925 | ||
|
|
53ffd245dd | ||
|
|
cf1b756206 | ||
|
|
22b58e31ef | ||
|
|
b7f9bf5a28 | ||
|
|
aba680b6c2 | ||
|
|
fabada95f6 | ||
|
|
9ccd8bb3ea | ||
|
|
1d68acf8f0 | ||
|
|
1e7697b551 | ||
|
|
4a4ec88d00 | ||
|
|
6adc778d62 | ||
|
|
6b7ebdb7e9 | ||
|
|
3d7facd774 | ||
|
|
eaee1f2cab | ||
|
|
ff012221ae | ||
|
|
c398553748 | ||
|
|
3ccbcf6185 | ||
|
|
f0abc0ef59 | ||
|
|
a99fa3375d | ||
|
|
22c7e09b3f | ||
|
|
0dfe1d5b35 | ||
|
|
a99a3bc6d7 | ||
|
|
9804f25de3 | ||
|
|
ae98200660 | ||
|
|
e45420646f | ||
|
|
21be82ef8b | ||
|
|
001afe00cb | ||
|
|
19a5985f29 | ||
|
|
2715ee6c61 | ||
|
|
dc157fa28f | ||
|
|
1ff14b4e05 | ||
|
|
480ac254ab | ||
|
|
4b95db81aa | ||
|
|
c81e898435 | ||
|
|
f1646b96ca | ||
|
|
44f2b63e43 | ||
|
|
847a2bdc85 | ||
|
|
03f0f99469 | ||
|
|
3900e66158 | ||
|
|
3dff6cda40 | ||
|
|
73d05095b5 | ||
|
|
fcdc1728eb | ||
|
|
8b942ea237 | ||
|
|
88a1c5ca5d | ||
|
|
047176b297 | ||
|
|
dc4d0d8e71 | ||
|
|
b9c5c7bbde | ||
|
|
9daeed923f | ||
|
|
66b260cea9 | ||
|
|
58cf01c2ad | ||
|
|
d866841c19 | ||
|
|
a462a644fb | ||
|
|
678675a9a6 | ||
|
|
de9069ef1d | ||
|
|
c0c0a1a83a | ||
|
|
1d004b6dbd | ||
|
|
b90e1200d7 | ||
|
|
4493a0a804 | ||
|
|
58835b2b42 | ||
|
|
427597b603 | ||
|
|
7d64879ba8 | ||
|
|
bb715704b7 | ||
|
|
d67e9cc507 | ||
|
|
2927bbb2d6 | ||
|
|
0527b59180 | ||
|
|
a5ce1032d3 | ||
|
|
1c2acdc985 | ||
|
|
4e75534ef8 | ||
|
|
7a573cafd1 | ||
|
|
844194ee29 | ||
|
|
609c5921d4 | ||
|
|
c79eaa089a | ||
|
|
e9d962f273 | ||
|
|
b5405174ec | ||
|
|
6eee601521 | ||
|
|
2fac2bee7c | ||
|
|
c140eeee6b | ||
|
|
c5988a04f9 | ||
|
|
a2e0f98693 | ||
|
|
1111153f06 | ||
|
|
e5a836cb7d | ||
|
|
b0de84cbc5 | ||
|
|
cbb718e10d | ||
|
|
b5ad9369fe | ||
|
|
4401de0413 | ||
|
|
6e671c5245 | ||
|
|
08848be784 | ||
|
|
b599fbae97 | ||
|
|
a8dabc99f6 | ||
|
|
f1130db131 | ||
|
|
735ec35546 | ||
|
|
5a009a2a64 | ||
|
|
d9e9526247 | ||
|
|
5a8c3b8be0 | ||
|
|
1c9c17fb9b | ||
|
|
7f82449179 | ||
|
|
e455ec994e | ||
|
|
c111027420 | ||
|
|
abcdf479e6 | ||
|
|
ad2371f810 | ||
|
|
c4e2b0f95f | ||
|
|
3da62ec234 | ||
|
|
01233991f3 | ||
|
|
ee35974273 | ||
|
|
7037e7365e | ||
|
|
03b13e8a1c | ||
|
|
cdd2da0208 | ||
|
|
cec0e0cf02 | ||
|
|
8122ddedfe | ||
|
|
55a77c5e89 | ||
|
|
461f31582d | ||
|
|
f356faa278 | ||
|
|
9f034d9c4c | ||
|
|
ba52590ae4 | ||
|
|
92edea1de5 | ||
|
|
7ff46966da | ||
|
|
fca70b3508 | ||
|
|
70009cd984 | ||
|
|
8d8b88c4fd | ||
|
|
c4b0cccefd | ||
|
|
7c2beba555 | ||
|
|
7d8d94388b | ||
|
|
0b46b1a614 | ||
|
|
5153db6bff | ||
|
|
b0af4b3712 | ||
|
|
c8f4aeaefa | ||
|
|
00da74400c | ||
|
|
83fb569d61 | ||
|
|
5a62cb4869 | ||
|
|
687df2fabd | ||
|
|
cdd0794d6e | ||
|
|
dcc988135e | ||
|
|
3db117d85f | ||
|
|
ee9aad82dd | ||
|
|
2d6eb63fce | ||
|
|
ca001c8504 | ||
|
|
4e581c59da | ||
|
|
dbd42bc6bf | ||
|
|
c862ec1b64 | ||
|
|
f709140571 | ||
|
|
ef1c4b7a20 | ||
|
|
6c94a63f1c | ||
|
|
20669c73d3 | ||
|
|
0da719f4c2 | ||
|
|
373194c38a | ||
|
|
3d245431fc | ||
|
|
250c8c56f0 | ||
|
|
e136231c8e | ||
|
|
98ffaadf52 | ||
|
|
ebb1981803 | ||
|
|
72361c99e1 | ||
|
|
d5c9c8ebbd | ||
|
|
746229846d | ||
|
|
ffd7cd3ca8 | ||
|
|
b3cecabca3 | ||
|
|
662541c64c | ||
|
|
225bd80ea8 | ||
|
|
85e54980cc | ||
|
|
a19a0fa9f3 | ||
|
|
9bb6e0dc62 | ||
|
|
15ddcf53e7 | ||
|
|
6b54972ec0 | ||
|
|
0219eada23 | ||
|
|
8916bce306 | ||
|
|
99edba4fd9 | ||
|
|
64de3e01e8 | ||
|
|
8222ccc40b | ||
|
|
dc449bf8b0 | ||
|
|
ef0ecf878b | ||
|
|
53f1e3c91d | ||
|
|
eeef80919f | ||
|
|
987bce2182 | ||
|
|
b511d686f0 | ||
|
|
132a83501e | ||
|
|
e565ad5f55 | ||
|
|
f955d2bd58 | ||
|
|
5953399090 | ||
|
|
d26a944d95 | ||
|
|
50dac15568 | ||
|
|
ac1e11e4ce | ||
|
|
d749683d48 | ||
|
|
84e8e1ddfb | ||
|
|
6e58514b84 | ||
|
|
803e156509 | ||
|
|
c06aa683eb | ||
|
|
6644ceef49 | ||
|
|
bd3b3863ae | ||
|
|
ffd4f9c8b9 | ||
|
|
760ff2db72 | ||
|
|
f37187a041 | ||
|
|
1cdb170290 | ||
|
|
d5de3f2fe0 | ||
|
|
d76673e62d | ||
|
|
c549f367c1 | ||
|
|
927c3bce96 | ||
|
|
d75a2c77da | ||
|
|
e6c55d7ff9 | ||
|
|
4c2cb26991 | ||
|
|
dfe7f1d9af | ||
|
|
666297f6fb | ||
|
|
55a011b9c1 | ||
|
|
27aff12a1e | ||
|
|
9a87ee2fe4 | ||
|
|
0a9f4c6074 | ||
|
|
7219331057 | ||
|
|
2fd12a839c | ||
|
|
8c73e0cbc2 | ||
|
|
52e06226a2 | ||
|
|
452592519d | ||
|
|
c9281f8912 | ||
|
|
36d6d29a0c | ||
|
|
db6059e100 | ||
|
|
aab57cb24b | ||
|
|
f00b939402 | ||
|
|
bef9617638 | ||
|
|
692175f5b0 | ||
|
|
5ad65450c4 | ||
|
|
60c96f990a | ||
|
|
07b2bf1104 | ||
|
|
ac1bc232a9 | ||
|
|
5919607ad0 | ||
|
|
07ea629ca5 | ||
|
|
b629d18df6 | ||
|
|
566cbb6507 |
32
.github/ISSUE_TEMPLATE/bug_report.md
vendored
32
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -11,30 +11,38 @@ NOTE:
|
|||||||
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
|
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md
|
||||||
|
|
||||||
|
|
||||||
**Describe the bug**
|
### Describe the bug
|
||||||
a description of what the bug is
|
a description of what the bug is
|
||||||
|
|
||||||
**To Reproduce**
|
### To Reproduce
|
||||||
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
|
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it
|
||||||
|
|
||||||
**Expected behavior**
|
### Expected behavior
|
||||||
a description of what you expected to happen
|
a description of what you expected to happen
|
||||||
|
|
||||||
**Screenshots**
|
### Screenshots
|
||||||
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
|
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)
|
||||||
|
|
||||||
**Server details**
|
### Server details (if you are using docker/podman)
|
||||||
if the issue is possibly on the server-side, then mention some of the following:
|
remove the ones that are not relevant:
|
||||||
* server OS / version:
|
* **server OS / version:**
|
||||||
* python version:
|
* **how you're running copyparty:** (docker/podman/something-else)
|
||||||
* copyparty arguments:
|
* **docker image:** (variant, version, and arch if you know)
|
||||||
* filesystem (`lsblk -f` on linux):
|
* **copyparty arguments and/or config-file:**
|
||||||
|
|
||||||
**Client details**
|
### Server details (if you're NOT using docker/podman)
|
||||||
|
remove the ones that are not relevant:
|
||||||
|
* **server OS / version:**
|
||||||
|
* **what copyparty did you grab:** (sfx/exe/pip/aur/...)
|
||||||
|
* **how you're running it:** (in a terminal, as a systemd-service, ...)
|
||||||
|
* run copyparty with `--version` and grab the last 3 lines (they start with `copyparty`, `CPython`, `sqlite`) and paste them below this line:
|
||||||
|
* **copyparty arguments and/or config-file:**
|
||||||
|
|
||||||
|
### Client details
|
||||||
if the issue is possibly on the client-side, then mention some of the following:
|
if the issue is possibly on the client-side, then mention some of the following:
|
||||||
* the device type and model:
|
* the device type and model:
|
||||||
* OS version:
|
* OS version:
|
||||||
* browser version:
|
* browser version:
|
||||||
|
|
||||||
**Additional context**
|
### Additional context
|
||||||
any other context about the problem here
|
any other context about the problem here
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -30,6 +30,7 @@ copyparty/res/COPYING.txt
|
|||||||
copyparty/web/deps/
|
copyparty/web/deps/
|
||||||
srv/
|
srv/
|
||||||
scripts/docker/i/
|
scripts/docker/i/
|
||||||
|
scripts/deps-docker/uncomment.py
|
||||||
contrib/package/arch/pkg/
|
contrib/package/arch/pkg/
|
||||||
contrib/package/arch/src/
|
contrib/package/arch/src/
|
||||||
|
|
||||||
|
|||||||
24
.vscode/settings.json
vendored
24
.vscode/settings.json
vendored
@@ -22,6 +22,9 @@
|
|||||||
"terminal.ansiBrightCyan": "#9cf0ed",
|
"terminal.ansiBrightCyan": "#9cf0ed",
|
||||||
"terminal.ansiBrightWhite": "#ffffff",
|
"terminal.ansiBrightWhite": "#ffffff",
|
||||||
},
|
},
|
||||||
|
"python.terminal.activateEnvironment": false,
|
||||||
|
"python.analysis.enablePytestSupport": false,
|
||||||
|
"python.analysis.typeCheckingMode": "standard",
|
||||||
"python.testing.pytestEnabled": false,
|
"python.testing.pytestEnabled": false,
|
||||||
"python.testing.unittestEnabled": true,
|
"python.testing.unittestEnabled": true,
|
||||||
"python.testing.unittestArgs": [
|
"python.testing.unittestArgs": [
|
||||||
@@ -31,23 +34,8 @@
|
|||||||
"-p",
|
"-p",
|
||||||
"test_*.py"
|
"test_*.py"
|
||||||
],
|
],
|
||||||
"python.linting.pylintEnabled": true,
|
// python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
|
||||||
"python.linting.flake8Enabled": true,
|
"editor.formatOnSave": false,
|
||||||
"python.linting.banditEnabled": true,
|
|
||||||
"python.linting.mypyEnabled": true,
|
|
||||||
"python.linting.flake8Args": [
|
|
||||||
"--max-line-length=120",
|
|
||||||
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
|
|
||||||
],
|
|
||||||
"python.linting.banditArgs": [
|
|
||||||
"--ignore=B104,B110,B112"
|
|
||||||
],
|
|
||||||
// python3 -m isort --py=27 --profile=black copyparty/
|
|
||||||
"python.formatting.provider": "none",
|
|
||||||
"[python]": {
|
|
||||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
|
||||||
},
|
|
||||||
"editor.formatOnSave": true,
|
|
||||||
"[html]": {
|
"[html]": {
|
||||||
"editor.formatOnSave": false,
|
"editor.formatOnSave": false,
|
||||||
"editor.autoIndent": "keep",
|
"editor.autoIndent": "keep",
|
||||||
@@ -58,6 +46,4 @@
|
|||||||
"files.associations": {
|
"files.associations": {
|
||||||
"*.makefile": "makefile"
|
"*.makefile": "makefile"
|
||||||
},
|
},
|
||||||
"python.linting.enabled": true,
|
|
||||||
"python.pythonPath": "/usr/bin/python3"
|
|
||||||
}
|
}
|
||||||
@@ -28,6 +28,8 @@ aside from documentation and ideas, some other things that would be cool to have
|
|||||||
|
|
||||||
* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
|
* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
|
||||||
|
|
||||||
|
* but please note that support for [RTL (Right-to-Left) languages](https://en.wikipedia.org/wiki/Right-to-left_script) is currently not planned, since the javascript is a bit too jank for that
|
||||||
|
|
||||||
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
|
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
|
||||||
|
|
||||||
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
|
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
|
||||||
|
|||||||
@@ -15,22 +15,18 @@ produces a chronological list of all uploads by collecting info from up2k databa
|
|||||||
# [`partyfuse.py`](partyfuse.py)
|
# [`partyfuse.py`](partyfuse.py)
|
||||||
* mount a copyparty server as a local filesystem (read-only)
|
* mount a copyparty server as a local filesystem (read-only)
|
||||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||||
* **supports Linux** -- expect `117 MiB/s` sequential read
|
* **supports Linux** -- expect `600 MiB/s` sequential read
|
||||||
* **supports macos** -- expect `85 MiB/s` sequential read
|
* **supports macos** -- expect `85 MiB/s` sequential read
|
||||||
|
|
||||||
filecache is default-on for windows and macos;
|
|
||||||
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
|
|
||||||
* windows readsize varies by software; explorer=1M, pv=32k
|
|
||||||
|
|
||||||
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
|
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
|
||||||
|
|
||||||
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
|
and consider using [../docs/rclone.md](../docs/rclone.md) instead; usually a bit faster, especially on windows
|
||||||
|
|
||||||
|
|
||||||
## to run this on windows:
|
## to run this on windows:
|
||||||
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
|
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
|
||||||
* [x] add python 3.x to PATH (it asks during install)
|
* [x] add python 3.x to PATH (it asks during install)
|
||||||
* `python -m pip install --user fusepy`
|
* `python -m pip install --user fusepy` (or grab a copy of `fuse.py` from the `connect` page on your copyparty, and keep it in the same folder)
|
||||||
* `python ./partyfuse.py n: http://192.168.1.69:3923/`
|
* `python ./partyfuse.py n: http://192.168.1.69:3923/`
|
||||||
|
|
||||||
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
|
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
|
||||||
@@ -82,3 +78,6 @@ cd /mnt/nas/music/.hist
|
|||||||
# [`prisonparty.sh`](prisonparty.sh)
|
# [`prisonparty.sh`](prisonparty.sh)
|
||||||
* run copyparty in a chroot, preventing any accidental file access
|
* run copyparty in a chroot, preventing any accidental file access
|
||||||
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
||||||
|
|
||||||
|
# [`bubbleparty.sh`](bubbleparty.sh)
|
||||||
|
* run copyparty in an isolated process, preventing any accidental file access and more
|
||||||
|
|||||||
19
bin/bubbleparty.sh
Executable file
19
bin/bubbleparty.sh
Executable file
@@ -0,0 +1,19 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# usage: ./bubbleparty.sh ./copyparty-sfx.py ....
|
||||||
|
bwrap \
|
||||||
|
--unshare-all \
|
||||||
|
--ro-bind /usr /usr \
|
||||||
|
--ro-bind /bin /bin \
|
||||||
|
--ro-bind /lib /lib \
|
||||||
|
--ro-bind /etc/resolv.conf /etc/resolv.conf \
|
||||||
|
--dev-bind /dev /dev \
|
||||||
|
--dir /tmp \
|
||||||
|
--dir /var \
|
||||||
|
--bind $(pwd) $(pwd) \
|
||||||
|
--share-net \
|
||||||
|
--die-with-parent \
|
||||||
|
--file 11 /etc/passwd \
|
||||||
|
--file 12 /etc/group \
|
||||||
|
"$@" \
|
||||||
|
11< <(getent passwd $(id -u) 65534) \
|
||||||
|
12< <(getent group $(id -g) 65534)
|
||||||
@@ -20,6 +20,8 @@ each plugin must define a `main()` which takes 3 arguments;
|
|||||||
|
|
||||||
## on404
|
## on404
|
||||||
|
|
||||||
|
* [redirect.py](redirect.py) sends an HTTP 301 or 302, redirecting the client to another page/file
|
||||||
|
* [randpic.py](randpic.py) redirects `/foo/bar/randpic.jpg` to a random pic in `/foo/bar/`
|
||||||
* [sorry.py](answer.py) replies with a custom message instead of the usual 404
|
* [sorry.py](answer.py) replies with a custom message instead of the usual 404
|
||||||
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
|
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
|
||||||
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
|
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
|
||||||
|
|||||||
35
bin/handlers/randpic.py
Normal file
35
bin/handlers/randpic.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import os
|
||||||
|
import random
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
|
||||||
|
# assuming /foo/bar/ is a valid URL but /foo/bar/randpic.png does not exist,
|
||||||
|
# hijack the 404 with a redirect to a random pic in that folder
|
||||||
|
#
|
||||||
|
# thx to lia & kipu for the idea
|
||||||
|
|
||||||
|
|
||||||
|
def main(cli, vn, rem):
|
||||||
|
req_fn = rem.split("/")[-1]
|
||||||
|
if not cli.can_read or not req_fn.startswith("randpic"):
|
||||||
|
return
|
||||||
|
|
||||||
|
req_abspath = vn.canonical(rem)
|
||||||
|
req_ap_dir = os.path.dirname(req_abspath)
|
||||||
|
files_in_dir = os.listdir(req_ap_dir)
|
||||||
|
|
||||||
|
if "." in req_fn:
|
||||||
|
file_ext = "." + req_fn.split(".")[-1]
|
||||||
|
files_in_dir = [x for x in files_in_dir if x.lower().endswith(file_ext)]
|
||||||
|
|
||||||
|
if not files_in_dir:
|
||||||
|
return
|
||||||
|
|
||||||
|
selected_file = random.choice(files_in_dir)
|
||||||
|
|
||||||
|
req_url = "/".join([vn.vpath, rem]).strip("/")
|
||||||
|
req_dir = req_url.rsplit("/", 1)[0]
|
||||||
|
new_url = "/".join([req_dir, quote(selected_file)]).strip("/")
|
||||||
|
|
||||||
|
cli.reply(b"redirecting...", 302, headers={"Location": "/" + new_url})
|
||||||
|
return "true"
|
||||||
52
bin/handlers/redirect.py
Normal file
52
bin/handlers/redirect.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# if someone hits a 404, redirect them to another location
|
||||||
|
|
||||||
|
|
||||||
|
def send_http_302_temporary_redirect(cli, new_path):
|
||||||
|
"""
|
||||||
|
replies with an HTTP 302, which is a temporary redirect;
|
||||||
|
"new_path" can be any of the following:
|
||||||
|
- "http://a.com/" would redirect to another website,
|
||||||
|
- "/foo/bar" would redirect to /foo/bar on the same server;
|
||||||
|
note the leading '/' in the location which is important
|
||||||
|
"""
|
||||||
|
cli.reply(b"redirecting...", 302, headers={"Location": new_path})
|
||||||
|
|
||||||
|
|
||||||
|
def send_http_301_permanent_redirect(cli, new_path):
|
||||||
|
"""
|
||||||
|
replies with an HTTP 301, which is a permanent redirect;
|
||||||
|
otherwise identical to send_http_302_temporary_redirect
|
||||||
|
"""
|
||||||
|
cli.reply(b"redirecting...", 301, headers={"Location": new_path})
|
||||||
|
|
||||||
|
|
||||||
|
def send_errorpage_with_redirect_link(cli, new_path):
|
||||||
|
"""
|
||||||
|
replies with a website explaining that the page has moved;
|
||||||
|
"new_path" must be an absolute location on the same server
|
||||||
|
but without a leading '/', so for example "foo/bar"
|
||||||
|
would redirect to "/foo/bar"
|
||||||
|
"""
|
||||||
|
cli.redirect(new_path, click=False, msg="this page has moved")
|
||||||
|
|
||||||
|
|
||||||
|
def main(cli, vn, rem):
|
||||||
|
"""
|
||||||
|
this is the function that gets called by copyparty;
|
||||||
|
note that vn.vpath and cli.vpath does not have a leading '/'
|
||||||
|
so we're adding the slash in the debug messages below
|
||||||
|
"""
|
||||||
|
print(f"this client just hit a 404: {cli.ip}")
|
||||||
|
print(f"they were accessing this volume: /{vn.vpath}")
|
||||||
|
print(f"and the original request-path (straight from the URL) was /{cli.vpath}")
|
||||||
|
print(f"...which resolves to the following filesystem path: {vn.canonical(rem)}")
|
||||||
|
|
||||||
|
new_path = "/foo/bar/"
|
||||||
|
print(f"will now redirect the client to {new_path}")
|
||||||
|
|
||||||
|
# uncomment one of these:
|
||||||
|
send_http_302_temporary_redirect(cli, new_path)
|
||||||
|
#send_http_301_permanent_redirect(cli, new_path)
|
||||||
|
#send_errorpage_with_redirect_link(cli, new_path)
|
||||||
|
|
||||||
|
return "true"
|
||||||
@@ -2,7 +2,7 @@ standalone programs which are executed by copyparty when an event happens (uploa
|
|||||||
|
|
||||||
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
||||||
|
|
||||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
|
run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbc/xac/xbr/xar/xbd/xad/xban)
|
||||||
|
|
||||||
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
||||||
|
|
||||||
@@ -13,6 +13,7 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xb
|
|||||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||||
|
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
|
||||||
|
|
||||||
|
|
||||||
# upload batches
|
# upload batches
|
||||||
@@ -23,7 +24,11 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin
|
|||||||
|
|
||||||
# before upload
|
# before upload
|
||||||
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
||||||
|
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension
|
||||||
|
|
||||||
|
|
||||||
# on message
|
# on message
|
||||||
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
||||||
|
* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
|
||||||
|
* [usb-eject.py](usb-eject.py) adds web-UI buttons to safe-remove usb flashdrives shared through copyparty
|
||||||
|
* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
|
||||||
|
|||||||
@@ -12,19 +12,28 @@ announces a new upload on discord
|
|||||||
example usage as global config:
|
example usage as global config:
|
||||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xau = execute after upload
|
||||||
|
f = fork; don't delay other hooks while this is running
|
||||||
|
t5 = timeout if it's still running after 5 sec
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
|
||||||
example usage as a volflag (per-volume config):
|
example usage as a volflag (per-volume config):
|
||||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
(share filesystem-path srv/inc as volume /inc,
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
readable by everyone, read-write for user 'ed',
|
readable by everyone, read-write for user 'ed',
|
||||||
running this plugin on all uploads with the params listed below)
|
running this plugin on all uploads with the params explained above)
|
||||||
|
|
||||||
parameters explained,
|
example usage as a volflag in a copyparty config file:
|
||||||
xbu = execute after upload
|
[/inc]
|
||||||
f = fork; don't wait for it to finish
|
srv/inc
|
||||||
t5 = timeout if it's still running after 5 sec
|
accs:
|
||||||
j = provide upload information as json; not just the filename
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xau: f,t5,j,bin/hooks/discord-announce.py
|
||||||
|
|
||||||
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
||||||
|
|
||||||
|
|||||||
140
bin/hooks/into-the-cache-it-goes.py
Normal file
140
bin/hooks/into-the-cache-it-goes.py
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import platform
|
||||||
|
import subprocess as sp
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
try to avoid race conditions in caching proxies
|
||||||
|
(primarily cloudflare, but probably others too)
|
||||||
|
by means of the most obvious solution possible:
|
||||||
|
|
||||||
|
just as each file has finished uploading, use
|
||||||
|
the server's external URL to download the file
|
||||||
|
so that it ends up in the cache, warm and snug
|
||||||
|
|
||||||
|
this intentionally delays the upload response
|
||||||
|
as it waits for the file to finish downloading
|
||||||
|
before copyparty is allowed to return the URL
|
||||||
|
|
||||||
|
NOTE: you must edit this script before use,
|
||||||
|
replacing https://example.com with your URL
|
||||||
|
|
||||||
|
NOTE: if the files are only accessible with a
|
||||||
|
password and/or filekey, you must also add
|
||||||
|
a cromulent password in the PASSWORD field
|
||||||
|
|
||||||
|
NOTE: needs either wget, curl, or "requests":
|
||||||
|
python3 -m pip install --user -U requests
|
||||||
|
|
||||||
|
|
||||||
|
example usage as global config:
|
||||||
|
--xau j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xau = execute after upload
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
t10 = abort download and continue if it takes longer than 10sec
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config):
|
||||||
|
-v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
|
readable by everyone, read-write for user 'ed',
|
||||||
|
running this plugin on all uploads with params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/inc]
|
||||||
|
srv/inc
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xau: j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# replace this with your site's external URL
|
||||||
|
# (including the :portnumber if necessary)
|
||||||
|
SITE_URL = "https://example.com"
|
||||||
|
|
||||||
|
# if downloading is protected by passwords or filekeys,
|
||||||
|
# specify a valid password between the quotes below:
|
||||||
|
PASSWORD = ""
|
||||||
|
|
||||||
|
# if file is larger than this, skip download
|
||||||
|
MAX_MEGABYTES = 8
|
||||||
|
|
||||||
|
# =============== END OF CONFIG ===============
|
||||||
|
|
||||||
|
|
||||||
|
WINDOWS = platform.system() == "Windows"
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
fun = download_with_python
|
||||||
|
if shutil.which("curl"):
|
||||||
|
fun = download_with_curl
|
||||||
|
elif shutil.which("wget"):
|
||||||
|
fun = download_with_wget
|
||||||
|
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
|
||||||
|
if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
|
||||||
|
print("[into-the-cache] file is too large; will not download")
|
||||||
|
return
|
||||||
|
|
||||||
|
file_url = "/"
|
||||||
|
if inf["vp"]:
|
||||||
|
file_url += inf["vp"] + "/"
|
||||||
|
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
|
||||||
|
file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")
|
||||||
|
|
||||||
|
print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
|
||||||
|
fun(file_url, PASSWORD.strip())
|
||||||
|
|
||||||
|
print("[into-the-cache] Download OK")
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_curl(url, pw):
|
||||||
|
cmd = ["curl"]
|
||||||
|
|
||||||
|
if pw:
|
||||||
|
cmd += ["-HPW:%s" % (pw,)]
|
||||||
|
|
||||||
|
nah = sp.DEVNULL
|
||||||
|
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_wget(url, pw):
|
||||||
|
cmd = ["wget", "-O"]
|
||||||
|
|
||||||
|
cmd += ["nul" if WINDOWS else "/dev/null"]
|
||||||
|
|
||||||
|
if pw:
|
||||||
|
cmd += ["--header=PW:%s" % (pw,)]
|
||||||
|
|
||||||
|
nah = sp.DEVNULL
|
||||||
|
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||||
|
|
||||||
|
|
||||||
|
def download_with_python(url, pw):
|
||||||
|
import requests
|
||||||
|
|
||||||
|
headers = {}
|
||||||
|
if pw:
|
||||||
|
headers["PW"] = pw
|
||||||
|
|
||||||
|
with requests.get(url, headers=headers, stream=True) as r:
|
||||||
|
r.raise_for_status()
|
||||||
|
for _ in r.iter_content(chunk_size=1024 * 256):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -14,19 +14,32 @@ except:
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
"""
|
_ = r"""
|
||||||
use copyparty as a dumb messaging server / guestbook thing;
|
use copyparty as a dumb messaging server / guestbook thing;
|
||||||
|
accepts guestbook entries from 📟 (message-to-server-log) in the web-ui
|
||||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||||
|
|
||||||
Sample usage:
|
example usage as global config:
|
||||||
|
|
||||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||||
|
|
||||||
Where:
|
parameters explained,
|
||||||
|
xm = execute on message (📟)
|
||||||
|
j = this hook needs message information as json (not just the message-text)
|
||||||
|
|
||||||
xm = execute on message-to-server-log
|
example usage as a volflag (per-volume config):
|
||||||
j = provide message information as json; not just the text - this script REQUIRES json
|
python copyparty-sfx.py -v srv/log:log:r:c,xm=j,bin/hooks/msg-log.py
|
||||||
t10 = timeout and kill download after 10 secs
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/log as volume /log, readable by everyone,
|
||||||
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/log]
|
||||||
|
srv/log
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
flags:
|
||||||
|
xm: j,bin/hooks/msg-log.py
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
128
bin/hooks/qbittorrent-magnet.py
Executable file
128
bin/hooks/qbittorrent-magnet.py
Executable file
@@ -0,0 +1,128 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# coding: utf-8
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import shutil
|
||||||
|
import subprocess as sp
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
start downloading a torrent by POSTing a magnet URL to copyparty,
|
||||||
|
for example using 📟 (message-to-server-log) in the web-ui
|
||||||
|
|
||||||
|
by default it will download the torrent to the folder you were in
|
||||||
|
when you pasted the magnet into the message-to-server-log field
|
||||||
|
|
||||||
|
you can optionally specify another location by adding a whitespace
|
||||||
|
after the magnet URL followed by the name of the subfolder to DL into,
|
||||||
|
or for example "anime/airing" would download to /srv/media/anime/airing
|
||||||
|
because the keyword "anime" is in the DESTS config below
|
||||||
|
|
||||||
|
needs python3
|
||||||
|
|
||||||
|
example usage as global config (not a good idea):
|
||||||
|
python copyparty-sfx.py --xm aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xm = execute on message (📟)
|
||||||
|
aw = only users with write-access can use this
|
||||||
|
f = fork; don't delay other hooks while this is running
|
||||||
|
j = provide message information as json (not just the text)
|
||||||
|
t60 = abort if qbittorrent has to think about it for more than 1 min
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config, much better):
|
||||||
|
-v srv/qb:qb:A,ed:c,xm=aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
|
||||||
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/qb]
|
||||||
|
srv/qb
|
||||||
|
accs:
|
||||||
|
A: ed
|
||||||
|
flags:
|
||||||
|
xm: aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||||
|
|
||||||
|
the volflag examples only kicks in if you send the torrent magnet
|
||||||
|
while you're in the /qb folder (or any folder below there)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
# list of usernames to allow
|
||||||
|
ALLOWLIST = [ "ed", "morpheus" ]
|
||||||
|
|
||||||
|
|
||||||
|
# list of destination aliases to translate into full filesystem
|
||||||
|
# paths; takes effect if the first folder component in the
|
||||||
|
# custom download location matches anything in this dict
|
||||||
|
DESTS = {
|
||||||
|
"iso": "/srv/pub/linux-isos",
|
||||||
|
"anime": "/srv/media/anime",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
url = inf["txt"]
|
||||||
|
if not url.lower().startswith("magnet:?"):
|
||||||
|
# not a magnet, abort
|
||||||
|
return
|
||||||
|
|
||||||
|
if inf["user"] not in ALLOWLIST:
|
||||||
|
print("🧲 denied for user", inf["user"])
|
||||||
|
return
|
||||||
|
|
||||||
|
# might as well run the command inside the filesystem folder
|
||||||
|
# which matches the URL that the magnet message was sent to
|
||||||
|
os.chdir(inf["ap"])
|
||||||
|
|
||||||
|
# is there is a custom download location in the url?
|
||||||
|
dst = ""
|
||||||
|
if " " in url:
|
||||||
|
url, dst = url.split(" ", 1)
|
||||||
|
|
||||||
|
# is the location in the predefined list of locations?
|
||||||
|
parts = dst.replace("\\", "/").split("/")
|
||||||
|
if parts[0] in DESTS:
|
||||||
|
dst = os.path.join(DESTS[parts[0]], *(parts[1:]))
|
||||||
|
|
||||||
|
else:
|
||||||
|
# nope, so download to the current folder instead;
|
||||||
|
# comment the dst line below to instead use the default
|
||||||
|
# download location from your qbittorrent settings
|
||||||
|
dst = inf["ap"]
|
||||||
|
pass
|
||||||
|
|
||||||
|
# archlinux has a -nox suffix for qbittorrent if headless
|
||||||
|
# so check if we should be using that
|
||||||
|
if shutil.which("qbittorrent-nox"):
|
||||||
|
torrent_bin = "qbittorrent-nox"
|
||||||
|
else:
|
||||||
|
torrent_bin = "qbittorrent"
|
||||||
|
|
||||||
|
# the command to add a new torrent, adjust if necessary
|
||||||
|
cmd = [torrent_bin, url]
|
||||||
|
if dst:
|
||||||
|
cmd += ["--save-path=%s" % (dst,)]
|
||||||
|
|
||||||
|
# if copyparty and qbittorrent are running as different users
|
||||||
|
# you may have to do something like the following
|
||||||
|
# (assuming qbittorrent* is nopasswd-allowed in sudoers):
|
||||||
|
#
|
||||||
|
# cmd = ["sudo", "-u", "qbitter"] + cmd
|
||||||
|
|
||||||
|
print("🧲", cmd)
|
||||||
|
|
||||||
|
try:
|
||||||
|
sp.check_call(cmd)
|
||||||
|
except:
|
||||||
|
print("🧲 FAILED TO ADD", url)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
||||||
127
bin/hooks/reloc-by-ext.py
Normal file
127
bin/hooks/reloc-by-ext.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
relocate/redirect incoming uploads according to file extension or name
|
||||||
|
|
||||||
|
example usage as global config:
|
||||||
|
--xbu j,c1,bin/hooks/reloc-by-ext.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
xbu = execute before upload
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config):
|
||||||
|
-v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
|
readable by everyone, read-write for user 'ed',
|
||||||
|
running this plugin on all uploads with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/inc]
|
||||||
|
srv/inc
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xbu: j,c1,bin/hooks/reloc-by-ext.py
|
||||||
|
|
||||||
|
note: this could also work as an xau hook (after-upload), but
|
||||||
|
because it doesn't need to read the file contents its better
|
||||||
|
as xbu (before-upload) since that's safer / less buggy,
|
||||||
|
and only xbu works with up2k (dragdrop into browser)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
|
||||||
|
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
|
||||||
|
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
vdir, fn = os.path.split(inf["vp"])
|
||||||
|
|
||||||
|
try:
|
||||||
|
fn, ext = fn.rsplit(".", 1)
|
||||||
|
except:
|
||||||
|
# no file extension; pretend it's "bin"
|
||||||
|
ext = "bin"
|
||||||
|
|
||||||
|
ext = ext.lower()
|
||||||
|
|
||||||
|
# this function must end by printing the action to perform;
|
||||||
|
# that's handled by the print(json.dumps(... at the bottom
|
||||||
|
#
|
||||||
|
# the action can contain the following keys:
|
||||||
|
# "vp" is the folder URL to move the upload to,
|
||||||
|
# "ap" is the filesystem-path to move it to (but "vp" is safer),
|
||||||
|
# "fn" overrides the final filename to use
|
||||||
|
|
||||||
|
##
|
||||||
|
## some example actions to take; pick one by
|
||||||
|
## selecting it inside the print at the end:
|
||||||
|
##
|
||||||
|
|
||||||
|
# create a subfolder named after the filetype and move it into there
|
||||||
|
into_subfolder = {"vp": ext}
|
||||||
|
|
||||||
|
# move it into a toplevel folder named after the filetype
|
||||||
|
into_toplevel = {"vp": "/" + ext}
|
||||||
|
|
||||||
|
# move it into a filetype-named folder next to the target folder
|
||||||
|
into_sibling = {"vp": "../" + ext}
|
||||||
|
|
||||||
|
# move images into "/just/pics", vids into "/just/vids",
|
||||||
|
# music into "/just/tunes", and anything else as-is
|
||||||
|
if ext in PICS.split():
|
||||||
|
by_category = {"vp": "/just/pics"}
|
||||||
|
elif ext in VIDS.split():
|
||||||
|
by_category = {"vp": "/just/vids"}
|
||||||
|
elif ext in MUSIC.split():
|
||||||
|
by_category = {"vp": "/just/tunes"}
|
||||||
|
else:
|
||||||
|
by_category = {} # no action
|
||||||
|
|
||||||
|
# now choose the default effect to apply; can be any of these:
|
||||||
|
# into_subfolder into_toplevel into_sibling by_category
|
||||||
|
effect = {"vp": "/junk"}
|
||||||
|
|
||||||
|
##
|
||||||
|
## but we can keep going, adding more speicifc rules
|
||||||
|
## which can take precedence, replacing the fallback
|
||||||
|
## effect we just specified:
|
||||||
|
##
|
||||||
|
|
||||||
|
fn = fn.lower() # lowercase filename to make this easier
|
||||||
|
|
||||||
|
if "screenshot" in fn:
|
||||||
|
effect = {"vp": "/ss"}
|
||||||
|
if "mpv_" in fn:
|
||||||
|
effect = {"vp": "/anishots"}
|
||||||
|
elif "debian" in fn or "biebian" in fn:
|
||||||
|
effect = {"vp": "/linux-ISOs"}
|
||||||
|
elif re.search(r"ep(isode |\.)?[0-9]", fn):
|
||||||
|
effect = {"vp": "/podcasts"}
|
||||||
|
|
||||||
|
# regex lets you grab a part of the matching
|
||||||
|
# text and use that in the upload path:
|
||||||
|
m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
|
||||||
|
if m:
|
||||||
|
# the regex matched; use "anime-op" or "anime-ed"
|
||||||
|
effect = {"vp": "/anime-" + m[1]}
|
||||||
|
|
||||||
|
# aaand DO IT
|
||||||
|
print(json.dumps({"reloc": effect}))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
57
bin/hooks/usb-eject.js
Normal file
57
bin/hooks/usb-eject.js
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
// see usb-eject.py for usage
|
||||||
|
|
||||||
|
function usbclick() {
|
||||||
|
QS('#treeul a[href="/usb/"]').click();
|
||||||
|
}
|
||||||
|
|
||||||
|
function eject_cb() {
|
||||||
|
var t = this.responseText;
|
||||||
|
if (t.indexOf('can be safely unplugged') < 0 && t.indexOf('Device can be removed') < 0)
|
||||||
|
return toast.err(30, 'usb eject failed:\n\n' + t);
|
||||||
|
|
||||||
|
toast.ok(5, esc(t.replace(/ - /g, '\n\n')).trim());
|
||||||
|
usbclick(); setTimeout(usbclick, 10);
|
||||||
|
};
|
||||||
|
|
||||||
|
function add_eject_2(a) {
|
||||||
|
var aw = a.getAttribute('href').split(/\//g);
|
||||||
|
if (aw.length != 4 || aw[3])
|
||||||
|
return;
|
||||||
|
|
||||||
|
var v = aw[2],
|
||||||
|
k = 'umount_' + v,
|
||||||
|
o = ebi(k);
|
||||||
|
|
||||||
|
if (o)
|
||||||
|
o.parentNode.removeChild(o);
|
||||||
|
|
||||||
|
a.appendChild(mknod('span', k, '⏏'), a);
|
||||||
|
o = ebi(k);
|
||||||
|
o.style.cssText = 'position:absolute; right:1em; margin-top:-.2em; font-size:1.3em';
|
||||||
|
o.onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
var xhr = new XHR();
|
||||||
|
xhr.open('POST', get_evpath(), true);
|
||||||
|
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');
|
||||||
|
xhr.send('msg=' + uricom_enc(':usb-eject:' + v + ':'));
|
||||||
|
xhr.onload = xhr.onerror = eject_cb;
|
||||||
|
toast.inf(10, "ejecting " + v + "...");
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
function add_eject() {
|
||||||
|
var o = QSA('#treeul a[href^="/usb/"]');
|
||||||
|
for (var a = o.length - 1; a > 0; a--)
|
||||||
|
add_eject_2(o[a]);
|
||||||
|
};
|
||||||
|
|
||||||
|
(function() {
|
||||||
|
var f0 = treectl.rendertree;
|
||||||
|
treectl.rendertree = function (res, ts, top0, dst, rst) {
|
||||||
|
var ret = f0(res, ts, top0, dst, rst);
|
||||||
|
add_eject();
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
setTimeout(add_eject, 50);
|
||||||
58
bin/hooks/usb-eject.py
Normal file
58
bin/hooks/usb-eject.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import os
|
||||||
|
import stat
|
||||||
|
import subprocess as sp
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
if you've found yourself using copyparty to serve flashdrives on a LAN
|
||||||
|
and your only wish is that the web-UI had a button to unmount / safely
|
||||||
|
remove those flashdrives, then boy howdy are you in the right place :D
|
||||||
|
|
||||||
|
put usb-eject.js in the webroot (or somewhere else http-accessible)
|
||||||
|
then run copyparty with these args:
|
||||||
|
|
||||||
|
-v /run/media/egon:/usb:A:c,hist=/tmp/junk
|
||||||
|
--xm=c1,bin/hooks/usb-eject.py
|
||||||
|
--js-browser=/usb-eject.js
|
||||||
|
|
||||||
|
which does the following respectively,
|
||||||
|
|
||||||
|
* share all of /run/media/egon as /usb with admin for everyone
|
||||||
|
and put the histpath somewhere it won't cause trouble
|
||||||
|
* run the usb-eject hook with stdout redirect to the web-ui
|
||||||
|
* add the complementary usb-eject.js to the browser
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
try:
|
||||||
|
label = sys.argv[1].split(":usb-eject:")[1].split(":")[0]
|
||||||
|
mp = "/run/media/egon/" + label
|
||||||
|
# print("ejecting [%s]... " % (mp,), end="")
|
||||||
|
mp = os.path.abspath(os.path.realpath(mp.encode("utf-8")))
|
||||||
|
st = os.lstat(mp)
|
||||||
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
|
raise Exception("not a regular directory")
|
||||||
|
|
||||||
|
# if you're running copyparty as root (thx for the faith)
|
||||||
|
# you'll need something like this to make dbus talkative
|
||||||
|
cmd = b"sudo -u egon DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/1000/bus gio mount -e"
|
||||||
|
|
||||||
|
# but if copyparty and the ui-session is running
|
||||||
|
# as the same user (good) then this is plenty
|
||||||
|
cmd = b"gio mount -e"
|
||||||
|
|
||||||
|
cmd = cmd.split(b" ") + [mp]
|
||||||
|
ret = sp.check_output(cmd).decode("utf-8", "replace")
|
||||||
|
print(ret.strip() or (label + " can be safely unplugged"))
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
print("unmount failed: %r" % (ex,))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -9,25 +9,38 @@ import subprocess as sp
|
|||||||
_ = r"""
|
_ = r"""
|
||||||
use copyparty as a file downloader by POSTing URLs as
|
use copyparty as a file downloader by POSTing URLs as
|
||||||
application/x-www-form-urlencoded (for example using the
|
application/x-www-form-urlencoded (for example using the
|
||||||
message/pager function on the website)
|
📟 message-to-server-log in the web-ui)
|
||||||
|
|
||||||
example usage as global config:
|
example usage as global config:
|
||||||
--xm f,j,t3600,bin/hooks/wget.py
|
--xm aw,f,j,t3600,bin/hooks/wget.py
|
||||||
|
|
||||||
example usage as a volflag (per-volume config):
|
|
||||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
(share filesystem-path srv/inc as volume /inc,
|
|
||||||
readable by everyone, read-write for user 'ed',
|
|
||||||
running this plugin on all messages with the params listed below)
|
|
||||||
|
|
||||||
parameters explained,
|
parameters explained,
|
||||||
xm = execute on message-to-server-log
|
xm = execute on message-to-server-log
|
||||||
f = fork so it doesn't block uploads
|
aw = only users with write-access can use this
|
||||||
j = provide message information as json; not just the text
|
f = fork; don't delay other hooks while this is running
|
||||||
|
j = provide message information as json (not just the text)
|
||||||
c3 = mute all output
|
c3 = mute all output
|
||||||
t3600 = timeout and kill download after 1 hour
|
t3600 = timeout and abort download after 1 hour
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config):
|
||||||
|
-v srv/inc:inc:r:rw,ed:c,xm=aw,f,j,t3600,bin/hooks/wget.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share filesystem-path srv/inc as volume /inc,
|
||||||
|
readable by everyone, read-write for user 'ed',
|
||||||
|
running this plugin on all messages with the params explained above)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/inc]
|
||||||
|
srv/inc
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
xm: aw,f,j,t3600,bin/hooks/wget.py
|
||||||
|
|
||||||
|
the volflag examples only kicks in if you send the message
|
||||||
|
while you're in the /inc folder (or any folder below there)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -31,6 +31,9 @@ plugins in this section should only be used with appropriate precautions:
|
|||||||
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
||||||
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
||||||
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
||||||
|
* [kamelåså](https://github.com/steinuil/kameloso) is a much better (and MUCH safer) alternative to this plugin
|
||||||
|
* powered by [chicken-curry-banana-pineapple-peanut pizza](https://a.ocv.me/pub/g/i/2025/01/298437ce-8351-4c8c-861c-fa131d217999.jpg?cache) so you know it's good
|
||||||
|
* and, unlike this plugin, kamelåså even has windows support (nice)
|
||||||
|
|
||||||
|
|
||||||
# dependencies
|
# dependencies
|
||||||
|
|||||||
@@ -6,6 +6,11 @@ WARNING -- DANGEROUS PLUGIN --
|
|||||||
running this plugin, they can execute malware on your machine
|
running this plugin, they can execute malware on your machine
|
||||||
so please keep this on a LAN and protect it with a password
|
so please keep this on a LAN and protect it with a password
|
||||||
|
|
||||||
|
here is a MUCH BETTER ALTERNATIVE (which also works on Windows):
|
||||||
|
https://github.com/steinuil/kameloso
|
||||||
|
|
||||||
|
----------------------------------------------------------------------
|
||||||
|
|
||||||
use copyparty as a chromecast replacement:
|
use copyparty as a chromecast replacement:
|
||||||
* post a URL and it will open in the default browser
|
* post a URL and it will open in the default browser
|
||||||
* upload a file and it will open in the default application
|
* upload a file and it will open in the default application
|
||||||
|
|||||||
676
bin/partyfuse.py
676
bin/partyfuse.py
File diff suppressed because it is too large
Load Diff
922
bin/u2c.py
922
bin/u2c.py
File diff suppressed because it is too large
Load Diff
76
bin/zmq-recv.py
Executable file
76
bin/zmq-recv.py
Executable file
@@ -0,0 +1,76 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import zmq
|
||||||
|
|
||||||
|
"""
|
||||||
|
zmq-recv.py: demo zmq receiver
|
||||||
|
2025-01-22, v1.0, ed <irc.rizon.net>, MIT-Licensed
|
||||||
|
https://github.com/9001/copyparty/blob/hovudstraum/bin/zmq-recv.py
|
||||||
|
|
||||||
|
basic zmq-server to receive events from copyparty; try one of
|
||||||
|
the below and then "send a message to serverlog" in the web-ui:
|
||||||
|
|
||||||
|
1) dumb fire-and-forget to any and all listeners;
|
||||||
|
run this script with "sub" and run copyparty with this:
|
||||||
|
--xm zmq:pub:tcp://*:5556
|
||||||
|
|
||||||
|
2) one lucky listener gets the message, blocks if no listeners:
|
||||||
|
run this script with "pull" and run copyparty with this:
|
||||||
|
--xm t3,zmq:push:tcp://*:5557
|
||||||
|
|
||||||
|
3) blocking syn/ack mode, client must ack each message;
|
||||||
|
run this script with "rep" and run copyparty with this:
|
||||||
|
--xm t3,zmq:req:tcp://localhost:5555
|
||||||
|
|
||||||
|
note: to conditionally block uploads based on message contents,
|
||||||
|
use rep_server to answer with "return 1" and run copyparty with
|
||||||
|
--xau t3,c,zmq:req:tcp://localhost:5555
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
ctx = zmq.Context()
|
||||||
|
|
||||||
|
|
||||||
|
def sub_server():
|
||||||
|
# PUB/SUB allows any number of servers/clients, and
|
||||||
|
# messages are fire-and-forget
|
||||||
|
sck = ctx.socket(zmq.SUB)
|
||||||
|
sck.connect("tcp://localhost:5556")
|
||||||
|
sck.setsockopt_string(zmq.SUBSCRIBE, "")
|
||||||
|
while True:
|
||||||
|
print("copyparty says %r" % (sck.recv_string(),))
|
||||||
|
|
||||||
|
|
||||||
|
def pull_server():
|
||||||
|
# PUSH/PULL allows any number of servers/clients, and
|
||||||
|
# each message is sent to a exactly one PULL client
|
||||||
|
sck = ctx.socket(zmq.PULL)
|
||||||
|
sck.connect("tcp://localhost:5557")
|
||||||
|
while True:
|
||||||
|
print("copyparty says %r" % (sck.recv_string(),))
|
||||||
|
|
||||||
|
|
||||||
|
def rep_server():
|
||||||
|
# REP/REQ is a server/client pair where each message must be
|
||||||
|
# acked by the other before another message can be sent, so
|
||||||
|
# copyparty will do a blocking-wait for the ack
|
||||||
|
sck = ctx.socket(zmq.REP)
|
||||||
|
sck.bind("tcp://*:5555")
|
||||||
|
while True:
|
||||||
|
print("copyparty says %r" % (sck.recv_string(),))
|
||||||
|
reply = b"thx"
|
||||||
|
# reply = b"return 1" # non-zero to block an upload
|
||||||
|
sck.send(reply)
|
||||||
|
|
||||||
|
|
||||||
|
mode = sys.argv[1].lower() if len(sys.argv) > 1 else ""
|
||||||
|
|
||||||
|
if mode == "sub":
|
||||||
|
sub_server()
|
||||||
|
elif mode == "pull":
|
||||||
|
pull_server()
|
||||||
|
elif mode == "rep":
|
||||||
|
rep_server()
|
||||||
|
else:
|
||||||
|
print("specify mode as first argument: SUB | PULL | REP")
|
||||||
@@ -12,13 +12,21 @@
|
|||||||
* assumes the webserver and copyparty is running on the same server/IP
|
* assumes the webserver and copyparty is running on the same server/IP
|
||||||
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
||||||
|
|
||||||
### [`sharex.sxcu`](sharex.sxcu)
|
### [`sharex.sxcu`](sharex.sxcu) - Windows screenshot uploader
|
||||||
* sharex config file to upload screenshots and grab the URL
|
* [sharex](https://getsharex.com/) config file to upload screenshots and grab the URL
|
||||||
* `RequestURL`: full URL to the target folder
|
* `RequestURL`: full URL to the target folder
|
||||||
* `pw`: password (remove the `pw` line if anon-write)
|
* `pw`: password (remove the `pw` line if anon-write)
|
||||||
* the `act:bput` thing is optional since copyparty v1.9.29
|
* the `act:bput` thing is optional since copyparty v1.9.29
|
||||||
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
||||||
|
|
||||||
|
### [`ishare.iscu`](ishare.iscu) - MacOS screenshot uploader
|
||||||
|
* [ishare](https://isharemac.app/) config file to upload screenshots and grab the URL
|
||||||
|
* `RequestURL`: full URL to the target folder
|
||||||
|
* `pw`: password (remove the `pw` line if anon-write)
|
||||||
|
|
||||||
|
### [`flameshot.sh`](flameshot.sh) - Linux screenshot uploader
|
||||||
|
* takes a screenshot with [flameshot](https://flameshot.org/) on Linux, uploads it, and writes the URL to clipboard
|
||||||
|
|
||||||
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
||||||
* browser integration, kind of? custom rightclick actions and stuff
|
* browser integration, kind of? custom rightclick actions and stuff
|
||||||
* rightclick a pic and send it to copyparty straight from your browser
|
* rightclick a pic and send it to copyparty straight from your browser
|
||||||
@@ -50,5 +58,10 @@ init-scripts to start copyparty as a service
|
|||||||
* [`openrc/copyparty`](openrc/copyparty)
|
* [`openrc/copyparty`](openrc/copyparty)
|
||||||
|
|
||||||
# Reverse-proxy
|
# Reverse-proxy
|
||||||
copyparty has basic support for running behind another webserver
|
copyparty supports running behind another webserver
|
||||||
* [`nginx/copyparty.conf`](nginx/copyparty.conf)
|
* [`apache/copyparty.conf`](apache/copyparty.conf)
|
||||||
|
* [`haproxy/copyparty.conf`](haproxy/copyparty.conf)
|
||||||
|
* [`lighttpd/subdomain.conf`](lighttpd/subdomain.conf)
|
||||||
|
* [`lighttpd/subpath.conf`](lighttpd/subpath.conf)
|
||||||
|
* [`nginx/copyparty.conf`](nginx/copyparty.conf) -- recommended
|
||||||
|
* [`traefik/copyparty.yaml`](traefik/copyparty.yaml)
|
||||||
|
|||||||
@@ -1,14 +1,29 @@
|
|||||||
# when running copyparty behind a reverse proxy,
|
# if you would like to use unix-sockets (recommended),
|
||||||
# the following arguments are recommended:
|
# you must run copyparty with one of the following:
|
||||||
#
|
#
|
||||||
# -i 127.0.0.1 only accept connections from nginx
|
# -i unix:777:/dev/shm/party.sock
|
||||||
|
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||||
#
|
#
|
||||||
# if you are doing location-based proxying (such as `/stuff` below)
|
# if you are doing location-based proxying (such as `/stuff` below)
|
||||||
# you must run copyparty with --rp-loc=stuff
|
# you must run copyparty with --rp-loc=stuff
|
||||||
#
|
#
|
||||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
|
|
||||||
|
|
||||||
LoadModule proxy_module modules/mod_proxy.so
|
LoadModule proxy_module modules/mod_proxy.so
|
||||||
ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
|
||||||
# do not specify ProxyPassReverse
|
|
||||||
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||||
|
# NOTE: do not specify ProxyPassReverse
|
||||||
|
|
||||||
|
|
||||||
|
##
|
||||||
|
## then, enable one of the below:
|
||||||
|
|
||||||
|
# use subdomain proxying to unix-socket (best)
|
||||||
|
ProxyPass "/" "unix:///dev/shm/party.sock|http://whatever/"
|
||||||
|
|
||||||
|
# use subdomain proxying to 127.0.0.1 (slower)
|
||||||
|
#ProxyPass "/" "http://127.0.0.1:3923/"
|
||||||
|
|
||||||
|
# use subpath proxying to 127.0.0.1 (slow and maybe buggy)
|
||||||
|
#ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
||||||
|
|||||||
14
contrib/flameshot.sh
Executable file
14
contrib/flameshot.sh
Executable file
@@ -0,0 +1,14 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# take a screenshot with flameshot and send it to copyparty;
|
||||||
|
# the image url will be placed on your clipboard
|
||||||
|
|
||||||
|
password=wark
|
||||||
|
url=https://a.ocv.me/up/
|
||||||
|
filename=$(date +%Y-%m%d-%H%M%S).png
|
||||||
|
|
||||||
|
flameshot gui -s -r |
|
||||||
|
curl -T- $url$filename?pw=$password |
|
||||||
|
tail -n 1 |
|
||||||
|
xsel -ib
|
||||||
24
contrib/haproxy/copyparty.conf
Normal file
24
contrib/haproxy/copyparty.conf
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# this config is essentially two separate examples;
|
||||||
|
#
|
||||||
|
# foo1 connects to copyparty using tcp, and
|
||||||
|
# foo2 uses unix-sockets for 27% higher performance
|
||||||
|
#
|
||||||
|
# to use foo2 you must run copyparty with one of the following:
|
||||||
|
#
|
||||||
|
# -i unix:777:/dev/shm/party.sock
|
||||||
|
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||||
|
|
||||||
|
defaults
|
||||||
|
mode http
|
||||||
|
option forwardfor
|
||||||
|
timeout connect 1s
|
||||||
|
timeout client 610s
|
||||||
|
timeout server 610s
|
||||||
|
|
||||||
|
listen foo1
|
||||||
|
bind *:8081
|
||||||
|
server srv1 127.0.0.1:3923 maxconn 512
|
||||||
|
|
||||||
|
listen foo2
|
||||||
|
bind *:8082
|
||||||
|
server srv1 /dev/shm/party.sock maxconn 512
|
||||||
10
contrib/ishare.iscu
Normal file
10
contrib/ishare.iscu
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"Name": "copyparty",
|
||||||
|
"RequestURL": "http://127.0.0.1:3923/screenshots/",
|
||||||
|
"Headers": {
|
||||||
|
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE",
|
||||||
|
"accept": "json"
|
||||||
|
},
|
||||||
|
"FileFormName": "f",
|
||||||
|
"ResponseURL": "{{fileurl}}"
|
||||||
|
}
|
||||||
24
contrib/lighttpd/subdomain.conf
Normal file
24
contrib/lighttpd/subdomain.conf
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
# example usage for benchmarking:
|
||||||
|
#
|
||||||
|
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subdomain.conf
|
||||||
|
#
|
||||||
|
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||||
|
# or a unix-socket, but unix-sockets are 37% faster because
|
||||||
|
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||||
|
#
|
||||||
|
# this means we must run copyparty with one of the following:
|
||||||
|
#
|
||||||
|
# -i unix:777:/dev/shm/party.sock
|
||||||
|
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||||
|
#
|
||||||
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
|
|
||||||
|
server.port = 80
|
||||||
|
server.document-root = "/var/empty"
|
||||||
|
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||||
|
server.modules = ( "mod_proxy" )
|
||||||
|
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||||
|
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||||
|
|
||||||
|
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||||
|
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
||||||
31
contrib/lighttpd/subpath.conf
Normal file
31
contrib/lighttpd/subpath.conf
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# example usage for benchmarking:
|
||||||
|
#
|
||||||
|
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subpath.conf
|
||||||
|
#
|
||||||
|
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||||
|
# or a unix-socket, but unix-sockets are 37% faster because
|
||||||
|
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||||
|
#
|
||||||
|
# this means we must run copyparty with one of the following:
|
||||||
|
#
|
||||||
|
# -i unix:777:/dev/shm/party.sock
|
||||||
|
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||||
|
#
|
||||||
|
# also since this example proxies a subpath instead of the
|
||||||
|
# recommended subdomain-proxying, we must also specify this:
|
||||||
|
#
|
||||||
|
# --rp-loc files
|
||||||
|
#
|
||||||
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
|
|
||||||
|
server.port = 80
|
||||||
|
server.document-root = "/var/empty"
|
||||||
|
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||||
|
server.modules = ( "mod_proxy" )
|
||||||
|
$HTTP["url"] =~ "^/files" {
|
||||||
|
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||||
|
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||||
|
|
||||||
|
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||||
|
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
||||||
|
}
|
||||||
@@ -1,14 +1,10 @@
|
|||||||
# when running copyparty behind a reverse proxy,
|
# look for "max clients:" when starting copyparty, as nginx should
|
||||||
# the following arguments are recommended:
|
# not accept more consecutive clients than what copyparty is able to;
|
||||||
#
|
|
||||||
# -i 127.0.0.1 only accept connections from nginx
|
|
||||||
#
|
|
||||||
# -nc must match or exceed the webserver's max number of concurrent clients;
|
|
||||||
# copyparty default is 1024 if OS permits it (see "max clients:" on startup),
|
|
||||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||||
#
|
#
|
||||||
# you may also consider adding -j0 for CPU-intensive configurations
|
# rarely, in some extreme usecases, it can be good to add -j0
|
||||||
# (5'000 requests per second, or 20gbps upload/download in parallel)
|
# (40'000 requests per second, or 20gbps upload/download in parallel)
|
||||||
|
# but this is usually counterproductive and slightly buggy
|
||||||
#
|
#
|
||||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
#
|
#
|
||||||
@@ -20,10 +16,33 @@
|
|||||||
#
|
#
|
||||||
# and then enable it below by uncomenting the cloudflare-only.conf line
|
# and then enable it below by uncomenting the cloudflare-only.conf line
|
||||||
|
|
||||||
upstream cpp {
|
|
||||||
|
upstream cpp_tcp {
|
||||||
|
# alternative 1: connect to copyparty using tcp;
|
||||||
|
# cpp_uds is slightly faster and more secure, but
|
||||||
|
# cpp_tcp is easier to setup and "just works"
|
||||||
|
# ...you should however restrict copyparty to only
|
||||||
|
# accept connections from nginx by adding these args:
|
||||||
|
# -i 127.0.0.1
|
||||||
|
|
||||||
server 127.0.0.1:3923 fail_timeout=1s;
|
server 127.0.0.1:3923 fail_timeout=1s;
|
||||||
keepalive 1;
|
keepalive 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
upstream cpp_uds {
|
||||||
|
# alternative 2: unix-socket, aka. "unix domain socket";
|
||||||
|
# 5-10% faster, and better isolation from other software,
|
||||||
|
# but there must be at least one unix-group which both
|
||||||
|
# nginx and copyparty is a member of; if that group is
|
||||||
|
# "www" then run copyparty with the following args:
|
||||||
|
# -i unix:770:www:/dev/shm/party.sock
|
||||||
|
|
||||||
|
server unix:/dev/shm/party.sock fail_timeout=1s;
|
||||||
|
keepalive 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
server {
|
server {
|
||||||
listen 443 ssl;
|
listen 443 ssl;
|
||||||
listen [::]:443 ssl;
|
listen [::]:443 ssl;
|
||||||
@@ -34,13 +53,18 @@ server {
|
|||||||
#include /etc/nginx/cloudflare-only.conf;
|
#include /etc/nginx/cloudflare-only.conf;
|
||||||
|
|
||||||
location / {
|
location / {
|
||||||
proxy_pass http://cpp;
|
# recommendation: replace cpp_tcp with cpp_uds below
|
||||||
|
proxy_pass http://cpp_tcp;
|
||||||
proxy_redirect off;
|
proxy_redirect off;
|
||||||
# disable buffering (next 4 lines)
|
# disable buffering (next 4 lines)
|
||||||
proxy_http_version 1.1;
|
proxy_http_version 1.1;
|
||||||
client_max_body_size 0;
|
client_max_body_size 0;
|
||||||
proxy_buffering off;
|
proxy_buffering off;
|
||||||
proxy_request_buffering off;
|
proxy_request_buffering off;
|
||||||
|
# improve download speed from 600 to 1500 MiB/s
|
||||||
|
proxy_buffers 32 8k;
|
||||||
|
proxy_buffer_size 16k;
|
||||||
|
proxy_busy_buffers_size 24k;
|
||||||
|
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
@@ -52,6 +76,7 @@ server {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
|
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
|
||||||
client_max_body_size 1024M;
|
client_max_body_size 1024M;
|
||||||
client_header_timeout 610m;
|
client_header_timeout 610m;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Maintainer: icxes <dev.null@need.moe>
|
# Maintainer: icxes <dev.null@need.moe>
|
||||||
pkgname=copyparty
|
pkgname=copyparty
|
||||||
pkgver="1.13.1"
|
pkgver="1.16.15"
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||||
arch=("any")
|
arch=("any")
|
||||||
@@ -16,12 +16,13 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
|||||||
"libkeyfinder-git: detection of musical keys"
|
"libkeyfinder-git: detection of musical keys"
|
||||||
"qm-vamp-plugins: BPM detection"
|
"qm-vamp-plugins: BPM detection"
|
||||||
"python-pyopenssl: ftps functionality"
|
"python-pyopenssl: ftps functionality"
|
||||||
"python-argon2_cffi: hashed passwords in config"
|
"python-pyzmq: send zeromq messages from event-hooks"
|
||||||
|
"python-argon2-cffi: hashed passwords in config"
|
||||||
"python-impacket-git: smb support (bad idea)"
|
"python-impacket-git: smb support (bad idea)"
|
||||||
)
|
)
|
||||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||||
backup=("etc/${pkgname}.d/init" )
|
backup=("etc/${pkgname}.d/init" )
|
||||||
sha256sums=("f103b784c423a45fbab47c584e4cc53d887fe0616f803bffe009fbfdab3963d7")
|
sha256sums=("080844a7458073c86714dde5fa659a64120909568005420ff81a27e353e91a72")
|
||||||
|
|
||||||
build() {
|
build() {
|
||||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
|
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, pyzmq, ffmpeg, mutagen,
|
||||||
|
|
||||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||||
withHashedPasswords ? true,
|
withHashedPasswords ? true,
|
||||||
@@ -21,6 +21,9 @@ withMediaProcessing ? true,
|
|||||||
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
|
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
|
||||||
withBasicAudioMetadata ? false,
|
withBasicAudioMetadata ? false,
|
||||||
|
|
||||||
|
# send ZeroMQ messages from event-hooks
|
||||||
|
withZeroMQ ? true,
|
||||||
|
|
||||||
# enable FTPS support in the FTP server
|
# enable FTPS support in the FTP server
|
||||||
withFTPS ? false,
|
withFTPS ? false,
|
||||||
|
|
||||||
@@ -43,6 +46,7 @@ let
|
|||||||
++ lib.optional withMediaProcessing ffmpeg
|
++ lib.optional withMediaProcessing ffmpeg
|
||||||
++ lib.optional withBasicAudioMetadata mutagen
|
++ lib.optional withBasicAudioMetadata mutagen
|
||||||
++ lib.optional withHashedPasswords argon2-cffi
|
++ lib.optional withHashedPasswords argon2-cffi
|
||||||
|
++ lib.optional withZeroMQ pyzmq
|
||||||
);
|
);
|
||||||
in stdenv.mkDerivation {
|
in stdenv.mkDerivation {
|
||||||
pname = "copyparty";
|
pname = "copyparty";
|
||||||
@@ -60,4 +64,5 @@ in stdenv.mkDerivation {
|
|||||||
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
||||||
--add-flags "$out/share/copyparty-sfx.py"
|
--add-flags "$out/share/copyparty-sfx.py"
|
||||||
'';
|
'';
|
||||||
|
meta.mainProgram = "copyparty";
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"url": "https://github.com/9001/copyparty/releases/download/v1.13.1/copyparty-sfx.py",
|
"url": "https://github.com/9001/copyparty/releases/download/v1.16.15/copyparty-sfx.py",
|
||||||
"version": "1.13.1",
|
"version": "1.16.15",
|
||||||
"hash": "sha256-NFfnveCrR1SbiNlibVyU3UPePLUGJMc4XZvWdksXNd8="
|
"hash": "sha256-vpI/N01pCeCiAv3haE1Zac1X9Oyp2vqPAC1WEcl5phA="
|
||||||
}
|
}
|
||||||
@@ -15,11 +15,19 @@ save one of these as `.epilogue.html` inside a folder to customize it:
|
|||||||
point `--js-browser` to one of these by URL:
|
point `--js-browser` to one of these by URL:
|
||||||
|
|
||||||
* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
|
* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
|
||||||
|
* [`quickmove.js`](quickmove.js) adds a hotkey to move selected files into a subfolder
|
||||||
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
|
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
|
||||||
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API
|
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## example any-js
|
||||||
|
point `--js-browser` and/or `--js-other` to one of these by URL:
|
||||||
|
|
||||||
|
* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## example browser-css
|
## example browser-css
|
||||||
point `--css-browser` to one of these by URL:
|
point `--css-browser` to one of these by URL:
|
||||||
|
|
||||||
|
|||||||
93
contrib/plugins/banner.js
Normal file
93
contrib/plugins/banner.js
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
(function() {
|
||||||
|
|
||||||
|
// usage: copy this to '.banner.js' in your webroot,
|
||||||
|
// and run copyparty with the following arguments:
|
||||||
|
// --js-browser /.banner.js --js-other /.banner.js
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// had to pick the most chuuni one as the default
|
||||||
|
var bannertext = '' +
|
||||||
|
'<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
|
||||||
|
'<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
|
||||||
|
'<ul>' +
|
||||||
|
'<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
|
||||||
|
'<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
|
||||||
|
'<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
|
||||||
|
'<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
|
||||||
|
'<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
|
||||||
|
'</ul>';
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// fancy div to insert into pages
|
||||||
|
function bannerdiv(border) {
|
||||||
|
var ret = mknod('div', null, bannertext);
|
||||||
|
if (border)
|
||||||
|
ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
|
||||||
|
return ret;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// keep all of these false and then selectively enable them in the if-blocks below
|
||||||
|
var show_msgbox = false,
|
||||||
|
login_top = false,
|
||||||
|
top = false,
|
||||||
|
bottom = false,
|
||||||
|
top_bordered = false,
|
||||||
|
bottom_bordered = false;
|
||||||
|
|
||||||
|
if (QS("h1#cc") && QS("a#k")) {
|
||||||
|
// this is the controlpanel
|
||||||
|
// (you probably want to keep just one of these enabled)
|
||||||
|
show_msgbox = true;
|
||||||
|
login_top = true;
|
||||||
|
bottom = true;
|
||||||
|
}
|
||||||
|
else if (ebi("swin") && ebi("smac")) {
|
||||||
|
// this is the connect-page, same deal here
|
||||||
|
show_msgbox = true;
|
||||||
|
top_bordered = true;
|
||||||
|
bottom_bordered = true;
|
||||||
|
}
|
||||||
|
else if (ebi("op_cfg") || ebi("div#mw") ) {
|
||||||
|
// we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
|
||||||
|
// fragile pages which break if you do something too fancy
|
||||||
|
show_msgbox = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
// shows a fullscreen messagebox; works on all pages
|
||||||
|
if (show_msgbox) {
|
||||||
|
var now = Math.floor(Date.now() / 1000),
|
||||||
|
last_shown = sread("bannerts") || 0;
|
||||||
|
|
||||||
|
// 60 * 60 * 17 = 17 hour cooldown
|
||||||
|
if (now - last_shown > 60 * 60 * 17) {
|
||||||
|
swrite("bannerts", now);
|
||||||
|
modal.confirm(bannertext, null, function () {
|
||||||
|
location = 'https://this-page-intentionally-left-blank.org/';
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// show a message on the page footer; only works on the connect-page
|
||||||
|
if (top || top_bordered) {
|
||||||
|
var dst = ebi('wrap');
|
||||||
|
dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
|
||||||
|
}
|
||||||
|
|
||||||
|
// show a message on the page footer; only works on the controlpanel and connect-page
|
||||||
|
if (bottom || bottom_bordered) {
|
||||||
|
ebi('wrap').appendChild(bannerdiv(bottom_bordered));
|
||||||
|
}
|
||||||
|
|
||||||
|
// show a message on the top of the page; only works on the controlpanel
|
||||||
|
if (login_top) {
|
||||||
|
var dst = QS('h1');
|
||||||
|
dst.parentNode.insertBefore(bannerdiv(false), dst);
|
||||||
|
}
|
||||||
|
|
||||||
|
})();
|
||||||
117
contrib/plugins/graft-thumbs.js
Normal file
117
contrib/plugins/graft-thumbs.js
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
// USAGE:
|
||||||
|
// place this file somewhere in the webroot and then
|
||||||
|
// python3 -m copyparty --js-browser /.res/graft-thumbs.js
|
||||||
|
//
|
||||||
|
// DESCRIPTION:
|
||||||
|
// this is a gridview plugin which, for each file in a folder,
|
||||||
|
// looks for another file with the same filename (but with a
|
||||||
|
// different file extension)
|
||||||
|
//
|
||||||
|
// if one of those files is an image and the other is not,
|
||||||
|
// then this plugin assumes the image is a "sidecar thumbnail"
|
||||||
|
// for the other file, and it will graft the image thumbnail
|
||||||
|
// onto the non-image file (for example an mp3)
|
||||||
|
//
|
||||||
|
// optional feature 1, default-enabled:
|
||||||
|
// the image-file is then hidden from the directory listing
|
||||||
|
//
|
||||||
|
// optional feature 2, default-enabled:
|
||||||
|
// when clicking the audio file, the image will also open
|
||||||
|
|
||||||
|
|
||||||
|
(function() {
|
||||||
|
|
||||||
|
// `graft_thumbs` assumes the gridview has just been rendered;
|
||||||
|
// it looks for sidecars, and transplants those thumbnails onto
|
||||||
|
// the other file with the same basename (filename sans extension)
|
||||||
|
|
||||||
|
var graft_thumbs = function () {
|
||||||
|
if (!thegrid.en)
|
||||||
|
return; // not in grid mode
|
||||||
|
|
||||||
|
var files = msel.getall(),
|
||||||
|
pairs = {};
|
||||||
|
|
||||||
|
console.log(files);
|
||||||
|
|
||||||
|
for (var a = 0; a < files.length; a++) {
|
||||||
|
var file = files[a],
|
||||||
|
is_pic = /\.(jpe?g|png|gif|webp)$/i.exec(file.vp),
|
||||||
|
is_audio = re_au_all.exec(file.vp),
|
||||||
|
basename = file.vp.replace(/\.[^\.]+$/, ""),
|
||||||
|
entry = pairs[basename];
|
||||||
|
|
||||||
|
if (!entry)
|
||||||
|
// first time seeing this basename; create a new entry in pairs
|
||||||
|
entry = pairs[basename] = {};
|
||||||
|
|
||||||
|
if (is_pic)
|
||||||
|
entry.thumb = file;
|
||||||
|
else if (is_audio)
|
||||||
|
entry.audio = file;
|
||||||
|
}
|
||||||
|
|
||||||
|
var basenames = Object.keys(pairs);
|
||||||
|
for (var a = 0; a < basenames.length; a++)
|
||||||
|
(function(a) {
|
||||||
|
var pair = pairs[basenames[a]];
|
||||||
|
|
||||||
|
if (!pair.thumb || !pair.audio)
|
||||||
|
return; // not a matching pair of files
|
||||||
|
|
||||||
|
var img_thumb = QS('#ggrid a[ref="' + pair.thumb.id + '"] img[onload]'),
|
||||||
|
img_audio = QS('#ggrid a[ref="' + pair.audio.id + '"] img[onload]');
|
||||||
|
|
||||||
|
if (!img_thumb || !img_audio)
|
||||||
|
return; // something's wrong... let's bail
|
||||||
|
|
||||||
|
// alright, graft the thumb...
|
||||||
|
img_audio.src = img_thumb.src;
|
||||||
|
|
||||||
|
// ...and hide the sidecar
|
||||||
|
img_thumb.closest('a').style.display = 'none';
|
||||||
|
|
||||||
|
// ...and add another onclick-handler to the audio,
|
||||||
|
// so it also opens the pic while playing the song
|
||||||
|
img_audio.addEventListener('click', function() {
|
||||||
|
img_thumb.click();
|
||||||
|
return false; // let it bubble to the next listener
|
||||||
|
});
|
||||||
|
|
||||||
|
})(a);
|
||||||
|
};
|
||||||
|
|
||||||
|
// ...and then the trick! near the end of loadgrid,
|
||||||
|
// thegrid.bagit is called to initialize the baguettebox
|
||||||
|
// (image/video gallery); this is the perfect function to
|
||||||
|
// "hook" (hijack) so we can run our code :^)
|
||||||
|
|
||||||
|
// need to grab a backup of the original function first,
|
||||||
|
var orig_func = thegrid.bagit;
|
||||||
|
|
||||||
|
// and then replace it with our own:
|
||||||
|
thegrid.bagit = function (isrc) {
|
||||||
|
|
||||||
|
if (isrc !== '#ggrid')
|
||||||
|
// we only want to modify the grid, so
|
||||||
|
// let the original function handle this one
|
||||||
|
return orig_func(isrc);
|
||||||
|
|
||||||
|
graft_thumbs();
|
||||||
|
|
||||||
|
// when changing directories, the grid is
|
||||||
|
// rendered before msel returns the correct
|
||||||
|
// filenames, so schedule another run:
|
||||||
|
setTimeout(graft_thumbs, 1);
|
||||||
|
|
||||||
|
// and finally, call the original thegrid.bagit function
|
||||||
|
return orig_func(isrc);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (ls0) {
|
||||||
|
// the server included an initial listing json (ls0),
|
||||||
|
// so the grid has already been rendered without our hook
|
||||||
|
graft_thumbs();
|
||||||
|
}
|
||||||
|
|
||||||
|
})();
|
||||||
140
contrib/plugins/quickmove.js
Normal file
140
contrib/plugins/quickmove.js
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
|
||||||
|
// USAGE:
|
||||||
|
// place this file somewhere in the webroot,
|
||||||
|
// for example in a folder named ".res" to hide it, and then
|
||||||
|
// python3 copyparty-sfx.py -v .::A --js-browser /.res/quickmove.js
|
||||||
|
//
|
||||||
|
// DESCRIPTION:
|
||||||
|
// the command above launches copyparty with one single volume;
|
||||||
|
// ".::A" = current folder as webroot, and everyone has Admin
|
||||||
|
//
|
||||||
|
// the plugin adds hotkey "W" which moves all selected files
|
||||||
|
// into a subfolder named "foobar" inside the current folder
|
||||||
|
|
||||||
|
|
||||||
|
(function() {
|
||||||
|
|
||||||
|
var action_to_perform = ask_for_confirmation_and_then_move;
|
||||||
|
// this decides what the new hotkey should do;
|
||||||
|
// ask_for_confirmation_and_then_move = show a yes/no box,
|
||||||
|
// move_selected_files = just move the files immediately
|
||||||
|
|
||||||
|
var move_destination = "foobar";
|
||||||
|
// this is the target folder to move files to;
|
||||||
|
// by default it is a subfolder of the current folder,
|
||||||
|
// but it can also be an absolute path like "/foo/bar"
|
||||||
|
|
||||||
|
// ===
|
||||||
|
// === END OF CONFIG
|
||||||
|
// ===
|
||||||
|
|
||||||
|
var main_hotkey_handler, // copyparty's original hotkey handler
|
||||||
|
plugin_enabler, // timer to engage this plugin when safe
|
||||||
|
files_to_move; // list of files to move
|
||||||
|
|
||||||
|
function ask_for_confirmation_and_then_move() {
|
||||||
|
var num_files = msel.getsel().length,
|
||||||
|
msg = "move the selected " + num_files + " files?";
|
||||||
|
|
||||||
|
if (!num_files)
|
||||||
|
return toast.warn(2, 'no files were selected to be moved');
|
||||||
|
|
||||||
|
modal.confirm(msg, move_selected_files, null);
|
||||||
|
}
|
||||||
|
|
||||||
|
function move_selected_files() {
|
||||||
|
var selection = msel.getsel();
|
||||||
|
|
||||||
|
if (!selection.length)
|
||||||
|
return toast.warn(2, 'no files were selected to be moved');
|
||||||
|
|
||||||
|
if (thegrid.bbox) {
|
||||||
|
// close image/video viewer
|
||||||
|
thegrid.bbox = null;
|
||||||
|
baguetteBox.destroy();
|
||||||
|
}
|
||||||
|
|
||||||
|
files_to_move = [];
|
||||||
|
for (var a = 0; a < selection.length; a++)
|
||||||
|
files_to_move.push(selection[a].vp);
|
||||||
|
|
||||||
|
move_next_file();
|
||||||
|
}
|
||||||
|
|
||||||
|
function move_next_file() {
|
||||||
|
var num_files = files_to_move.length,
|
||||||
|
filepath = files_to_move.pop(),
|
||||||
|
filename = vsplit(filepath)[1];
|
||||||
|
|
||||||
|
toast.inf(10, "moving " + num_files + " files...\n\n" + filename);
|
||||||
|
|
||||||
|
var dst = move_destination;
|
||||||
|
|
||||||
|
if (!dst.endsWith('/'))
|
||||||
|
// must have a trailing slash, so add it
|
||||||
|
dst += '/';
|
||||||
|
|
||||||
|
if (!dst.startsWith('/'))
|
||||||
|
// destination is a relative path, so prefix current folder path
|
||||||
|
dst = get_evpath() + dst;
|
||||||
|
|
||||||
|
// and finally append the filename
|
||||||
|
dst += '/' + filename;
|
||||||
|
|
||||||
|
// prepare the move-request to be sent
|
||||||
|
var xhr = new XHR();
|
||||||
|
xhr.onload = xhr.onerror = function() {
|
||||||
|
if (this.status !== 201)
|
||||||
|
return toast.err(30, 'move failed: ' + esc(this.responseText));
|
||||||
|
|
||||||
|
if (files_to_move.length)
|
||||||
|
return move_next_file(); // still more files to go
|
||||||
|
|
||||||
|
toast.ok(1, 'move OK');
|
||||||
|
treectl.goto(); // reload the folder contents
|
||||||
|
};
|
||||||
|
xhr.open('POST', filepath + '?move=' + dst);
|
||||||
|
xhr.send();
|
||||||
|
}
|
||||||
|
|
||||||
|
function our_hotkey_handler(e) {
|
||||||
|
// bail if either ALT, CTRL, or SHIFT is pressed
|
||||||
|
if (e.altKey || e.shiftKey || e.isComposing || ctrl(e))
|
||||||
|
return main_hotkey_handler(e); // let copyparty handle this keystroke
|
||||||
|
|
||||||
|
var key_name = (e.code || e.key) + '',
|
||||||
|
ae = document.activeElement,
|
||||||
|
aet = ae && ae != document.body ? ae.nodeName.toLowerCase() : '';
|
||||||
|
|
||||||
|
// check the current aet (active element type),
|
||||||
|
// only continue if one of the following currently has input focus:
|
||||||
|
// nothing | link | button | table-row | table-cell | div | text
|
||||||
|
if (aet && !/^(a|button|tr|td|div|pre)$/.test(aet))
|
||||||
|
return main_hotkey_handler(e); // let copyparty handle this keystroke
|
||||||
|
|
||||||
|
if (key_name == 'KeyW') {
|
||||||
|
// okay, this one's for us... do the thing
|
||||||
|
action_to_perform();
|
||||||
|
return ev(e);
|
||||||
|
}
|
||||||
|
|
||||||
|
return main_hotkey_handler(e); // let copyparty handle this keystroke
|
||||||
|
}
|
||||||
|
|
||||||
|
function enable_plugin() {
|
||||||
|
if (!window.hotkeys_attached)
|
||||||
|
return console.log('quickmove is waiting for the page to finish loading');
|
||||||
|
|
||||||
|
clearInterval(plugin_enabler);
|
||||||
|
main_hotkey_handler = document.onkeydown;
|
||||||
|
document.onkeydown = our_hotkey_handler;
|
||||||
|
console.log('quickmove is now enabled');
|
||||||
|
}
|
||||||
|
|
||||||
|
// copyparty doesn't enable its hotkeys until the page
|
||||||
|
// has finished loading, so we'll wait for that too
|
||||||
|
plugin_enabler = setInterval(enable_plugin, 100);
|
||||||
|
|
||||||
|
})();
|
||||||
@@ -4,7 +4,7 @@
|
|||||||
#
|
#
|
||||||
# installation:
|
# installation:
|
||||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||||
# useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
|
# useradd -r -s /sbin/nologin -m -d /var/lib/copyparty copyparty
|
||||||
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
|
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
|
||||||
# firewall-cmd --reload
|
# firewall-cmd --reload
|
||||||
# cp -pv copyparty.service /etc/systemd/system/
|
# cp -pv copyparty.service /etc/systemd/system/
|
||||||
@@ -12,11 +12,18 @@
|
|||||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||||
#
|
#
|
||||||
|
# every time you edit this file, you must "systemctl daemon-reload"
|
||||||
|
# for the changes to take effect and then "systemctl restart copyparty"
|
||||||
|
#
|
||||||
# if it fails to start, first check this: systemctl status copyparty
|
# if it fails to start, first check this: systemctl status copyparty
|
||||||
# then try starting it while viewing logs:
|
# then try starting it while viewing logs:
|
||||||
# journalctl -fan 100
|
# journalctl -fan 100
|
||||||
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
|
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
|
||||||
#
|
#
|
||||||
|
# if you run into any issues, for example thumbnails not working,
|
||||||
|
# try removing the "some quick hardening" section and then please
|
||||||
|
# let me know if that actually helped so we can look into it
|
||||||
|
#
|
||||||
# you may want to:
|
# you may want to:
|
||||||
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
|
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
|
||||||
# - edit /etc/copyparty.conf to configure copyparty
|
# - edit /etc/copyparty.conf to configure copyparty
|
||||||
|
|||||||
116
contrib/themes/bsod.css
Normal file
116
contrib/themes/bsod.css
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
/* copy bsod.* into a folder named ".themes" in your webroot and then
|
||||||
|
--themes=10 --theme=9 --css-browser=/.themes/bsod.css
|
||||||
|
*/
|
||||||
|
|
||||||
|
html.ey {
|
||||||
|
--w2: #3d7bbc;
|
||||||
|
--w3: #5fcbec;
|
||||||
|
|
||||||
|
--fg: #fff;
|
||||||
|
--fg-max: #fff;
|
||||||
|
--fg-weak: var(--w3);
|
||||||
|
|
||||||
|
--bg: #2067b2;
|
||||||
|
--bg-d3: var(--bg);
|
||||||
|
--bg-d2: var(--w2);
|
||||||
|
--bg-d1: var(--fg-weak);
|
||||||
|
--bg-u2: var(--bg);
|
||||||
|
--bg-u3: var(--bg);
|
||||||
|
--bg-u5: var(--w2);
|
||||||
|
|
||||||
|
--tab-alt: var(--fg-weak);
|
||||||
|
--row-alt: var(--w2);
|
||||||
|
|
||||||
|
--scroll: var(--w3);
|
||||||
|
|
||||||
|
--a: #fff;
|
||||||
|
--a-b: #fff;
|
||||||
|
--a-hil: #fff;
|
||||||
|
--a-h-bg: var(--fg-weak);
|
||||||
|
--a-dark: var(--a);
|
||||||
|
--a-gray: var(--fg-weak);
|
||||||
|
|
||||||
|
--btn-fg: var(--a);
|
||||||
|
--btn-bg: var(--w2);
|
||||||
|
--btn-h-fg: var(--w2);
|
||||||
|
--btn-1-fg: var(--bg);
|
||||||
|
--btn-1-bg: var(--a);
|
||||||
|
--txt-sh: a;
|
||||||
|
--txt-bg: var(--w2);
|
||||||
|
|
||||||
|
--u2-b1-bg: var(--w2);
|
||||||
|
--u2-b2-bg: var(--w2);
|
||||||
|
--u2-txt-bg: var(--w2);
|
||||||
|
--u2-tab-bg: a;
|
||||||
|
--u2-tab-1-bg: var(--w2);
|
||||||
|
|
||||||
|
--sort-1: var(--a);
|
||||||
|
--sort-1: var(--fg-weak);
|
||||||
|
|
||||||
|
--tree-bg: var(--bg);
|
||||||
|
|
||||||
|
--g-b1: a;
|
||||||
|
--g-b2: a;
|
||||||
|
--g-f-bg: var(--w2);
|
||||||
|
|
||||||
|
--f-sh1: 0.1;
|
||||||
|
--f-sh2: 0.02;
|
||||||
|
--f-sh3: 0.1;
|
||||||
|
--f-h-b1: a;
|
||||||
|
|
||||||
|
--srv-1: var(--a);
|
||||||
|
--srv-3: var(--a);
|
||||||
|
|
||||||
|
--mp-sh: a;
|
||||||
|
}
|
||||||
|
|
||||||
|
html.ey {
|
||||||
|
background: url('bsod.png') top 5em right 4.5em no-repeat fixed var(--bg);
|
||||||
|
}
|
||||||
|
html.ey body#b {
|
||||||
|
background: var(--bg); /*sandbox*/
|
||||||
|
}
|
||||||
|
html.ey #ops {
|
||||||
|
margin: 1.7em 1.5em 0 1.5em;
|
||||||
|
border-radius: .3em;
|
||||||
|
border-width: 1px 0;
|
||||||
|
}
|
||||||
|
html.ey #ops a {
|
||||||
|
text-shadow: 1px 1px 0 rgba(0,0,0,0.5);
|
||||||
|
}
|
||||||
|
html.ey .opbox {
|
||||||
|
margin: 1.5em 0 0 0;
|
||||||
|
}
|
||||||
|
html.ey #tree {
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
html.ey #tt {
|
||||||
|
border-color: var(--w2);
|
||||||
|
background: var(--w2);
|
||||||
|
}
|
||||||
|
html.ey .mdo a {
|
||||||
|
background: none;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
html.ey .mdo pre,
|
||||||
|
html.ey .mdo code {
|
||||||
|
color: #fff;
|
||||||
|
background: var(--w2);
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
html.ey .mdo h1,
|
||||||
|
html.ey .mdo h2 {
|
||||||
|
background: none;
|
||||||
|
border-color: var(--w2);
|
||||||
|
}
|
||||||
|
html.ey .mdo ul ul,
|
||||||
|
html.ey .mdo ul ol,
|
||||||
|
html.ey .mdo ol ul,
|
||||||
|
html.ey .mdo ol ol {
|
||||||
|
border-color: var(--w2);
|
||||||
|
}
|
||||||
|
html.ey .mdo p>em,
|
||||||
|
html.ey .mdo li>em,
|
||||||
|
html.ey .mdo td>em {
|
||||||
|
color: #fd0;
|
||||||
|
}
|
||||||
BIN
contrib/themes/bsod.png
Normal file
BIN
contrib/themes/bsod.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 1.2 KiB |
25
contrib/traefik/copyparty.yaml
Normal file
25
contrib/traefik/copyparty.yaml
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# ./traefik --configFile=copyparty.yaml
|
||||||
|
|
||||||
|
entryPoints:
|
||||||
|
web:
|
||||||
|
address: :8080
|
||||||
|
transport:
|
||||||
|
# don't disconnect during big uploads
|
||||||
|
respondingTimeouts:
|
||||||
|
readTimeout: "0s"
|
||||||
|
log:
|
||||||
|
level: DEBUG
|
||||||
|
providers:
|
||||||
|
file:
|
||||||
|
# WARNING: must be same filename as current file
|
||||||
|
filename: "copyparty.yaml"
|
||||||
|
http:
|
||||||
|
services:
|
||||||
|
service-cpp:
|
||||||
|
loadBalancer:
|
||||||
|
servers:
|
||||||
|
- url: "http://127.0.0.1:3923/"
|
||||||
|
routers:
|
||||||
|
my-router:
|
||||||
|
rule: "PathPrefix(`/`)"
|
||||||
|
service: service-cpp
|
||||||
@@ -16,9 +16,10 @@ except:
|
|||||||
TYPE_CHECKING = False
|
TYPE_CHECKING = False
|
||||||
|
|
||||||
if True:
|
if True:
|
||||||
from typing import Any, Callable
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
PY2 = sys.version_info < (3,)
|
PY2 = sys.version_info < (3,)
|
||||||
|
PY36 = sys.version_info > (3, 6)
|
||||||
if not PY2:
|
if not PY2:
|
||||||
unicode: Callable[[Any], str] = str
|
unicode: Callable[[Any], str] = str
|
||||||
else:
|
else:
|
||||||
@@ -50,6 +51,64 @@ try:
|
|||||||
except:
|
except:
|
||||||
CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
||||||
|
|
||||||
|
# all embedded resources to be retrievable over http
|
||||||
|
zs = """
|
||||||
|
web/a/partyfuse.py
|
||||||
|
web/a/u2c.py
|
||||||
|
web/a/webdav-cfg.bat
|
||||||
|
web/baguettebox.js
|
||||||
|
web/browser.css
|
||||||
|
web/browser.html
|
||||||
|
web/browser.js
|
||||||
|
web/browser2.html
|
||||||
|
web/cf.html
|
||||||
|
web/copyparty.gif
|
||||||
|
web/dd/2.png
|
||||||
|
web/dd/3.png
|
||||||
|
web/dd/4.png
|
||||||
|
web/dd/5.png
|
||||||
|
web/deps/busy.mp3
|
||||||
|
web/deps/easymde.css
|
||||||
|
web/deps/easymde.js
|
||||||
|
web/deps/marked.js
|
||||||
|
web/deps/fuse.py
|
||||||
|
web/deps/mini-fa.css
|
||||||
|
web/deps/mini-fa.woff
|
||||||
|
web/deps/prism.css
|
||||||
|
web/deps/prism.js
|
||||||
|
web/deps/prismd.css
|
||||||
|
web/deps/scp.woff2
|
||||||
|
web/deps/sha512.ac.js
|
||||||
|
web/deps/sha512.hw.js
|
||||||
|
web/iiam.gif
|
||||||
|
web/md.css
|
||||||
|
web/md.html
|
||||||
|
web/md.js
|
||||||
|
web/md2.css
|
||||||
|
web/md2.js
|
||||||
|
web/mde.css
|
||||||
|
web/mde.html
|
||||||
|
web/mde.js
|
||||||
|
web/msg.css
|
||||||
|
web/msg.html
|
||||||
|
web/rups.css
|
||||||
|
web/rups.html
|
||||||
|
web/rups.js
|
||||||
|
web/shares.css
|
||||||
|
web/shares.html
|
||||||
|
web/shares.js
|
||||||
|
web/splash.css
|
||||||
|
web/splash.html
|
||||||
|
web/splash.js
|
||||||
|
web/svcs.html
|
||||||
|
web/svcs.js
|
||||||
|
web/ui.css
|
||||||
|
web/up2k.js
|
||||||
|
web/util.js
|
||||||
|
web/w.hash.js
|
||||||
|
"""
|
||||||
|
RES = set(zs.strip().split("\n"))
|
||||||
|
|
||||||
|
|
||||||
class EnvParams(object):
|
class EnvParams(object):
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import base64
|
|||||||
import locale
|
import locale
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import select
|
||||||
import socket
|
import socket
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
@@ -26,6 +27,7 @@ from .__init__ import (
|
|||||||
EXE,
|
EXE,
|
||||||
MACOS,
|
MACOS,
|
||||||
PY2,
|
PY2,
|
||||||
|
PY36,
|
||||||
VT100,
|
VT100,
|
||||||
WINDOWS,
|
WINDOWS,
|
||||||
E,
|
E,
|
||||||
@@ -41,18 +43,26 @@ from .util import (
|
|||||||
DEF_EXP,
|
DEF_EXP,
|
||||||
DEF_MTE,
|
DEF_MTE,
|
||||||
DEF_MTH,
|
DEF_MTH,
|
||||||
|
HAVE_IPV6,
|
||||||
IMPLICATIONS,
|
IMPLICATIONS,
|
||||||
JINJA_VER,
|
JINJA_VER,
|
||||||
MIMES,
|
MIMES,
|
||||||
PARTFTPY_VER,
|
PARTFTPY_VER,
|
||||||
PY_DESC,
|
PY_DESC,
|
||||||
PYFTPD_VER,
|
PYFTPD_VER,
|
||||||
|
RAM_AVAIL,
|
||||||
|
RAM_TOTAL,
|
||||||
SQLITE_VER,
|
SQLITE_VER,
|
||||||
UNPLICATIONS,
|
UNPLICATIONS,
|
||||||
|
URL_BUG,
|
||||||
|
URL_PRJ,
|
||||||
Daemon,
|
Daemon,
|
||||||
align_tab,
|
align_tab,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
|
b64enc,
|
||||||
dedent,
|
dedent,
|
||||||
|
has_resource,
|
||||||
|
load_resource,
|
||||||
min_ex,
|
min_ex,
|
||||||
pybin,
|
pybin,
|
||||||
termsize,
|
termsize,
|
||||||
@@ -65,7 +75,13 @@ if True: # pylint: disable=using-constant-test
|
|||||||
|
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_TLS"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
HAVE_SSL = True
|
HAVE_SSL = True
|
||||||
import ssl
|
import ssl
|
||||||
except:
|
except:
|
||||||
@@ -173,8 +189,10 @@ def init_E(EE: EnvParams) -> None:
|
|||||||
(os.environ.get, "TMP"),
|
(os.environ.get, "TMP"),
|
||||||
(unicode, "/tmp"),
|
(unicode, "/tmp"),
|
||||||
]
|
]
|
||||||
|
errs = []
|
||||||
for chk in [os.listdir, os.mkdir]:
|
for chk in [os.listdir, os.mkdir]:
|
||||||
for pf, pa in paths:
|
for npath, (pf, pa) in enumerate(paths):
|
||||||
|
p = ""
|
||||||
try:
|
try:
|
||||||
p = pf(pa)
|
p = pf(pa)
|
||||||
# print(chk.__name__, p, pa)
|
# print(chk.__name__, p, pa)
|
||||||
@@ -187,9 +205,20 @@ def init_E(EE: EnvParams) -> None:
|
|||||||
if not os.path.isdir(p):
|
if not os.path.isdir(p):
|
||||||
os.mkdir(p)
|
os.mkdir(p)
|
||||||
|
|
||||||
|
if npath > 1:
|
||||||
|
t = "Using [%s] for config; filekeys/dirkeys will change on every restart. Consider setting XDG_CONFIG_HOME or giving the unix-user a ~/.config/"
|
||||||
|
errs.append(t % (p,))
|
||||||
|
elif errs:
|
||||||
|
errs.append("Using [%s] instead" % (p,))
|
||||||
|
|
||||||
|
if errs:
|
||||||
|
warn(". ".join(errs))
|
||||||
|
|
||||||
return p # type: ignore
|
return p # type: ignore
|
||||||
except:
|
except Exception as ex:
|
||||||
pass
|
if p and npath < 2:
|
||||||
|
t = "Unable to store config in [%s] due to %r"
|
||||||
|
errs.append(t % (p, ex))
|
||||||
|
|
||||||
raise Exception("could not find a writable path for config")
|
raise Exception("could not find a writable path for config")
|
||||||
|
|
||||||
@@ -213,7 +242,7 @@ def init_E(EE: EnvParams) -> None:
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
def get_srvname() -> str:
|
def get_srvname(verbose) -> str:
|
||||||
try:
|
try:
|
||||||
ret: str = unicode(socket.gethostname()).split(".")[0]
|
ret: str = unicode(socket.gethostname()).split(".")[0]
|
||||||
except:
|
except:
|
||||||
@@ -223,7 +252,8 @@ def get_srvname() -> str:
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
fp = os.path.join(E.cfg, "name.txt")
|
fp = os.path.join(E.cfg, "name.txt")
|
||||||
lprint("using hostname from {}\n".format(fp))
|
if verbose:
|
||||||
|
lprint("using hostname from {}\n".format(fp))
|
||||||
try:
|
try:
|
||||||
with open(fp, "rb") as f:
|
with open(fp, "rb") as f:
|
||||||
ret = f.read().decode("utf-8", "replace").strip()
|
ret = f.read().decode("utf-8", "replace").strip()
|
||||||
@@ -245,7 +275,7 @@ def get_fk_salt() -> str:
|
|||||||
with open(fp, "rb") as f:
|
with open(fp, "rb") as f:
|
||||||
ret = f.read().strip()
|
ret = f.read().strip()
|
||||||
except:
|
except:
|
||||||
ret = base64.b64encode(os.urandom(18))
|
ret = b64enc(os.urandom(18))
|
||||||
with open(fp, "wb") as f:
|
with open(fp, "wb") as f:
|
||||||
f.write(ret + b"\n")
|
f.write(ret + b"\n")
|
||||||
|
|
||||||
@@ -258,7 +288,7 @@ def get_dk_salt() -> str:
|
|||||||
with open(fp, "rb") as f:
|
with open(fp, "rb") as f:
|
||||||
ret = f.read().strip()
|
ret = f.read().strip()
|
||||||
except:
|
except:
|
||||||
ret = base64.b64encode(os.urandom(30))
|
ret = b64enc(os.urandom(30))
|
||||||
with open(fp, "wb") as f:
|
with open(fp, "wb") as f:
|
||||||
f.write(ret + b"\n")
|
f.write(ret + b"\n")
|
||||||
|
|
||||||
@@ -271,7 +301,7 @@ def get_ah_salt() -> str:
|
|||||||
with open(fp, "rb") as f:
|
with open(fp, "rb") as f:
|
||||||
ret = f.read().strip()
|
ret = f.read().strip()
|
||||||
except:
|
except:
|
||||||
ret = base64.b64encode(os.urandom(18))
|
ret = b64enc(os.urandom(18))
|
||||||
with open(fp, "wb") as f:
|
with open(fp, "wb") as f:
|
||||||
f.write(ret + b"\n")
|
f.write(ret + b"\n")
|
||||||
|
|
||||||
@@ -279,6 +309,9 @@ def get_ah_salt() -> str:
|
|||||||
|
|
||||||
|
|
||||||
def ensure_locale() -> None:
|
def ensure_locale() -> None:
|
||||||
|
if ANYWIN and PY2:
|
||||||
|
return # maybe XP, so busted 65001
|
||||||
|
|
||||||
safe = "en_US.UTF-8"
|
safe = "en_US.UTF-8"
|
||||||
for x in [
|
for x in [
|
||||||
safe,
|
safe,
|
||||||
@@ -298,21 +331,19 @@ def ensure_locale() -> None:
|
|||||||
|
|
||||||
|
|
||||||
def ensure_webdeps() -> None:
|
def ensure_webdeps() -> None:
|
||||||
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
|
if has_resource(E, "web/deps/mini-fa.woff"):
|
||||||
if os.path.exists(ap):
|
|
||||||
return
|
return
|
||||||
|
|
||||||
warn(
|
t = """could not find webdeps;
|
||||||
"""could not find webdeps;
|
|
||||||
if you are running the sfx, or exe, or pypi package, or docker image,
|
if you are running the sfx, or exe, or pypi package, or docker image,
|
||||||
then this is a bug! Please let me know so I can fix it, thanks :-)
|
then this is a bug! Please let me know so I can fix it, thanks :-)
|
||||||
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
|
%s
|
||||||
|
|
||||||
however, if you are a dev, or running copyparty from source, and you want
|
however, if you are a dev, or running copyparty from source, and you want
|
||||||
full client functionality, you will need to build or obtain the webdeps:
|
full client functionality, you will need to build or obtain the webdeps:
|
||||||
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
|
%s/blob/hovudstraum/docs/devnotes.md#building
|
||||||
"""
|
"""
|
||||||
)
|
warn(t % (URL_BUG, URL_PRJ))
|
||||||
|
|
||||||
|
|
||||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||||
@@ -326,7 +357,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
|||||||
# oh man i love openssl
|
# oh man i love openssl
|
||||||
# check this out
|
# check this out
|
||||||
# hold my beer
|
# hold my beer
|
||||||
assert ssl
|
assert ssl # type: ignore # !rm
|
||||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||||
@@ -360,7 +391,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||||
assert ssl
|
assert ssl # type: ignore # !rm
|
||||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||||
if al.ssl_ver:
|
if al.ssl_ver:
|
||||||
ctx.options &= ~al.ssl_flags_en
|
ctx.options &= ~al.ssl_flags_en
|
||||||
@@ -473,27 +504,75 @@ def disable_quickedit() -> None:
|
|||||||
|
|
||||||
|
|
||||||
def sfx_tpoke(top: str):
|
def sfx_tpoke(top: str):
|
||||||
files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
|
if os.environ.get("PRTY_NO_TPOKE"):
|
||||||
|
return
|
||||||
|
|
||||||
|
files = [top] + [
|
||||||
|
os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df
|
||||||
|
]
|
||||||
while True:
|
while True:
|
||||||
t = int(time.time())
|
t = int(time.time())
|
||||||
for f in [top] + files:
|
for f in list(files):
|
||||||
os.utime(f, (t, t))
|
try:
|
||||||
|
os.utime(f, (t, t))
|
||||||
|
except Exception as ex:
|
||||||
|
lprint("<TPOKE> [%s] %r" % (f, ex))
|
||||||
|
files.remove(f)
|
||||||
|
|
||||||
time.sleep(78123)
|
time.sleep(78123)
|
||||||
|
|
||||||
|
|
||||||
def showlic() -> None:
|
def showlic() -> None:
|
||||||
p = os.path.join(E.mod, "res", "COPYING.txt")
|
try:
|
||||||
if not os.path.exists(p):
|
with load_resource(E, "res/COPYING.txt") as f:
|
||||||
|
buf = f.read()
|
||||||
|
except:
|
||||||
|
buf = b""
|
||||||
|
|
||||||
|
if buf:
|
||||||
|
print(buf.decode("utf-8", "replace"))
|
||||||
|
else:
|
||||||
print("no relevant license info to display")
|
print("no relevant license info to display")
|
||||||
return
|
return
|
||||||
|
|
||||||
with open(p, "rb") as f:
|
|
||||||
print(f.read().decode("utf-8", "replace"))
|
|
||||||
|
|
||||||
|
|
||||||
def get_sects():
|
def get_sects():
|
||||||
return [
|
return [
|
||||||
|
[
|
||||||
|
"bind",
|
||||||
|
"configure listening",
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
\033[33m-i\033[0m takes a comma-separated list of interfaces to listen on;
|
||||||
|
IP-addresses and/or unix-sockets (Unix Domain Sockets)
|
||||||
|
|
||||||
|
the default (\033[32m-i ::\033[0m) means all IPv4 and IPv6 addresses
|
||||||
|
|
||||||
|
\033[32m-i 0.0.0.0\033[0m listens on all IPv4 NICs/subnets
|
||||||
|
\033[32m-i 127.0.0.1\033[0m listens on IPv4 localhost only
|
||||||
|
\033[32m-i 127.1\033[0m listens on IPv4 localhost only
|
||||||
|
\033[32m-i 127.1,192.168.123.1\033[0m = IPv4 localhost and 192.168.123.1
|
||||||
|
|
||||||
|
\033[33m-p\033[0m takes a comma-separated list of tcp ports to listen on;
|
||||||
|
the default is \033[32m-p 3923\033[0m but as root you can \033[32m-p 80,443,3923\033[0m
|
||||||
|
|
||||||
|
when running behind a reverse-proxy, it's recommended to
|
||||||
|
use unix-sockets for improved performance and security;
|
||||||
|
|
||||||
|
\033[32m-i unix:770:www:\033[33m/tmp/a.sock\033[0m listens on \033[33m/tmp/a.sock\033[0m with
|
||||||
|
permissions \033[33m0770\033[0m; only accessible to members of the \033[33mwww\033[0m
|
||||||
|
group. This is the best approach. Alternatively,
|
||||||
|
|
||||||
|
\033[32m-i unix:777:\033[33m/tmp/a.sock\033[0m sets perms \033[33m0777\033[0m so anyone can
|
||||||
|
access it; bad unless it's inside a restricted folder
|
||||||
|
|
||||||
|
\033[32m-i unix:\033[33m/tmp/a.sock\033[0m keeps umask-defined permissions
|
||||||
|
(usually \033[33m0600\033[0m) and the same user/group as copyparty
|
||||||
|
|
||||||
|
\033[33m-p\033[0m (tcp ports) is ignored for unix sockets
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"accounts",
|
"accounts",
|
||||||
"accounts and volumes",
|
"accounts and volumes",
|
||||||
@@ -608,6 +687,8 @@ def get_sects():
|
|||||||
\033[36mxbu\033[35m executes CMD before a file upload starts
|
\033[36mxbu\033[35m executes CMD before a file upload starts
|
||||||
\033[36mxau\033[35m executes CMD after a file upload finishes
|
\033[36mxau\033[35m executes CMD after a file upload finishes
|
||||||
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
|
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
|
||||||
|
\033[36mxbc\033[35m executes CMD before a file copy
|
||||||
|
\033[36mxac\033[35m executes CMD after a file copy
|
||||||
\033[36mxbr\033[35m executes CMD before a file rename/move
|
\033[36mxbr\033[35m executes CMD before a file rename/move
|
||||||
\033[36mxar\033[35m executes CMD after a file rename/move
|
\033[36mxar\033[35m executes CMD after a file rename/move
|
||||||
\033[36mxbd\033[35m executes CMD before a file delete
|
\033[36mxbd\033[35m executes CMD before a file delete
|
||||||
@@ -616,12 +697,12 @@ def get_sects():
|
|||||||
\033[36mxban\033[35m executes CMD if someone gets banned
|
\033[36mxban\033[35m executes CMD if someone gets banned
|
||||||
\033[0m
|
\033[0m
|
||||||
can be defined as --args or volflags; for example \033[36m
|
can be defined as --args or volflags; for example \033[36m
|
||||||
--xau notify-send
|
--xau foo.py
|
||||||
-v .::r:c,xau=notify-send
|
-v .::r:c,xau=bar.py
|
||||||
\033[0m
|
\033[0m
|
||||||
commands specified as --args are appended to volflags;
|
hooks specified as commandline --args are appended to volflags;
|
||||||
each --arg and volflag can be specified multiple times,
|
each commandline --arg and volflag can be specified multiple times,
|
||||||
each command will execute in order unless one returns non-zero
|
each hook will execute in order unless one returns non-zero
|
||||||
|
|
||||||
optionally prefix the command with comma-sep. flags similar to -mtp:
|
optionally prefix the command with comma-sep. flags similar to -mtp:
|
||||||
|
|
||||||
@@ -632,6 +713,10 @@ def get_sects():
|
|||||||
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
|
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
|
||||||
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
|
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
|
||||||
|
|
||||||
|
\033[36mar\033[35m only run hook if user has read-access
|
||||||
|
\033[36marw\033[35m only run hook if user has read-write-access
|
||||||
|
\033[36marwmd\033[35m ...and so on... (doesn't work for xiu or xban)
|
||||||
|
|
||||||
\033[36mkt\033[35m kills the entire process tree on timeout (default),
|
\033[36mkt\033[35m kills the entire process tree on timeout (default),
|
||||||
\033[36mkm\033[35m kills just the main process
|
\033[36mkm\033[35m kills just the main process
|
||||||
\033[36mkn\033[35m lets it continue running until copyparty is terminated
|
\033[36mkn\033[35m lets it continue running until copyparty is terminated
|
||||||
@@ -641,6 +726,25 @@ def get_sects():
|
|||||||
\033[36mc2\033[35m show only stdout
|
\033[36mc2\033[35m show only stdout
|
||||||
\033[36mc3\033[35m mute all process otput
|
\033[36mc3\033[35m mute all process otput
|
||||||
\033[0m
|
\033[0m
|
||||||
|
examples:
|
||||||
|
|
||||||
|
\033[36m--xm some.py\033[35m runs \033[33msome.py msgtxt\033[35m on each 📟 message;
|
||||||
|
\033[33mmsgtxt\033[35m is the message that was written into the web-ui
|
||||||
|
|
||||||
|
\033[36m--xm j,some.py\033[35m runs \033[33msome.py jsontext\033[35m on each 📟 message;
|
||||||
|
\033[33mjsontext\033[35m is the message info (ip, user, ..., msg-text)
|
||||||
|
|
||||||
|
\033[36m--xm aw,j,some.py\033[35m requires user to have write-access
|
||||||
|
|
||||||
|
\033[36m--xm aw,,notify-send,hey,--\033[35m shows an OS alert on linux;
|
||||||
|
the \033[33m,,\033[35m stops copyparty from reading the rest as flags and
|
||||||
|
the \033[33m--\033[35m stops notify-send from reading the message as args
|
||||||
|
and the alert will be "hey" followed by the messagetext
|
||||||
|
|
||||||
|
\033[36m--xau zmq:pub:tcp://*:5556\033[35m announces uploads on zeromq;
|
||||||
|
\033[36m--xau t3,zmq:push:tcp://*:5557\033[35m also works, and you can
|
||||||
|
\033[36m--xau t3,j,zmq:req:tcp://localhost:5555\033[35m too for example
|
||||||
|
\033[0m
|
||||||
each hook is executed once for each event, except for \033[36mxiu\033[0m
|
each hook is executed once for each event, except for \033[36mxiu\033[0m
|
||||||
which builds up a backlog of uploads, running the hook just once
|
which builds up a backlog of uploads, running the hook just once
|
||||||
as soon as the volume has been idle for iN seconds (5 by default)
|
as soon as the volume has been idle for iN seconds (5 by default)
|
||||||
@@ -652,6 +756,11 @@ def get_sects():
|
|||||||
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
|
||||||
if the program returns 0 (true/OK) then the ban will NOT happen
|
if the program returns 0 (true/OK) then the ban will NOT happen
|
||||||
|
|
||||||
|
effects can be used to redirect uploads into other
|
||||||
|
locations, and to delete or index other files based
|
||||||
|
on new uploads, but with certain limitations. See
|
||||||
|
bin/hooks/reloc* and docs/devnotes.md#hook-effects
|
||||||
|
|
||||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||||
@@ -666,8 +775,22 @@ def get_sects():
|
|||||||
values for --urlform:
|
values for --urlform:
|
||||||
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
||||||
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
||||||
\033[36mprint,get\033[35m prints the data in the log and returns GET
|
\033[36mprint \033[35m prints the data to log and returns an error
|
||||||
(leave out the ",get" to return an error instead)
|
\033[36mprint,xm \033[35m prints the data to log and returns --xm output
|
||||||
|
\033[36mprint,get\033[35m prints the data to log and returns GET\033[0m
|
||||||
|
|
||||||
|
note that the \033[35m--xm\033[0m hook will only run if \033[35m--urlform\033[0m is
|
||||||
|
either \033[36mprint\033[0m or \033[36mprint,get\033[0m or the default \033[36mprint,xm\033[0m
|
||||||
|
|
||||||
|
if an \033[35m--xm\033[0m hook returns text, then
|
||||||
|
the response code will be HTTP 202;
|
||||||
|
http/get responses will be HTTP 200
|
||||||
|
|
||||||
|
if there are multiple \033[35m--xm\033[0m hooks defined, then
|
||||||
|
the first hook that produced output is returned
|
||||||
|
|
||||||
|
if there are no \033[35m--xm\033[0m hooks defined, then the default
|
||||||
|
\033[36mprint,xm\033[0m behaves like \033[36mprint,get\033[0m (returning html)
|
||||||
"""
|
"""
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
@@ -677,7 +800,7 @@ def get_sects():
|
|||||||
dedent(
|
dedent(
|
||||||
"""
|
"""
|
||||||
specify --exp or the "exp" volflag to enable placeholder expansions
|
specify --exp or the "exp" volflag to enable placeholder expansions
|
||||||
in README.md / .prologue.html / .epilogue.html
|
in README.md / PREADME.md / .prologue.html / .epilogue.html
|
||||||
|
|
||||||
--exp-md (volflag exp_md) holds the list of placeholders which can be
|
--exp-md (volflag exp_md) holds the list of placeholders which can be
|
||||||
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
|
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
|
||||||
@@ -771,8 +894,9 @@ def get_sects():
|
|||||||
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
|
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
|
||||||
|
|
||||||
\033[36m--ah-alg scrypt\033[0m # which is the same as:
|
\033[36m--ah-alg scrypt\033[0m # which is the same as:
|
||||||
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
|
\033[36m--ah-alg scrypt,13,2,8,4,32\033[0m
|
||||||
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
|
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads,
|
||||||
|
and allow using up to 32 MiB RAM (ram=cost*blksz roughly)
|
||||||
|
|
||||||
\033[36m--ah-alg sha2\033[0m # which is the same as:
|
\033[36m--ah-alg sha2\033[0m # which is the same as:
|
||||||
\033[36m--ah-alg sha2,424242\033[0m
|
\033[36m--ah-alg sha2,424242\033[0m
|
||||||
@@ -793,7 +917,7 @@ def get_sects():
|
|||||||
dedent(
|
dedent(
|
||||||
"""
|
"""
|
||||||
the mDNS protocol is multicast-based, which means there are thousands
|
the mDNS protocol is multicast-based, which means there are thousands
|
||||||
of fun and intersesting ways for it to break unexpectedly
|
of fun and interesting ways for it to break unexpectedly
|
||||||
|
|
||||||
things to check if it does not work at all:
|
things to check if it does not work at all:
|
||||||
|
|
||||||
@@ -847,7 +971,7 @@ def add_general(ap, nc, srvname):
|
|||||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m], see --help-accounts")
|
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m], see --help-accounts")
|
||||||
ap2.add_argument("--grp", metavar="G:N,N", type=u, action="append", help="add group, \033[33mNAME\033[0m:\033[33mUSER1\033[0m,\033[33mUSER2\033[0m,\033[33m...\033[0m; example [\033[32madmins:ed,foo,bar\033[0m]")
|
ap2.add_argument("--grp", metavar="G:N,N", type=u, action="append", help="add group, \033[33mNAME\033[0m:\033[33mUSER1\033[0m,\033[33mUSER2\033[0m,\033[33m...\033[0m; example [\033[32madmins:ed,foo,bar\033[0m]")
|
||||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,xm", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||||
ap2.add_argument("--mime", metavar="EXT=MIME", type=u, action="append", help="map file \033[33mEXT\033[0mension to \033[33mMIME\033[0mtype, for example [\033[32mjpg=image/jpeg\033[0m]")
|
ap2.add_argument("--mime", metavar="EXT=MIME", type=u, action="append", help="map file \033[33mEXT\033[0mension to \033[33mMIME\033[0mtype, for example [\033[32mjpg=image/jpeg\033[0m]")
|
||||||
@@ -877,6 +1001,16 @@ def add_fs(ap):
|
|||||||
ap2.add_argument("--mtab-age", metavar="SEC", type=int, default=60, help="rebuild mountpoint cache every \033[33mSEC\033[0m to keep track of sparse-files support; keep low on servers with removable media")
|
ap2.add_argument("--mtab-age", metavar="SEC", type=int, default=60, help="rebuild mountpoint cache every \033[33mSEC\033[0m to keep track of sparse-files support; keep low on servers with removable media")
|
||||||
|
|
||||||
|
|
||||||
|
def add_share(ap):
|
||||||
|
db_path = os.path.join(E.cfg, "shares.db")
|
||||||
|
ap2 = ap.add_argument_group('share-url options')
|
||||||
|
ap2.add_argument("--shr", metavar="DIR", type=u, default="", help="toplevel virtual folder for shared files/folders, for example [\033[32m/share\033[0m]")
|
||||||
|
ap2.add_argument("--shr-db", metavar="FILE", type=u, default=db_path, help="database to store shares in")
|
||||||
|
ap2.add_argument("--shr-adm", metavar="U,U", type=u, default="", help="comma-separated list of users allowed to view/delete any share")
|
||||||
|
ap2.add_argument("--shr-rt", metavar="MIN", type=int, default=1440, help="shares can be revived by their owner if they expired less than MIN minutes ago; [\033[32m60\033[0m]=hour, [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week")
|
||||||
|
ap2.add_argument("--shr-v", action="store_true", help="debug")
|
||||||
|
|
||||||
|
|
||||||
def add_upload(ap):
|
def add_upload(ap):
|
||||||
ap2 = ap.add_argument_group('upload options')
|
ap2 = ap.add_argument_group('upload options')
|
||||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||||
@@ -887,29 +1021,33 @@ def add_upload(ap):
|
|||||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
ap2.add_argument("--dedup", action="store_true", help="enable symlink-based upload deduplication (volflag=dedup)")
|
||||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
ap2.add_argument("--safe-dedup", metavar="N", type=int, default=50, help="how careful to be when deduplicating files; [\033[32m1\033[0m] = just verify the filesize, [\033[32m50\033[0m] = verify file contents have not been altered (volflag=safededup)")
|
||||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
ap2.add_argument("--hardlink", action="store_true", help="enable hardlink-based dedup; will fallback on symlinks when that is impossible (across filesystems) (volflag=hardlink)")
|
||||||
|
ap2.add_argument("--hardlink-only", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=hardlinkonly)")
|
||||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||||
|
ap2.add_argument("--no-clone", action="store_true", help="do not use existing data on disk to satisfy dupe uploads; reduces server HDD reads in exchange for much more network load (volflag=noclone)")
|
||||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||||
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440.0, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||||
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
||||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
||||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
|
ap2.add_argument("--df", metavar="GiB", type=u, default="0", help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests; assumes gigabytes unless a unit suffix is given: [\033[32m256m\033[0m], [\033[32m4\033[0m], [\033[32m2T\033[0m] (volflag=df)")
|
||||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||||
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
|
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
|
||||||
|
ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
|
||||||
|
ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
|
||||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||||
|
|
||||||
|
|
||||||
def add_network(ap):
|
def add_network(ap):
|
||||||
ap2 = ap.add_argument_group('network options')
|
ap2 = ap.add_argument_group('network options')
|
||||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="IPs and/or unix-sockets to listen on (see \033[33m--help-bind\033[0m). Default: all IPv4 and IPv6")
|
||||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to listen on (comma/range); ignored for unix-sockets")
|
||||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||||
@@ -921,12 +1059,12 @@ def add_network(ap):
|
|||||||
else:
|
else:
|
||||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||||
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
||||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||||
ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
|
ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
|
||||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0.0, help="debug: socket write delay in seconds")
|
||||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0.0, help="debug: response delay in seconds")
|
||||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0.0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||||
|
|
||||||
|
|
||||||
def add_tls(ap, cert_path):
|
def add_tls(ap, cert_path):
|
||||||
@@ -934,10 +1072,10 @@ def add_tls(ap, cert_path):
|
|||||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||||
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
|
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
|
||||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, default="", help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
||||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
ap2.add_argument("--ciphers", metavar="LIST", type=u, default="", help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
||||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
ap2.add_argument("--ssl-log", metavar="PATH", type=u, default="", help="log master secrets for later decryption in wireshark")
|
||||||
|
|
||||||
|
|
||||||
def add_cert(ap, cert_path):
|
def add_cert(ap, cert_path):
|
||||||
@@ -950,23 +1088,38 @@ def add_cert(ap, cert_path):
|
|||||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||||
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
||||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
|
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650.0, help="ca-certificate expiration time in days")
|
||||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
|
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365.0, help="server-cert expiration time in days")
|
||||||
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
|
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
|
||||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72.0, help="backdate in hours")
|
||||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
||||||
|
|
||||||
|
|
||||||
def add_auth(ap):
|
def add_auth(ap):
|
||||||
|
ses_db = os.path.join(E.cfg, "sessions.db")
|
||||||
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks if the request-header \033[33mHN\033[0m contains a username to associate the request with (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||||
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||||
ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
|
ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
|
||||||
ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
|
ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
|
||||||
ap2.add_argument("--no-bauth", action="store_true", help="disable basic-authentication support; do not accept passwords from the 'Authenticate' header at all. NOTE: This breaks support for the android app")
|
ap2.add_argument("--no-bauth", action="store_true", help="disable basic-authentication support; do not accept passwords from the 'Authenticate' header at all. NOTE: This breaks support for the android app")
|
||||||
ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
|
ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
|
||||||
|
ap2.add_argument("--ses-db", metavar="PATH", type=u, default=ses_db, help="where to store the sessions database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||||
|
ap2.add_argument("--ses-len", metavar="CHARS", type=int, default=20, help="session key length; default is 120 bits ((20//4)*4*6)")
|
||||||
|
ap2.add_argument("--no-ses", action="store_true", help="disable sessions; use plaintext passwords in cookies")
|
||||||
|
ap2.add_argument("--ipu", metavar="CIDR=USR", type=u, action="append", help="users with IP matching \033[33mCIDR\033[0m are auto-authenticated as username \033[33mUSR\033[0m; example: [\033[32m172.16.24.0/24=dave]")
|
||||||
|
|
||||||
|
|
||||||
|
def add_chpw(ap):
|
||||||
|
db_path = os.path.join(E.cfg, "chpw.json")
|
||||||
|
ap2 = ap.add_argument_group('user-changeable passwords options')
|
||||||
|
ap2.add_argument("--chpw", action="store_true", help="allow users to change their own passwords")
|
||||||
|
ap2.add_argument("--chpw-no", metavar="U,U,U", type=u, action="append", help="do not allow password-changes for this comma-separated list of usernames")
|
||||||
|
ap2.add_argument("--chpw-db", metavar="PATH", type=u, default=db_path, help="where to store the passwords database (if you run multiple copyparty instances, make sure they use different DBs)")
|
||||||
|
ap2.add_argument("--chpw-len", metavar="N", type=int, default=8, help="minimum password length")
|
||||||
|
ap2.add_argument("--chpw-v", metavar="LVL", type=int, default=2, help="verbosity of summary on config load [\033[32m0\033[0m] = nothing at all, [\033[32m1\033[0m] = number of users, [\033[32m2\033[0m] = list users with default-pw, [\033[32m3\033[0m] = list all users")
|
||||||
|
|
||||||
|
|
||||||
def add_zeroconf(ap):
|
def add_zeroconf(ap):
|
||||||
@@ -988,6 +1141,8 @@ def add_zc_mdns(ap):
|
|||||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||||
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
||||||
|
ap2.add_argument("--zm-no-pe", action="store_true", help="mute parser errors (invalid incoming MDNS packets)")
|
||||||
|
ap2.add_argument("--zm-nwa-1", action="store_true", help="disable workaround for avahi-bug #379 (corruption in Avahi's mDNS reflection feature)")
|
||||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
||||||
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
||||||
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
||||||
@@ -996,7 +1151,7 @@ def add_zc_mdns(ap):
|
|||||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
||||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0.0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||||
|
|
||||||
|
|
||||||
def add_zc_ssdp(ap):
|
def add_zc_ssdp(ap):
|
||||||
@@ -1011,14 +1166,15 @@ def add_zc_ssdp(ap):
|
|||||||
|
|
||||||
def add_ftp(ap):
|
def add_ftp(ap):
|
||||||
ap2 = ap.add_argument_group('FTP options (TCP only)')
|
ap2 = ap.add_argument_group('FTP options (TCP only)')
|
||||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
ap2.add_argument("--ftp", metavar="PORT", type=int, default=0, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
ap2.add_argument("--ftps", metavar="PORT", type=int, default=0, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||||
ap2.add_argument("--ftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
ap2.add_argument("--ftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||||
|
ap2.add_argument("--ftp-no-ow", action="store_true", help="if target file exists, reject upload instead of overwrite")
|
||||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, default="", help="the NAT address to use for passive connections")
|
||||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, default="", help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||||
|
|
||||||
|
|
||||||
def add_webdav(ap):
|
def add_webdav(ap):
|
||||||
@@ -1028,18 +1184,20 @@ def add_webdav(ap):
|
|||||||
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
||||||
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
|
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
|
||||||
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
||||||
|
ap2.add_argument("--dav-ua1", metavar="PTN", type=u, default=r" kioworker/", help="regex of tricky user-agents which expect 401 from GET requests; disable with [\033[32mno\033[0m] or blank")
|
||||||
|
|
||||||
|
|
||||||
def add_tftp(ap):
|
def add_tftp(ap):
|
||||||
ap2 = ap.add_argument_group('TFTP options (UDP only)')
|
ap2 = ap.add_argument_group('TFTP options (UDP only)')
|
||||||
ap2.add_argument("--tftp", metavar="PORT", type=int, help="enable TFTP server on \033[33mPORT\033[0m, for example \033[32m69 \033[0mor \033[32m3969")
|
ap2.add_argument("--tftp", metavar="PORT", type=int, default=0, help="enable TFTP server on \033[33mPORT\033[0m, for example \033[32m69 \033[0mor \033[32m3969")
|
||||||
|
ap2.add_argument("--tftp4", action="store_true", help="only listen on IPv4")
|
||||||
ap2.add_argument("--tftpv", action="store_true", help="verbose")
|
ap2.add_argument("--tftpv", action="store_true", help="verbose")
|
||||||
ap2.add_argument("--tftpvv", action="store_true", help="verboser")
|
ap2.add_argument("--tftpvv", action="store_true", help="verboser")
|
||||||
ap2.add_argument("--tftp-no-fast", action="store_true", help="debug: disable optimizations")
|
ap2.add_argument("--tftp-no-fast", action="store_true", help="debug: disable optimizations")
|
||||||
ap2.add_argument("--tftp-lsf", metavar="PTN", type=u, default="\\.?(dir|ls)(\\.txt)?", help="return a directory listing if a file with this name is requested and it does not exist; defaults matches .ls, dir, .dir.txt, ls.txt, ...")
|
ap2.add_argument("--tftp-lsf", metavar="PTN", type=u, default="\\.?(dir|ls)(\\.txt)?", help="return a directory listing if a file with this name is requested and it does not exist; defaults matches .ls, dir, .dir.txt, ls.txt, ...")
|
||||||
ap2.add_argument("--tftp-nols", action="store_true", help="if someone tries to download a directory, return an error instead of showing its directory listing")
|
ap2.add_argument("--tftp-nols", action="store_true", help="if someone tries to download a directory, return an error instead of showing its directory listing")
|
||||||
ap2.add_argument("--tftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
ap2.add_argument("--tftp-ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
|
||||||
ap2.add_argument("--tftp-pr", metavar="P-P", type=u, help="the range of UDP ports to use for data transfer, for example \033[32m12000-13000")
|
ap2.add_argument("--tftp-pr", metavar="P-P", type=u, default="", help="the range of UDP ports to use for data transfer, for example \033[32m12000-13000")
|
||||||
|
|
||||||
|
|
||||||
def add_smb(ap):
|
def add_smb(ap):
|
||||||
@@ -1048,7 +1206,7 @@ def add_smb(ap):
|
|||||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||||
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
||||||
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
||||||
ap2.add_argument("--smb-nwa-1", action="store_true", help="disable impacket#1433 workaround (truncate directory listings to 64kB)")
|
ap2.add_argument("--smb-nwa-1", action="store_true", help="truncate directory listings to 64kB (~400 files); avoids impacket-0.11 bug, fixes impacket-0.12 performance")
|
||||||
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
|
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
|
||||||
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
|
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
|
||||||
ap2.add_argument("--smbv", action="store_true", help="verbose")
|
ap2.add_argument("--smbv", action="store_true", help="verbose")
|
||||||
@@ -1068,12 +1226,15 @@ def add_hooks(ap):
|
|||||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
||||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
||||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
||||||
|
ap2.add_argument("--xbc", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file copy")
|
||||||
|
ap2.add_argument("--xac", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file copy")
|
||||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
||||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
||||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
||||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||||
|
ap2.add_argument("--hook-v", action="store_true", help="verbose hooks")
|
||||||
|
|
||||||
|
|
||||||
def add_stats(ap):
|
def add_stats(ap):
|
||||||
@@ -1099,15 +1260,17 @@ def add_optouts(ap):
|
|||||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||||
|
ap2.add_argument("--no-cp", action="store_true", help="disable copy operations")
|
||||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
||||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
ap2.add_argument("--zip-who", metavar="LVL", type=int, default=3, help="who can download as zip/tar? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=authenticated-with-read-access, [\033[32m3\033[0m]=everyone-with-read-access (volflag=zip_who)\n\033[1;31mWARNING:\033[0m if a nested volume has a more restrictive value than a parent volume, then this will be \033[33mignored\033[0m if the download is initiated from the parent, more lenient volume")
|
||||||
|
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar; same as \033[33m--zip-who=0\033[0m")
|
||||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||||
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
|
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
|
||||||
ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
|
ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
|
||||||
ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader IPs into the database")
|
ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader-IP into the database; will also disable unpost, you may want \033[32m--forget-ip\033[0m instead (volflag=no_db_ip)")
|
||||||
|
|
||||||
|
|
||||||
def add_safety(ap):
|
def add_safety(ap):
|
||||||
@@ -1115,17 +1278,17 @@ def add_safety(ap):
|
|||||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, default="", help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
||||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme/preadme.md into directory listings")
|
||||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
ap2.add_argument("--logout", metavar="H", type=float, default=8086.0, help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
||||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||||
@@ -1161,17 +1324,19 @@ def add_shutdown(ap):
|
|||||||
def add_logging(ap):
|
def add_logging(ap):
|
||||||
ap2 = ap.add_argument_group('logging options')
|
ap2 = ap.add_argument_group('logging options')
|
||||||
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
||||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
ap2.add_argument("-lo", metavar="PATH", type=u, default="", help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||||
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||||
|
ap2.add_argument("--log-utc", action="store_true", help="do not use local timezone; assume the TZ env-var is UTC (tiny bit faster)")
|
||||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||||
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
ap2.add_argument("--ohead", metavar="HEADER", type=u, action='append', help="print response \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||||
|
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|[?&]th=[wjp]|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
||||||
|
|
||||||
|
|
||||||
def add_admin(ap):
|
def add_admin(ap):
|
||||||
@@ -1179,19 +1344,26 @@ def add_admin(ap):
|
|||||||
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||||
|
ap2.add_argument("--no-ups-page", action="store_true", help="disable ?ru (list of recent uploads)")
|
||||||
|
ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
|
||||||
|
ap2.add_argument("--dl-list", metavar="LVL", type=int, default=2, help="who can see active downloads in the controlpanel? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone")
|
||||||
|
ap2.add_argument("--ups-who", metavar="LVL", type=int, default=2, help="who can see recent uploads on the ?ru page? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone (volflag=ups_who)")
|
||||||
|
ap2.add_argument("--ups-when", action="store_true", help="let everyone see upload timestamps on the ?ru page, not just admins")
|
||||||
|
|
||||||
|
|
||||||
def add_thumbnail(ap):
|
def add_thumbnail(ap):
|
||||||
|
th_ram = (RAM_AVAIL or RAM_TOTAL or 9) * 0.6
|
||||||
|
th_ram = int(max(min(th_ram, 6), 1) * 10) / 10
|
||||||
ap2 = ap.add_argument_group('thumbnail options')
|
ap2 = ap.add_argument_group('thumbnail options')
|
||||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
||||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60.0, help="conversion timeout in seconds (volflag=convt)")
|
||||||
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=6, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=th_ram, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
|
||||||
ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32mfy\033[0m]=crop, [\033[32mfn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
|
ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32my\033[0m]=crop, [\033[32mn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
|
||||||
ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32mfy\033[0m]=yes, [\033[32mfn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
|
ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32my\033[0m]=yes, [\033[32mn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
|
||||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||||
@@ -1204,45 +1376,59 @@ def add_thumbnail(ap):
|
|||||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||||
# https://github.com/libvips/libvips
|
# https://github.com/libvips/libvips
|
||||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,cbz,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,cbz,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz", help="audio formats to decompress before passing to ffmpeg")
|
ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz, cbz=jpg.cbz", help="audio/image formats to decompress before passing to ffmpeg")
|
||||||
|
|
||||||
|
|
||||||
def add_transcoding(ap):
|
def add_transcoding(ap):
|
||||||
ap2 = ap.add_argument_group('transcoding options')
|
ap2 = ap.add_argument_group('transcoding options')
|
||||||
ap2.add_argument("--q-opus", metavar="KBPS", type=int, default=128, help="target bitrate for transcoding to opus; set 0 to disable")
|
ap2.add_argument("--q-opus", metavar="KBPS", type=int, default=128, help="target bitrate for transcoding to opus; set 0 to disable")
|
||||||
ap2.add_argument("--q-mp3", metavar="QUALITY", type=u, default="q2", help="target quality for transcoding to mp3, for example [\033[32m192k\033[0m] (CBR) or [\033[32mq0\033[0m] (CQ/CRF, q0=maxquality, q9=smallest); set 0 to disable")
|
ap2.add_argument("--q-mp3", metavar="QUALITY", type=u, default="q2", help="target quality for transcoding to mp3, for example [\033[32m192k\033[0m] (CBR) or [\033[32mq0\033[0m] (CQ/CRF, q0=maxquality, q9=smallest); set 0 to disable")
|
||||||
|
ap2.add_argument("--no-caf", action="store_true", help="disable transcoding to caf-opus (affects iOS v12~v17), will use mp3 instead")
|
||||||
|
ap2.add_argument("--no-owa", action="store_true", help="disable transcoding to webm-opus (iOS v18 and later), will use mp3 instead")
|
||||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||||
|
|
||||||
|
|
||||||
|
def add_rss(ap):
|
||||||
|
ap2 = ap.add_argument_group('RSS options')
|
||||||
|
ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental) (volflag=rss)")
|
||||||
|
ap2.add_argument("--rss-nf", metavar="HITS", type=int, default=250, help="default number of files to return (url-param 'nf')")
|
||||||
|
ap2.add_argument("--rss-fext", metavar="E,E", type=u, default="", help="default list of file extensions to include (url-param 'fext'); blank=all")
|
||||||
|
ap2.add_argument("--rss-sort", metavar="ORD", type=u, default="m", help="default sort order (url-param 'sort'); [\033[32mm\033[0m]=last-modified [\033[32mu\033[0m]=upload-time [\033[32mn\033[0m]=filename [\033[32ms\033[0m]=filesize; Uppercase=oldest-first. Note that upload-time is 0 for non-uploaded files")
|
||||||
|
|
||||||
|
|
||||||
def add_db_general(ap, hcores):
|
def add_db_general(ap, hcores):
|
||||||
noidx = APPLESAN_TXT if MACOS else ""
|
noidx = APPLESAN_TXT if MACOS else ""
|
||||||
ap2 = ap.add_argument_group('general db options')
|
ap2 = ap.add_argument_group('general db options')
|
||||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
ap2.add_argument("-e2d", action="store_true", help="enable up2k database; this enables file search, upload-undo, improves deduplication")
|
||||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
|
ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
|
||||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||||
|
ap2.add_argument("--no-dirsz", action="store_true", help="do not show total recursive size of folders in listings, show inode size instead; slightly faster (volflag=nodirsz)")
|
||||||
|
ap2.add_argument("--re-dirsz", action="store_true", help="if the directory-sizes in the UI are bonkers, use this along with \033[33m-e2dsa\033[0m to rebuild the index from scratch")
|
||||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||||
ap2.add_argument("--re-dhash", action="store_true", help="force a cache rebuild on startup; enable this once if it gets out of sync (should never be necessary)")
|
ap2.add_argument("--re-dhash", action="store_true", help="force a cache rebuild on startup; enable this once if it gets out of sync (should never be necessary)")
|
||||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||||
|
ap2.add_argument("--forget-ip", metavar="MIN", type=int, default=0, help="remove uploader-IP from database (and make unpost impossible) \033[33mMIN\033[0m minutes after upload, for GDPR reasons. Default [\033[32m0\033[0m] is never-forget. [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month. (volflag=forget_ip)")
|
||||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (probably buggy, not recommended) (volflag=xlink)")
|
||||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10.0, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||||
|
ap2.add_argument("--srch-excl", metavar="PTN", type=u, default="", help="regex: exclude files from search results if the file-URL matches \033[33mPTN\033[0m (case-sensitive). Example: [\033[32mpassword|logs/[0-9]\033[0m] any URL containing 'password' or 'logs/DIGIT' (volflag=srch_excl)")
|
||||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
||||||
|
|
||||||
|
|
||||||
@@ -1293,28 +1479,37 @@ def add_og(ap):
|
|||||||
def add_ui(ap, retry):
|
def add_ui(ap, retry):
|
||||||
ap2 = ap.add_argument_group('ui options')
|
ap2 = ap.add_argument_group('ui options')
|
||||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
ap2.add_argument("--gsel", action="store_true", help="select files in grid by ctrl-click (volflag=gsel)")
|
||||||
|
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor chi\033[0m")
|
||||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||||
ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
|
ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
|
||||||
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||||
|
ap2.add_argument("--nsort", action="store_true", help="default-enable natural sort of filenames with leading numbers (volflag=nsort)")
|
||||||
|
ap2.add_argument("--hsortn", metavar="N", type=int, default=2, help="number of sorting rules to include in media URLs by default (volflag=hsortn)")
|
||||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||||
|
ap2.add_argument("--ext-th", metavar="E=VP", type=u, action="append", help="use thumbnail-image \033[33mVP\033[0m for file-extension \033[33mE\033[0m, example: [\033[32mexe=/.res/exe.png\033[0m] (volflag=ext_th)")
|
||||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
ap2.add_argument("--spinner", metavar="TXT", type=u, default="🌲", help="\033[33memoji\033[0m or \033[33memoji,css\033[0m Example: [\033[32m🥖,padding:0\033[0m]")
|
||||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
ap2.add_argument("--css-browser", metavar="L", type=u, default="", help="URL to additional CSS to include in the filebrowser html")
|
||||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages; can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
ap2.add_argument("--js-browser", metavar="L", type=u, default="", help="URL to additional JS to include in the filebrowser html")
|
||||||
|
ap2.add_argument("--js-other", metavar="L", type=u, default="", help="URL to additional JS to include in all other pages")
|
||||||
|
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages (except for basic-browser); can be @PATH to send the contents of a file at PATH, and/or begin with %% to render as jinja2 template (volflag=html_head)")
|
||||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
ap2.add_argument("--pb-url", metavar="URL", type=u, default=URL_PRJ, help="powered-by link; disable with \033[33m-np\033[0m")
|
||||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||||
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#sandbox")
|
||||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to allow in the iframe 'sandbox' attribute for prologue/epilogue docs (volflag=lg_sbf)")
|
||||||
|
ap2.add_argument("--md-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for README.md docs, for example [\033[32mfullscreen\033[0m] (volflag=md_sba)")
|
||||||
|
ap2.add_argument("--lg-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for prologue/epilogue docs (volflag=lg_sba); see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy#iframes")
|
||||||
|
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
|
||||||
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
||||||
|
|
||||||
|
|
||||||
@@ -1322,28 +1517,35 @@ def add_debug(ap):
|
|||||||
ap2 = ap.add_argument_group('debug options')
|
ap2 = ap.add_argument_group('debug options')
|
||||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||||
|
ap2.add_argument("--deps", action="store_true", help="list information about detected optional dependencies")
|
||||||
|
if hasattr(select, "poll"):
|
||||||
|
ap2.add_argument("--no-poll", action="store_true", help="kernel-bug workaround: disable poll; use select instead (limits max num clients to ~700)")
|
||||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||||
|
ap2.add_argument("--rm-sck", action="store_true", help="when listening on unix-sockets, do a basic delete+bind instead of the default atomic bind")
|
||||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
ap2.add_argument("--stackmon", metavar="P,S", type=u, default="", help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
|
ap2.add_argument("--log-thrs", metavar="SEC", type=float, default=0.0, help="list active threads every \033[33mSEC\033[0m")
|
||||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
||||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
|
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
|
||||||
|
ap2.add_argument("--bf-log", metavar="PATH", type=u, default="", help="bak-flips: log corruption info to a textfile at \033[33mPATH\033[0m")
|
||||||
|
ap2.add_argument("--no-cfg-cmt-warn", action="store_true", help=argparse.SUPPRESS)
|
||||||
|
|
||||||
|
|
||||||
# fmt: on
|
# fmt: on
|
||||||
|
|
||||||
|
|
||||||
def run_argparse(
|
def run_argparse(
|
||||||
argv: list[str], formatter: Any, retry: bool, nc: int
|
argv: list[str], formatter: Any, retry: bool, nc: int, verbose=True
|
||||||
) -> argparse.Namespace:
|
) -> argparse.Namespace:
|
||||||
ap = argparse.ArgumentParser(
|
ap = argparse.ArgumentParser(
|
||||||
formatter_class=formatter,
|
formatter_class=formatter,
|
||||||
|
usage=argparse.SUPPRESS,
|
||||||
prog="copyparty",
|
prog="copyparty",
|
||||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||||
)
|
)
|
||||||
@@ -1361,23 +1563,26 @@ def run_argparse(
|
|||||||
|
|
||||||
tty = os.environ.get("TERM", "").lower() == "linux"
|
tty = os.environ.get("TERM", "").lower() == "linux"
|
||||||
|
|
||||||
srvname = get_srvname()
|
srvname = get_srvname(verbose)
|
||||||
|
|
||||||
add_general(ap, nc, srvname)
|
add_general(ap, nc, srvname)
|
||||||
add_network(ap)
|
add_network(ap)
|
||||||
add_tls(ap, cert_path)
|
add_tls(ap, cert_path)
|
||||||
add_cert(ap, cert_path)
|
add_cert(ap, cert_path)
|
||||||
add_auth(ap)
|
add_auth(ap)
|
||||||
|
add_chpw(ap)
|
||||||
add_qr(ap, tty)
|
add_qr(ap, tty)
|
||||||
add_zeroconf(ap)
|
add_zeroconf(ap)
|
||||||
add_zc_mdns(ap)
|
add_zc_mdns(ap)
|
||||||
add_zc_ssdp(ap)
|
add_zc_ssdp(ap)
|
||||||
add_fs(ap)
|
add_fs(ap)
|
||||||
|
add_share(ap)
|
||||||
add_upload(ap)
|
add_upload(ap)
|
||||||
add_db_general(ap, hcores)
|
add_db_general(ap, hcores)
|
||||||
add_db_metadata(ap)
|
add_db_metadata(ap)
|
||||||
add_thumbnail(ap)
|
add_thumbnail(ap)
|
||||||
add_transcoding(ap)
|
add_transcoding(ap)
|
||||||
|
add_rss(ap)
|
||||||
add_ftp(ap)
|
add_ftp(ap)
|
||||||
add_webdav(ap)
|
add_webdav(ap)
|
||||||
add_tftp(ap)
|
add_tftp(ap)
|
||||||
@@ -1426,16 +1631,13 @@ def run_argparse(
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
def main(argv: Optional[list[str]] = None) -> None:
|
||||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
os.system("rem") # enables colors
|
os.system("rem") # enables colors
|
||||||
|
|
||||||
init_E(E)
|
init_E(E)
|
||||||
|
|
||||||
if rsrc: # pyz
|
|
||||||
E.mod = rsrc
|
|
||||||
|
|
||||||
if argv is None:
|
if argv is None:
|
||||||
argv = sys.argv
|
argv = sys.argv
|
||||||
|
|
||||||
@@ -1492,6 +1694,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
("--hdr-au-usr", "--idp-h-usr"),
|
("--hdr-au-usr", "--idp-h-usr"),
|
||||||
("--idp-h-sep", "--idp-gsep"),
|
("--idp-h-sep", "--idp-gsep"),
|
||||||
("--th-no-crop", "--th-crop=n"),
|
("--th-no-crop", "--th-crop=n"),
|
||||||
|
("--never-symlink", "--hardlink-only"),
|
||||||
]
|
]
|
||||||
for dk, nk in deprecated:
|
for dk, nk in deprecated:
|
||||||
idx = -1
|
idx = -1
|
||||||
@@ -1516,7 +1719,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
argv.extend(["--qr"])
|
argv.extend(["--qr"])
|
||||||
if ANYWIN or not os.geteuid():
|
if ANYWIN or not os.geteuid():
|
||||||
# win10 allows symlinks if admin; can be unexpected
|
# win10 allows symlinks if admin; can be unexpected
|
||||||
argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
|
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -1532,13 +1735,13 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
if hard > 0: # -1 == infinite
|
if hard > 0: # -1 == infinite
|
||||||
nc = min(nc, int(hard / 4))
|
nc = min(nc, int(hard / 4))
|
||||||
except:
|
except:
|
||||||
nc = 512
|
nc = 486 # mdns/ssdp restart headroom; select() maxfd is 512 on windows
|
||||||
|
|
||||||
retry = False
|
retry = False
|
||||||
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
||||||
try:
|
try:
|
||||||
al = run_argparse(argv, fmtr, retry, nc)
|
al = run_argparse(argv, fmtr, retry, nc)
|
||||||
dal = run_argparse([], fmtr, retry, nc)
|
dal = run_argparse([], fmtr, retry, nc, False)
|
||||||
break
|
break
|
||||||
except SystemExit:
|
except SystemExit:
|
||||||
raise
|
raise
|
||||||
@@ -1564,7 +1767,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
except:
|
except:
|
||||||
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
|
||||||
|
|
||||||
if al.ansi:
|
if not al.ansi:
|
||||||
al.wintitle = ""
|
al.wintitle = ""
|
||||||
|
|
||||||
# propagate implications
|
# propagate implications
|
||||||
@@ -1577,6 +1780,9 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
if getattr(al, k1):
|
if getattr(al, k1):
|
||||||
setattr(al, k2, False)
|
setattr(al, k2, False)
|
||||||
|
|
||||||
|
if not HAVE_IPV6 and al.i == "::":
|
||||||
|
al.i = "0.0.0.0"
|
||||||
|
|
||||||
al.i = al.i.split(",")
|
al.i = al.i.split(",")
|
||||||
try:
|
try:
|
||||||
if "-" in al.p:
|
if "-" in al.p:
|
||||||
@@ -1599,6 +1805,9 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
if al.ihead:
|
if al.ihead:
|
||||||
al.ihead = [x.lower() for x in al.ihead]
|
al.ihead = [x.lower() for x in al.ihead]
|
||||||
|
|
||||||
|
if al.ohead:
|
||||||
|
al.ohead = [x.lower() for x in al.ohead]
|
||||||
|
|
||||||
if HAVE_SSL:
|
if HAVE_SSL:
|
||||||
if al.ssl_ver:
|
if al.ssl_ver:
|
||||||
configure_ssl_ver(al)
|
configure_ssl_ver(al)
|
||||||
@@ -1619,12 +1828,15 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
|
|||||||
print("error: python2 cannot --smb")
|
print("error: python2 cannot --smb")
|
||||||
return
|
return
|
||||||
|
|
||||||
if sys.version_info < (3, 6):
|
if not PY36:
|
||||||
al.no_scandir = True
|
al.no_scandir = True
|
||||||
|
|
||||||
if not hasattr(os, "sendfile"):
|
if not hasattr(os, "sendfile"):
|
||||||
al.no_sendfile = True
|
al.no_sendfile = True
|
||||||
|
|
||||||
|
if not hasattr(select, "poll"):
|
||||||
|
al.no_poll = True
|
||||||
|
|
||||||
# signal.signal(signal.SIGINT, sighandler)
|
# signal.signal(signal.SIGINT, sighandler)
|
||||||
|
|
||||||
SvcHub(al, dal, argv, "".join(printed)).run()
|
SvcHub(al, dal, argv, "".join(printed)).run()
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
VERSION = (1, 13, 2)
|
VERSION = (1, 16, 16)
|
||||||
CODENAME = "race the beam"
|
CODENAME = "COPYparty"
|
||||||
BUILD_DT = (2024, 5, 10)
|
BUILD_DT = (2025, 2, 28)
|
||||||
|
|
||||||
S_VERSION = ".".join(map(str, VERSION))
|
S_VERSION = ".".join(map(str, VERSION))
|
||||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -9,14 +9,14 @@ import queue
|
|||||||
|
|
||||||
from .__init__ import CORES, TYPE_CHECKING
|
from .__init__ import CORES, TYPE_CHECKING
|
||||||
from .broker_mpw import MpWorker
|
from .broker_mpw import MpWorker
|
||||||
from .broker_util import ExceptionalQueue, try_exec
|
from .broker_util import ExceptionalQueue, NotExQueue, try_exec
|
||||||
from .util import Daemon, mp
|
from .util import Daemon, mp
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any
|
from typing import Any, Union
|
||||||
|
|
||||||
|
|
||||||
class MProcess(mp.Process):
|
class MProcess(mp.Process):
|
||||||
@@ -43,6 +43,9 @@ class BrokerMp(object):
|
|||||||
self.procs = []
|
self.procs = []
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
|
|
||||||
|
self.retpend: dict[int, Any] = {}
|
||||||
|
self.retpend_mutex = threading.Lock()
|
||||||
|
|
||||||
self.num_workers = self.args.j or CORES
|
self.num_workers = self.args.j or CORES
|
||||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||||
for n in range(1, self.num_workers + 1):
|
for n in range(1, self.num_workers + 1):
|
||||||
@@ -54,6 +57,8 @@ class BrokerMp(object):
|
|||||||
self.procs.append(proc)
|
self.procs.append(proc)
|
||||||
proc.start()
|
proc.start()
|
||||||
|
|
||||||
|
Daemon(self.periodic, "mp-periodic")
|
||||||
|
|
||||||
def shutdown(self) -> None:
|
def shutdown(self) -> None:
|
||||||
self.log("broker", "shutting down")
|
self.log("broker", "shutting down")
|
||||||
for n, proc in enumerate(self.procs):
|
for n, proc in enumerate(self.procs):
|
||||||
@@ -76,6 +81,10 @@ class BrokerMp(object):
|
|||||||
for _, proc in enumerate(self.procs):
|
for _, proc in enumerate(self.procs):
|
||||||
proc.q_pend.put((0, "reload", []))
|
proc.q_pend.put((0, "reload", []))
|
||||||
|
|
||||||
|
def reload_sessions(self) -> None:
|
||||||
|
for _, proc in enumerate(self.procs):
|
||||||
|
proc.q_pend.put((0, "reload_sessions", []))
|
||||||
|
|
||||||
def collector(self, proc: MProcess) -> None:
|
def collector(self, proc: MProcess) -> None:
|
||||||
"""receive message from hub in other process"""
|
"""receive message from hub in other process"""
|
||||||
while True:
|
while True:
|
||||||
@@ -86,8 +95,10 @@ class BrokerMp(object):
|
|||||||
self.log(*args)
|
self.log(*args)
|
||||||
|
|
||||||
elif dest == "retq":
|
elif dest == "retq":
|
||||||
# response from previous ipc call
|
with self.retpend_mutex:
|
||||||
raise Exception("invalid broker_mp usage")
|
retq = self.retpend.pop(retq_id)
|
||||||
|
|
||||||
|
retq.put(args[0])
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# new ipc invoking managed service in hub
|
# new ipc invoking managed service in hub
|
||||||
@@ -104,8 +115,7 @@ class BrokerMp(object):
|
|||||||
if retq_id:
|
if retq_id:
|
||||||
proc.q_pend.put((retq_id, "retq", rv))
|
proc.q_pend.put((retq_id, "retq", rv))
|
||||||
|
|
||||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||||
|
|
||||||
# new non-ipc invoking managed service in hub
|
# new non-ipc invoking managed service in hub
|
||||||
obj = self.hub
|
obj = self.hub
|
||||||
for node in dest.split("."):
|
for node in dest.split("."):
|
||||||
@@ -117,17 +127,30 @@ class BrokerMp(object):
|
|||||||
retq.put(rv)
|
retq.put(rv)
|
||||||
return retq
|
return retq
|
||||||
|
|
||||||
|
def wask(self, dest: str, *args: Any) -> list[Union[ExceptionalQueue, NotExQueue]]:
|
||||||
|
# call from hub to workers
|
||||||
|
ret = []
|
||||||
|
for p in self.procs:
|
||||||
|
retq = ExceptionalQueue(1)
|
||||||
|
retq_id = id(retq)
|
||||||
|
with self.retpend_mutex:
|
||||||
|
self.retpend[retq_id] = retq
|
||||||
|
|
||||||
|
p.q_pend.put((retq_id, dest, list(args)))
|
||||||
|
ret.append(retq)
|
||||||
|
return ret
|
||||||
|
|
||||||
def say(self, dest: str, *args: Any) -> None:
|
def say(self, dest: str, *args: Any) -> None:
|
||||||
"""
|
"""
|
||||||
send message to non-hub component in other process,
|
send message to non-hub component in other process,
|
||||||
returns a Queue object which eventually contains the response if want_retval
|
returns a Queue object which eventually contains the response if want_retval
|
||||||
(not-impl here since nothing uses it yet)
|
(not-impl here since nothing uses it yet)
|
||||||
"""
|
"""
|
||||||
if dest == "listen":
|
if dest == "httpsrv.listen":
|
||||||
for p in self.procs:
|
for p in self.procs:
|
||||||
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
|
p.q_pend.put((0, dest, [args[0], len(self.procs)]))
|
||||||
|
|
||||||
elif dest == "set_netdevs":
|
elif dest == "httpsrv.set_netdevs":
|
||||||
for p in self.procs:
|
for p in self.procs:
|
||||||
p.q_pend.put((0, dest, list(args)))
|
p.q_pend.put((0, dest, list(args)))
|
||||||
|
|
||||||
@@ -136,3 +159,19 @@ class BrokerMp(object):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
raise Exception("what is " + str(dest))
|
raise Exception("what is " + str(dest))
|
||||||
|
|
||||||
|
def periodic(self) -> None:
|
||||||
|
while True:
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
tdli = {}
|
||||||
|
tdls = {}
|
||||||
|
qs = self.wask("httpsrv.read_dls")
|
||||||
|
for q in qs:
|
||||||
|
qr = q.get()
|
||||||
|
dli, dls = qr
|
||||||
|
tdli.update(dli)
|
||||||
|
tdls.update(dls)
|
||||||
|
tdl = (tdli, tdls)
|
||||||
|
for p in self.procs:
|
||||||
|
p.q_pend.put((0, "httpsrv.write_dls", tdl))
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ import queue
|
|||||||
|
|
||||||
from .__init__ import ANYWIN
|
from .__init__ import ANYWIN
|
||||||
from .authsrv import AuthSrv
|
from .authsrv import AuthSrv
|
||||||
from .broker_util import BrokerCli, ExceptionalQueue
|
from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
|
||||||
from .httpsrv import HttpSrv
|
from .httpsrv import HttpSrv
|
||||||
from .util import FAKE_MP, Daemon, HMaccas
|
from .util import FAKE_MP, Daemon, HMaccas
|
||||||
|
|
||||||
@@ -82,35 +82,40 @@ class MpWorker(BrokerCli):
|
|||||||
while True:
|
while True:
|
||||||
retq_id, dest, args = self.q_pend.get()
|
retq_id, dest, args = self.q_pend.get()
|
||||||
|
|
||||||
# self.logw("work: [{}]".format(d[0]))
|
if dest == "retq":
|
||||||
|
# response from previous ipc call
|
||||||
|
with self.retpend_mutex:
|
||||||
|
retq = self.retpend.pop(retq_id)
|
||||||
|
|
||||||
|
retq.put(args)
|
||||||
|
continue
|
||||||
|
|
||||||
if dest == "shutdown":
|
if dest == "shutdown":
|
||||||
self.httpsrv.shutdown()
|
self.httpsrv.shutdown()
|
||||||
self.logw("ok bye")
|
self.logw("ok bye")
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
return
|
return
|
||||||
|
|
||||||
elif dest == "reload":
|
if dest == "reload":
|
||||||
self.logw("mpw.asrv reloading")
|
self.logw("mpw.asrv reloading")
|
||||||
self.asrv.reload()
|
self.asrv.reload()
|
||||||
self.logw("mpw.asrv reloaded")
|
self.logw("mpw.asrv reloaded")
|
||||||
|
continue
|
||||||
|
|
||||||
elif dest == "listen":
|
if dest == "reload_sessions":
|
||||||
self.httpsrv.listen(args[0], args[1])
|
with self.asrv.mutex:
|
||||||
|
self.asrv.load_sessions()
|
||||||
|
continue
|
||||||
|
|
||||||
elif dest == "set_netdevs":
|
obj = self
|
||||||
self.httpsrv.set_netdevs(args[0])
|
for node in dest.split("."):
|
||||||
|
obj = getattr(obj, node)
|
||||||
|
|
||||||
elif dest == "retq":
|
rv = obj(*args) # type: ignore
|
||||||
# response from previous ipc call
|
if retq_id:
|
||||||
with self.retpend_mutex:
|
self.say("retq", rv, retq_id=retq_id)
|
||||||
retq = self.retpend.pop(retq_id)
|
|
||||||
|
|
||||||
retq.put(args)
|
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||||
|
|
||||||
else:
|
|
||||||
raise Exception("what is " + str(dest))
|
|
||||||
|
|
||||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
|
||||||
retq = ExceptionalQueue(1)
|
retq = ExceptionalQueue(1)
|
||||||
retq_id = id(retq)
|
retq_id = id(retq)
|
||||||
with self.retpend_mutex:
|
with self.retpend_mutex:
|
||||||
@@ -119,5 +124,5 @@ class MpWorker(BrokerCli):
|
|||||||
self.q_yield.put((retq_id, dest, list(args)))
|
self.q_yield.put((retq_id, dest, list(args)))
|
||||||
return retq
|
return retq
|
||||||
|
|
||||||
def say(self, dest: str, *args: Any) -> None:
|
def say(self, dest: str, *args: Any, retq_id=0) -> None:
|
||||||
self.q_yield.put((0, dest, list(args)))
|
self.q_yield.put((retq_id, dest, list(args)))
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import os
|
|||||||
import threading
|
import threading
|
||||||
|
|
||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .broker_util import BrokerCli, ExceptionalQueue, try_exec
|
from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
|
||||||
from .httpsrv import HttpSrv
|
from .httpsrv import HttpSrv
|
||||||
from .util import HMaccas
|
from .util import HMaccas
|
||||||
|
|
||||||
@@ -13,7 +13,7 @@ if TYPE_CHECKING:
|
|||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any
|
from typing import Any, Union
|
||||||
|
|
||||||
|
|
||||||
class BrokerThr(BrokerCli):
|
class BrokerThr(BrokerCli):
|
||||||
@@ -34,6 +34,7 @@ class BrokerThr(BrokerCli):
|
|||||||
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
|
self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)
|
||||||
self.httpsrv = HttpSrv(self, None)
|
self.httpsrv = HttpSrv(self, None)
|
||||||
self.reload = self.noop
|
self.reload = self.noop
|
||||||
|
self.reload_sessions = self.noop
|
||||||
|
|
||||||
def shutdown(self) -> None:
|
def shutdown(self) -> None:
|
||||||
# self.log("broker", "shutting down")
|
# self.log("broker", "shutting down")
|
||||||
@@ -42,26 +43,21 @@ class BrokerThr(BrokerCli):
|
|||||||
def noop(self) -> None:
|
def noop(self) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||||
|
|
||||||
# new ipc invoking managed service in hub
|
# new ipc invoking managed service in hub
|
||||||
obj = self.hub
|
obj = self.hub
|
||||||
for node in dest.split("."):
|
for node in dest.split("."):
|
||||||
obj = getattr(obj, node)
|
obj = getattr(obj, node)
|
||||||
|
|
||||||
rv = try_exec(True, obj, *args)
|
return NotExQueue(obj(*args)) # type: ignore
|
||||||
|
|
||||||
# pretend we're broker_mp
|
|
||||||
retq = ExceptionalQueue(1)
|
|
||||||
retq.put(rv)
|
|
||||||
return retq
|
|
||||||
|
|
||||||
def say(self, dest: str, *args: Any) -> None:
|
def say(self, dest: str, *args: Any) -> None:
|
||||||
if dest == "listen":
|
if dest == "httpsrv.listen":
|
||||||
self.httpsrv.listen(args[0], 1)
|
self.httpsrv.listen(args[0], 1)
|
||||||
return
|
return
|
||||||
|
|
||||||
if dest == "set_netdevs":
|
if dest == "httpsrv.set_netdevs":
|
||||||
self.httpsrv.set_netdevs(args[0])
|
self.httpsrv.set_netdevs(args[0])
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -70,4 +66,4 @@ class BrokerThr(BrokerCli):
|
|||||||
for node in dest.split("."):
|
for node in dest.split("."):
|
||||||
obj = getattr(obj, node)
|
obj = getattr(obj, node)
|
||||||
|
|
||||||
try_exec(False, obj, *args)
|
obj(*args) # type: ignore
|
||||||
|
|||||||
@@ -28,11 +28,23 @@ class ExceptionalQueue(Queue, object):
|
|||||||
if rv[1] == "pebkac":
|
if rv[1] == "pebkac":
|
||||||
raise Pebkac(*rv[2:])
|
raise Pebkac(*rv[2:])
|
||||||
else:
|
else:
|
||||||
raise Exception(rv[2])
|
raise rv[2]
|
||||||
|
|
||||||
return rv
|
return rv
|
||||||
|
|
||||||
|
|
||||||
|
class NotExQueue(object):
|
||||||
|
"""
|
||||||
|
BrokerThr uses this instead of ExceptionalQueue; 7x faster
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, rv: Any) -> None:
|
||||||
|
self.rv = rv
|
||||||
|
|
||||||
|
def get(self) -> Any:
|
||||||
|
return self.rv
|
||||||
|
|
||||||
|
|
||||||
class BrokerCli(object):
|
class BrokerCli(object):
|
||||||
"""
|
"""
|
||||||
helps mypy understand httpsrv.broker but still fails a few levels deeper,
|
helps mypy understand httpsrv.broker but still fails a few levels deeper,
|
||||||
@@ -48,7 +60,7 @@ class BrokerCli(object):
|
|||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
|
||||||
return ExceptionalQueue(1)
|
return ExceptionalQueue(1)
|
||||||
|
|
||||||
def say(self, dest: str, *args: Any) -> None:
|
def say(self, dest: str, *args: Any) -> None:
|
||||||
@@ -65,8 +77,8 @@ def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any:
|
|||||||
|
|
||||||
return ["exception", "pebkac", ex.code, str(ex)]
|
return ["exception", "pebkac", ex.code, str(ex)]
|
||||||
|
|
||||||
except:
|
except Exception as ex:
|
||||||
if not want_retval:
|
if not want_retval:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return ["exception", "stack", traceback.format_exc()]
|
return ["exception", "stack", ex]
|
||||||
|
|||||||
@@ -7,12 +7,12 @@ import shutil
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
from .__init__ import ANYWIN
|
from .__init__ import ANYWIN
|
||||||
from .util import Netdev, runcmd, wrename, wunlink
|
from .util import Netdev, load_resource, runcmd, wrename, wunlink
|
||||||
|
|
||||||
HAVE_CFSSL = True
|
HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from .util import RootLogger
|
from .util import NamedLogger, RootLogger
|
||||||
|
|
||||||
|
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
@@ -29,13 +29,15 @@ def ensure_cert(log: "RootLogger", args) -> None:
|
|||||||
|
|
||||||
i feel awful about this and so should they
|
i feel awful about this and so should they
|
||||||
"""
|
"""
|
||||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
with load_resource(args.E, "res/insecure.pem") as f:
|
||||||
|
cert_insec = f.read()
|
||||||
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
||||||
if not os.path.isfile(args.cert):
|
if not os.path.isfile(args.cert):
|
||||||
if cert_appdata != args.cert:
|
if cert_appdata != args.cert:
|
||||||
raise Exception("certificate file does not exist: " + args.cert)
|
raise Exception("certificate file does not exist: " + args.cert)
|
||||||
|
|
||||||
shutil.copy(cert_insec, args.cert)
|
with open(args.cert, "wb") as f:
|
||||||
|
f.write(cert_insec)
|
||||||
|
|
||||||
with open(args.cert, "rb") as f:
|
with open(args.cert, "rb") as f:
|
||||||
buf = f.read()
|
buf = f.read()
|
||||||
@@ -50,7 +52,9 @@ def ensure_cert(log: "RootLogger", args) -> None:
|
|||||||
raise Exception(m + "private key must appear before server certificate")
|
raise Exception(m + "private key must appear before server certificate")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if filecmp.cmp(args.cert, cert_insec):
|
with open(args.cert, "rb") as f:
|
||||||
|
active_cert = f.read()
|
||||||
|
if active_cert == cert_insec:
|
||||||
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
||||||
log("cert", t.format(args.cert), 3)
|
log("cert", t.format(args.cert), 3)
|
||||||
except:
|
except:
|
||||||
@@ -83,6 +87,8 @@ def _read_crt(args, fn):
|
|||||||
|
|
||||||
|
|
||||||
def _gen_ca(log: "RootLogger", args):
|
def _gen_ca(log: "RootLogger", args):
|
||||||
|
nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-ca", msg, c)
|
||||||
|
|
||||||
expiry = _read_crt(args, "ca.pem")[0]
|
expiry = _read_crt(args, "ca.pem")[0]
|
||||||
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
|
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
|
||||||
return
|
return
|
||||||
@@ -113,16 +119,18 @@ def _gen_ca(log: "RootLogger", args):
|
|||||||
|
|
||||||
bname = os.path.join(args.crt_dir, "ca")
|
bname = os.path.join(args.crt_dir, "ca")
|
||||||
try:
|
try:
|
||||||
wunlink(log, bname + ".key", VF)
|
wunlink(nlog, bname + ".key", VF)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
wrename(log, bname + "-key.pem", bname + ".key", VF)
|
wrename(nlog, bname + "-key.pem", bname + ".key", VF)
|
||||||
wunlink(log, bname + ".csr", VF)
|
wunlink(nlog, bname + ".csr", VF)
|
||||||
|
|
||||||
log("cert", "new ca OK", 2)
|
log("cert", "new ca OK", 2)
|
||||||
|
|
||||||
|
|
||||||
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||||
|
nlog: "NamedLogger" = lambda msg, c=0: log("cert-gen-srv", msg, c)
|
||||||
|
|
||||||
names = args.crt_ns.split(",") if args.crt_ns else []
|
names = args.crt_ns.split(",") if args.crt_ns else []
|
||||||
if not args.crt_exact:
|
if not args.crt_exact:
|
||||||
for n in names[:]:
|
for n in names[:]:
|
||||||
@@ -147,14 +155,22 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
|||||||
raise Exception("no useable cert found")
|
raise Exception("no useable cert found")
|
||||||
|
|
||||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
||||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
if expired:
|
||||||
|
raise Exception("old server-cert has expired")
|
||||||
|
|
||||||
for n in names:
|
for n in names:
|
||||||
if n not in inf["sans"]:
|
if n not in inf["sans"]:
|
||||||
raise Exception("does not have {}".format(n))
|
raise Exception("does not have {}".format(n))
|
||||||
if expired:
|
|
||||||
raise Exception("old server-cert has expired")
|
with load_resource(args.E, "res/insecure.pem") as f:
|
||||||
if not filecmp.cmp(args.cert, cert_insec):
|
cert_insec = f.read()
|
||||||
|
|
||||||
|
with open(args.cert, "rb") as f:
|
||||||
|
active_cert = f.read()
|
||||||
|
|
||||||
|
if active_cert and active_cert != cert_insec:
|
||||||
return
|
return
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
log("cert", "will create new server-cert; {}".format(ex))
|
log("cert", "will create new server-cert; {}".format(ex))
|
||||||
|
|
||||||
@@ -196,11 +212,11 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
|||||||
|
|
||||||
bname = os.path.join(args.crt_dir, "srv")
|
bname = os.path.join(args.crt_dir, "srv")
|
||||||
try:
|
try:
|
||||||
wunlink(log, bname + ".key", VF)
|
wunlink(nlog, bname + ".key", VF)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
wrename(log, bname + "-key.pem", bname + ".key", VF)
|
wrename(nlog, bname + "-key.pem", bname + ".key", VF)
|
||||||
wunlink(log, bname + ".csr", VF)
|
wunlink(nlog, bname + ".csr", VF)
|
||||||
|
|
||||||
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
|
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
|
||||||
ca = f.read()
|
ca = f.read()
|
||||||
|
|||||||
@@ -2,9 +2,12 @@
|
|||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
|
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
|
||||||
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
|
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nth nw p q s ss sss v z zv"
|
||||||
onedash = set(zs.split())
|
onedash = set(zs.split())
|
||||||
|
|
||||||
|
# verify that all volflags are documented here:
|
||||||
|
# grep volflag= __main__.py | sed -r 's/.*volflag=//;s/\).*//' | sort | uniq | while IFS= read -r x; do grep -E "\"$x(=[^ \"]+)?\": \"" cfg.py || printf '%s\n' "$x"; done
|
||||||
|
|
||||||
|
|
||||||
def vf_bmap() -> dict[str, str]:
|
def vf_bmap() -> dict[str, str]:
|
||||||
"""argv-to-volflag: simple bools"""
|
"""argv-to-volflag: simple bools"""
|
||||||
@@ -12,8 +15,9 @@ def vf_bmap() -> dict[str, str]:
|
|||||||
"dav_auth": "davauth",
|
"dav_auth": "davauth",
|
||||||
"dav_rt": "davrt",
|
"dav_rt": "davrt",
|
||||||
"ed": "dots",
|
"ed": "dots",
|
||||||
"never_symlink": "neversymlink",
|
"hardlink_only": "hardlinkonly",
|
||||||
"no_dedup": "copydupes",
|
"no_clone": "noclone",
|
||||||
|
"no_dirsz": "nodirsz",
|
||||||
"no_dupe": "nodupe",
|
"no_dupe": "nodupe",
|
||||||
"no_forget": "noforget",
|
"no_forget": "noforget",
|
||||||
"no_pipe": "nopipe",
|
"no_pipe": "nopipe",
|
||||||
@@ -23,6 +27,7 @@ def vf_bmap() -> dict[str, str]:
|
|||||||
"no_athumb": "dathumb",
|
"no_athumb": "dathumb",
|
||||||
}
|
}
|
||||||
for k in (
|
for k in (
|
||||||
|
"dedup",
|
||||||
"dotsrch",
|
"dotsrch",
|
||||||
"e2d",
|
"e2d",
|
||||||
"e2ds",
|
"e2ds",
|
||||||
@@ -35,14 +40,18 @@ def vf_bmap() -> dict[str, str]:
|
|||||||
"e2vp",
|
"e2vp",
|
||||||
"exp",
|
"exp",
|
||||||
"grid",
|
"grid",
|
||||||
|
"gsel",
|
||||||
"hardlink",
|
"hardlink",
|
||||||
"magic",
|
"magic",
|
||||||
|
"no_db_ip",
|
||||||
"no_sb_md",
|
"no_sb_md",
|
||||||
"no_sb_lg",
|
"no_sb_lg",
|
||||||
|
"nsort",
|
||||||
"og",
|
"og",
|
||||||
"og_no_head",
|
"og_no_head",
|
||||||
"og_s_title",
|
"og_s_title",
|
||||||
"rand",
|
"rand",
|
||||||
|
"rss",
|
||||||
"xdev",
|
"xdev",
|
||||||
"xlink",
|
"xlink",
|
||||||
"xvol",
|
"xvol",
|
||||||
@@ -57,6 +66,7 @@ def vf_vmap() -> dict[str, str]:
|
|||||||
"no_hash": "nohash",
|
"no_hash": "nohash",
|
||||||
"no_idx": "noidx",
|
"no_idx": "noidx",
|
||||||
"re_maxage": "scan",
|
"re_maxage": "scan",
|
||||||
|
"safe_dedup": "safededup",
|
||||||
"th_convt": "convt",
|
"th_convt": "convt",
|
||||||
"th_size": "thsize",
|
"th_size": "thsize",
|
||||||
"th_crop": "crop",
|
"th_crop": "crop",
|
||||||
@@ -64,10 +74,15 @@ def vf_vmap() -> dict[str, str]:
|
|||||||
}
|
}
|
||||||
for k in (
|
for k in (
|
||||||
"dbd",
|
"dbd",
|
||||||
|
"forget_ip",
|
||||||
|
"hsortn",
|
||||||
"html_head",
|
"html_head",
|
||||||
"lg_sbf",
|
"lg_sbf",
|
||||||
"md_sbf",
|
"md_sbf",
|
||||||
|
"lg_sba",
|
||||||
|
"md_sba",
|
||||||
"nrand",
|
"nrand",
|
||||||
|
"u2ow",
|
||||||
"og_desc",
|
"og_desc",
|
||||||
"og_site",
|
"og_site",
|
||||||
"og_th",
|
"og_th",
|
||||||
@@ -84,6 +99,8 @@ def vf_vmap() -> dict[str, str]:
|
|||||||
"unlist",
|
"unlist",
|
||||||
"u2abort",
|
"u2abort",
|
||||||
"u2ts",
|
"u2ts",
|
||||||
|
"ups_who",
|
||||||
|
"zip_who",
|
||||||
):
|
):
|
||||||
ret[k] = k
|
ret[k] = k
|
||||||
return ret
|
return ret
|
||||||
@@ -95,13 +112,16 @@ def vf_cmap() -> dict[str, str]:
|
|||||||
for k in (
|
for k in (
|
||||||
"exp_lg",
|
"exp_lg",
|
||||||
"exp_md",
|
"exp_md",
|
||||||
|
"ext_th",
|
||||||
"mte",
|
"mte",
|
||||||
"mth",
|
"mth",
|
||||||
"mtp",
|
"mtp",
|
||||||
|
"xac",
|
||||||
"xad",
|
"xad",
|
||||||
"xar",
|
"xar",
|
||||||
"xau",
|
"xau",
|
||||||
"xban",
|
"xban",
|
||||||
|
"xbc",
|
||||||
"xbd",
|
"xbd",
|
||||||
"xbr",
|
"xbr",
|
||||||
"xbu",
|
"xbu",
|
||||||
@@ -128,15 +148,19 @@ permdescs = {
|
|||||||
|
|
||||||
flagcats = {
|
flagcats = {
|
||||||
"uploads, general": {
|
"uploads, general": {
|
||||||
"nodupe": "rejects existing files (instead of symlinking them)",
|
"dedup": "enable symlink-based file deduplication",
|
||||||
"hardlink": "does dedup with hardlinks instead of symlinks",
|
"hardlink": "enable hardlink-based file deduplication,\nwith fallback on symlinks when that is impossible",
|
||||||
"neversymlink": "disables symlink fallback; full copy instead",
|
"hardlinkonly": "dedup with hardlink only, never symlink;\nmake a full copy if hardlink is impossible",
|
||||||
"copydupes": "disables dedup, always saves full copies of dupes",
|
"safededup": "verify on-disk data before using it for dedup",
|
||||||
|
"noclone": "take dupe data from clients, even if available on HDD",
|
||||||
|
"nodupe": "rejects existing files (instead of linking/cloning them)",
|
||||||
"sparse": "force use of sparse files, mainly for s3-backed storage",
|
"sparse": "force use of sparse files, mainly for s3-backed storage",
|
||||||
|
"nosparse": "deny use of sparse files, mainly for slow storage",
|
||||||
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
||||||
"nosub": "forces all uploads into the top folder of the vfs",
|
"nosub": "forces all uploads into the top folder of the vfs",
|
||||||
"magic": "enables filetype detection for nameless uploads",
|
"magic": "enables filetype detection for nameless uploads",
|
||||||
"gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
|
"gz": "allows server-side gzip compression of uploads with ?gz",
|
||||||
|
"xz": "allows server-side lzma compression of uploads with ?xz",
|
||||||
"pk": "forces server-side compression, optional arg: xz,9",
|
"pk": "forces server-side compression, optional arg: xz,9",
|
||||||
},
|
},
|
||||||
"upload rules": {
|
"upload rules": {
|
||||||
@@ -144,8 +168,10 @@ flagcats = {
|
|||||||
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
|
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
|
||||||
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
|
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
|
||||||
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||||
|
"medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
|
||||||
"rand": "force randomized filenames, 9 chars long by default",
|
"rand": "force randomized filenames, 9 chars long by default",
|
||||||
"nrand=N": "randomized filenames are N chars long",
|
"nrand=N": "randomized filenames are N chars long",
|
||||||
|
"u2ow=N": "overwrite existing files? 0=no 1=if-older 2=always",
|
||||||
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
|
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
|
||||||
"u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
|
"u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
|
||||||
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||||
@@ -157,13 +183,16 @@ flagcats = {
|
|||||||
"lifetime=3600": "uploads are deleted after 1 hour",
|
"lifetime=3600": "uploads are deleted after 1 hour",
|
||||||
},
|
},
|
||||||
"database, general": {
|
"database, general": {
|
||||||
"e2d": "enable database; makes files searchable + enables upload dedup",
|
"e2d": "enable database; makes files searchable + enables upload-undo",
|
||||||
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
|
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
|
||||||
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
|
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
|
||||||
"e2t": "enable multimedia indexing; makes it possible to search for tags",
|
"e2t": "enable multimedia indexing; makes it possible to search for tags",
|
||||||
"e2ts": "scan existing files for tags on startup; also sets -e2t",
|
"e2ts": "scan existing files for tags on startup; also sets -e2t",
|
||||||
"e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
|
"e2tsr": "delete all metadata from DB (full rescan); also sets -e2ts",
|
||||||
"d2ts": "disables metadata collection for existing files",
|
"d2ts": "disables metadata collection for existing files",
|
||||||
|
"e2v": "verify integrity on startup by hashing files and comparing to db",
|
||||||
|
"e2vu": "when e2v fails, update the db (assume on-disk files are good)",
|
||||||
|
"e2vp": "when e2v fails, panic and quit copyparty",
|
||||||
"d2ds": "disables onboot indexing, overrides -e2ds*",
|
"d2ds": "disables onboot indexing, overrides -e2ds*",
|
||||||
"d2t": "disables metadata collection, overrides -e2t*",
|
"d2t": "disables metadata collection, overrides -e2t*",
|
||||||
"d2v": "disables file verification, overrides -e2v*",
|
"d2v": "disables file verification, overrides -e2v*",
|
||||||
@@ -173,15 +202,20 @@ flagcats = {
|
|||||||
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
||||||
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
||||||
"noforget": "don't forget files when deleted from disk",
|
"noforget": "don't forget files when deleted from disk",
|
||||||
|
"forget_ip=43200": "forget uploader-IP after 30 days (GDPR)",
|
||||||
|
"no_db_ip": "never store uploader-IP in the db; disables unpost",
|
||||||
"fat32": "avoid excessive reindexing on android sdcardfs",
|
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||||
"xlink": "cross-volume dupe detection / linking",
|
"xlink": "cross-volume dupe detection / linking (dangerous)",
|
||||||
"xdev": "do not descend into other filesystems",
|
"xdev": "do not descend into other filesystems",
|
||||||
"xvol": "do not follow symlinks leaving the volume root",
|
"xvol": "do not follow symlinks leaving the volume root",
|
||||||
"dotsrch": "show dotfiles in search results",
|
"dotsrch": "show dotfiles in search results",
|
||||||
"nodotsrch": "hide dotfiles in search results (default)",
|
"nodotsrch": "hide dotfiles in search results (default)",
|
||||||
|
"srch_excl": "exclude search results with URL matching this regex",
|
||||||
},
|
},
|
||||||
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
|
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
|
||||||
|
"mte=artist,title": "media-tags to index/display",
|
||||||
|
"mth=fmt,res,ac": "media-tags to hide by default",
|
||||||
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
|
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
|
||||||
"mtp=ahash,vhash=media-hash.py": "collects two tags at once",
|
"mtp=ahash,vhash=media-hash.py": "collects two tags at once",
|
||||||
},
|
},
|
||||||
@@ -195,6 +229,7 @@ flagcats = {
|
|||||||
"crop": "center-cropping (y/n/fy/fn)",
|
"crop": "center-cropping (y/n/fy/fn)",
|
||||||
"th3x": "3x resolution (y/n/fy/fn)",
|
"th3x": "3x resolution (y/n/fy/fn)",
|
||||||
"convt": "conversion timeout in seconds",
|
"convt": "conversion timeout in seconds",
|
||||||
|
"ext_th=s=/b.png": "use /b.png as thumbnail for file-extension s",
|
||||||
},
|
},
|
||||||
"handlers\n(better explained in --help-handlers)": {
|
"handlers\n(better explained in --help-handlers)": {
|
||||||
"on404=PY": "handle 404s by executing PY file",
|
"on404=PY": "handle 404s by executing PY file",
|
||||||
@@ -204,6 +239,8 @@ flagcats = {
|
|||||||
"xbu=CMD": "execute CMD before a file upload starts",
|
"xbu=CMD": "execute CMD before a file upload starts",
|
||||||
"xau=CMD": "execute CMD after a file upload finishes",
|
"xau=CMD": "execute CMD after a file upload finishes",
|
||||||
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
|
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
|
||||||
|
"xbc=CMD": "execute CMD before a file copy",
|
||||||
|
"xac=CMD": "execute CMD after a file copy",
|
||||||
"xbr=CMD": "execute CMD before a file rename/move",
|
"xbr=CMD": "execute CMD before a file rename/move",
|
||||||
"xar=CMD": "execute CMD after a file rename/move",
|
"xar=CMD": "execute CMD after a file rename/move",
|
||||||
"xbd=CMD": "execute CMD before a file delete",
|
"xbd=CMD": "execute CMD before a file delete",
|
||||||
@@ -213,9 +250,14 @@ flagcats = {
|
|||||||
},
|
},
|
||||||
"client and ux": {
|
"client and ux": {
|
||||||
"grid": "show grid/thumbnails by default",
|
"grid": "show grid/thumbnails by default",
|
||||||
|
"gsel": "select files in grid by ctrl-click",
|
||||||
"sort": "default sort order",
|
"sort": "default sort order",
|
||||||
|
"nsort": "natural-sort of leading digits in filenames",
|
||||||
|
"hsortn": "number of sort-rules to add to media URLs",
|
||||||
"unlist": "dont list files matching REGEX",
|
"unlist": "dont list files matching REGEX",
|
||||||
"html_head=TXT": "includes TXT in the <head>, or @PATH for file at PATH",
|
"html_head=TXT": "includes TXT in the <head>, or @PATH for file at PATH",
|
||||||
|
"tcolor=#fc0": "theme color (a hint for webbrowsers, discord, etc.)",
|
||||||
|
"nodirsz": "don't show total folder size",
|
||||||
"robots": "allows indexing by search engines (default)",
|
"robots": "allows indexing by search engines (default)",
|
||||||
"norobots": "kindly asks search engines to leave",
|
"norobots": "kindly asks search engines to leave",
|
||||||
"no_sb_md": "disable js sandbox for markdown files",
|
"no_sb_md": "disable js sandbox for markdown files",
|
||||||
@@ -224,12 +266,40 @@ flagcats = {
|
|||||||
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||||
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||||
|
"md_sba": "value of iframe allow-prop for markdown-sandbox",
|
||||||
|
"lg_sba": "value of iframe allow-prop for *logue-sandbox",
|
||||||
"nohtml": "return html and markdown as text/html",
|
"nohtml": "return html and markdown as text/html",
|
||||||
},
|
},
|
||||||
|
"opengraph (discord embeds)": {
|
||||||
|
"og": "enable OG (disables hotlinking)",
|
||||||
|
"og_site": "sitename; defaults to --name, disable with '-'",
|
||||||
|
"og_desc": "description text for all files; disable with '-'",
|
||||||
|
"og_th=jf": "thumbnail format; j / jf / jf3 / w / w3 / ...",
|
||||||
|
"og_title_a": "audio title format; default: {{ artist }} - {{ title }}",
|
||||||
|
"og_title_v": "video title format; default: {{ title }}",
|
||||||
|
"og_title_i": "image title format; default: {{ title }}",
|
||||||
|
"og_title=foo": "fallback title if there's nothing in the db",
|
||||||
|
"og_s_title": "force default title; do not read from tags",
|
||||||
|
"og_tpl": "custom html; see --og-tpl in --help",
|
||||||
|
"og_no_head": "you want to add tags manually with og_tpl",
|
||||||
|
"og_ua": "if defined: only send OG html if useragent matches this regex",
|
||||||
|
},
|
||||||
|
"textfiles": {
|
||||||
|
"exp": "enable textfile expansion; see --help-exp",
|
||||||
|
"exp_md": "placeholders to expand in markdown files; see --help",
|
||||||
|
"exp_lg": "placeholders to expand in prologue/epilogue; see --help",
|
||||||
|
},
|
||||||
"others": {
|
"others": {
|
||||||
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
|
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
|
||||||
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
||||||
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
||||||
|
"dk=8": 'generates per-directory accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
||||||
|
"dks": "per-directory accesskeys allow browsing into subdirs",
|
||||||
|
"dky": 'allow seeing files (not folders) inside a specific folder\nwith "g" perm, and does not require a valid dirkey to do so',
|
||||||
|
"rss": "allow '?rss' URL suffix (experimental)",
|
||||||
|
"ups_who=2": "restrict viewing the list of recent uploads",
|
||||||
|
"zip_who=2": "restrict access to download-as-zip/tar",
|
||||||
|
"nopipe": "disable race-the-beam (download unfinished uploads)",
|
||||||
"mv_retry": "ms-windows: timeout for renaming busy files",
|
"mv_retry": "ms-windows: timeout for renaming busy files",
|
||||||
"rm_retry": "ms-windows: timeout for deleting busy files",
|
"rm_retry": "ms-windows: timeout for deleting busy files",
|
||||||
"davauth": "ask webdav clients to login for all folders",
|
"davauth": "ask webdav clients to login for all folders",
|
||||||
@@ -239,3 +309,10 @@ flagcats = {
|
|||||||
|
|
||||||
|
|
||||||
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
|
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
|
||||||
|
|
||||||
|
|
||||||
|
if True: # so it gets removed in release-builds
|
||||||
|
for fun in [vf_bmap, vf_cmap, vf_vmap]:
|
||||||
|
for k in fun().values():
|
||||||
|
if k not in flagdescs:
|
||||||
|
raise Exception("undocumented volflag: " + k)
|
||||||
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import importlib
|
import importlib
|
||||||
import sys
|
import sys
|
||||||
import xml.etree.ElementTree as ET
|
import xml.etree.ElementTree as ET
|
||||||
@@ -8,6 +11,10 @@ if True: # pylint: disable=using-constant-test
|
|||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class BadXML(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def get_ET() -> ET.XMLParser:
|
def get_ET() -> ET.XMLParser:
|
||||||
pn = "xml.etree.ElementTree"
|
pn = "xml.etree.ElementTree"
|
||||||
cn = "_elementtree"
|
cn = "_elementtree"
|
||||||
@@ -34,7 +41,7 @@ def get_ET() -> ET.XMLParser:
|
|||||||
XMLParser: ET.XMLParser = get_ET()
|
XMLParser: ET.XMLParser = get_ET()
|
||||||
|
|
||||||
|
|
||||||
class DXMLParser(XMLParser): # type: ignore
|
class _DXMLParser(XMLParser): # type: ignore
|
||||||
def __init__(self) -> None:
|
def __init__(self) -> None:
|
||||||
tb = ET.TreeBuilder()
|
tb = ET.TreeBuilder()
|
||||||
super(DXMLParser, self).__init__(target=tb)
|
super(DXMLParser, self).__init__(target=tb)
|
||||||
@@ -49,8 +56,12 @@ class DXMLParser(XMLParser): # type: ignore
|
|||||||
raise BadXML("{}, {}".format(a, ka))
|
raise BadXML("{}, {}".format(a, ka))
|
||||||
|
|
||||||
|
|
||||||
class BadXML(Exception):
|
class _NG(XMLParser): # type: ignore
|
||||||
pass
|
def __int__(self) -> None:
|
||||||
|
raise BadXML("dxml selftest failed")
|
||||||
|
|
||||||
|
|
||||||
|
DXMLParser = _DXMLParser
|
||||||
|
|
||||||
|
|
||||||
def parse_xml(txt: str) -> ET.Element:
|
def parse_xml(txt: str) -> ET.Element:
|
||||||
@@ -59,6 +70,40 @@ def parse_xml(txt: str) -> ET.Element:
|
|||||||
return parser.close() # type: ignore
|
return parser.close() # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
def selftest() -> bool:
|
||||||
|
qbe = r"""<!DOCTYPE d [
|
||||||
|
<!ENTITY a "nice_bakuretsu">
|
||||||
|
]>
|
||||||
|
<root>&a;&a;&a;</root>"""
|
||||||
|
|
||||||
|
emb = r"""<!DOCTYPE d [
|
||||||
|
<!ENTITY a SYSTEM "file:///etc/hostname">
|
||||||
|
]>
|
||||||
|
<root>&a;</root>"""
|
||||||
|
|
||||||
|
# future-proofing; there's never been any known vulns
|
||||||
|
# regarding DTDs and ET.XMLParser, but might as well
|
||||||
|
# block them since webdav-clients don't use them
|
||||||
|
dtd = r"""<!DOCTYPE d SYSTEM "a.dtd">
|
||||||
|
<root>a</root>"""
|
||||||
|
|
||||||
|
for txt in (qbe, emb, dtd):
|
||||||
|
try:
|
||||||
|
parse_xml(txt)
|
||||||
|
t = "WARNING: dxml selftest failed:\n%s\n"
|
||||||
|
print(t % (txt,), file=sys.stderr)
|
||||||
|
return False
|
||||||
|
except BadXML:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
DXML_OK = selftest()
|
||||||
|
if not DXML_OK:
|
||||||
|
DXMLParser = _NG
|
||||||
|
|
||||||
|
|
||||||
def mktnod(name: str, text: str) -> ET.Element:
|
def mktnod(name: str, text: str) -> ET.Element:
|
||||||
el = ET.Element(name)
|
el = ET.Element(name)
|
||||||
el.text = text
|
el.text = text
|
||||||
|
|||||||
@@ -9,12 +9,12 @@ import time
|
|||||||
from .__init__ import ANYWIN, MACOS
|
from .__init__ import ANYWIN, MACOS
|
||||||
from .authsrv import AXS, VFS
|
from .authsrv import AXS, VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import chkcmd, min_ex
|
from .util import chkcmd, min_ex, undot
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Optional, Union
|
from typing import Optional, Union
|
||||||
|
|
||||||
from .util import RootLogger
|
from .util import RootLogger, undot
|
||||||
|
|
||||||
|
|
||||||
class Fstab(object):
|
class Fstab(object):
|
||||||
@@ -42,17 +42,17 @@ class Fstab(object):
|
|||||||
self.cache = {}
|
self.cache = {}
|
||||||
|
|
||||||
fs = "ext4"
|
fs = "ext4"
|
||||||
msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
|
msg = "failed to determine filesystem at %r; assuming %s\n%s"
|
||||||
|
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
fs = "vfat"
|
fs = "vfat"
|
||||||
try:
|
try:
|
||||||
path = self._winpath(path)
|
path = self._winpath(path)
|
||||||
except:
|
except:
|
||||||
self.log(msg.format(path, fs, min_ex()), 3)
|
self.log(msg % (path, fs, min_ex()), 3)
|
||||||
return fs
|
return fs
|
||||||
|
|
||||||
path = path.lstrip("/")
|
path = undot(path)
|
||||||
try:
|
try:
|
||||||
return self.cache[path]
|
return self.cache[path]
|
||||||
except:
|
except:
|
||||||
@@ -61,11 +61,11 @@ class Fstab(object):
|
|||||||
try:
|
try:
|
||||||
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
|
fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
|
||||||
except:
|
except:
|
||||||
self.log(msg.format(path, fs, min_ex()), 3)
|
self.log(msg % (path, fs, min_ex()), 3)
|
||||||
|
|
||||||
fs = fs.lower()
|
fs = fs.lower()
|
||||||
self.cache[path] = fs
|
self.cache[path] = fs
|
||||||
self.log("found {} at {}".format(fs, path))
|
self.log("found %s at %r" % (fs, path))
|
||||||
return fs
|
return fs
|
||||||
|
|
||||||
def _winpath(self, path: str) -> str:
|
def _winpath(self, path: str) -> str:
|
||||||
@@ -119,12 +119,12 @@ class Fstab(object):
|
|||||||
self.srctab = srctab
|
self.srctab = srctab
|
||||||
|
|
||||||
def relabel(self, path: str, nval: str) -> None:
|
def relabel(self, path: str, nval: str) -> None:
|
||||||
assert self.tab
|
assert self.tab # !rm
|
||||||
self.cache = {}
|
self.cache = {}
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
path = self._winpath(path)
|
path = self._winpath(path)
|
||||||
|
|
||||||
path = path.lstrip("/")
|
path = undot(path)
|
||||||
ptn = re.compile(r"^[^\\/]*")
|
ptn = re.compile(r"^[^\\/]*")
|
||||||
vn, rem = self.tab._find(path)
|
vn, rem = self.tab._find(path)
|
||||||
if not self.trusted:
|
if not self.trusted:
|
||||||
@@ -156,7 +156,7 @@ class Fstab(object):
|
|||||||
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
self.log("failed to build tab:\n{}".format(min_ex()), 3)
|
||||||
self.build_fallback()
|
self.build_fallback()
|
||||||
|
|
||||||
assert self.tab
|
assert self.tab # !rm
|
||||||
ret = self.tab._find(path)[0]
|
ret = self.tab._find(path)[0]
|
||||||
if self.trusted or path == ret.vpath:
|
if self.trusted or path == ret.vpath:
|
||||||
return ret.realpath.split("/")[0]
|
return ret.realpath.split("/")[0]
|
||||||
@@ -167,6 +167,6 @@ class Fstab(object):
|
|||||||
if not self.tab:
|
if not self.tab:
|
||||||
self.build_fallback()
|
self.build_fallback()
|
||||||
|
|
||||||
assert self.tab
|
assert self.tab # !rm
|
||||||
ret = self.tab._find(path)[0]
|
ret = self.tab._find(path)[0]
|
||||||
return ret.realpath
|
return ret.realpath
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ from .__init__ import PY2, TYPE_CHECKING
|
|||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import (
|
from .util import (
|
||||||
|
VF_CAREFUL,
|
||||||
Daemon,
|
Daemon,
|
||||||
ODict,
|
ODict,
|
||||||
Pebkac,
|
Pebkac,
|
||||||
@@ -30,6 +31,7 @@ from .util import (
|
|||||||
runhook,
|
runhook,
|
||||||
sanitize_fn,
|
sanitize_fn,
|
||||||
vjoin,
|
vjoin,
|
||||||
|
wunlink,
|
||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -37,7 +39,10 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
import typing
|
import typing
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class FSE(FilesystemError):
|
class FSE(FilesystemError):
|
||||||
@@ -71,6 +76,7 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
else:
|
else:
|
||||||
raise AuthenticationFailed("banned")
|
raise AuthenticationFailed("banned")
|
||||||
|
|
||||||
|
args = self.hub.args
|
||||||
asrv = self.hub.asrv
|
asrv = self.hub.asrv
|
||||||
uname = "*"
|
uname = "*"
|
||||||
if username != "anonymous":
|
if username != "anonymous":
|
||||||
@@ -81,6 +87,9 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
uname = zs
|
uname = zs
|
||||||
break
|
break
|
||||||
|
|
||||||
|
if args.ipu and uname == "*":
|
||||||
|
uname = args.ipu_iu[args.ipu_nm.map(ip)]
|
||||||
|
|
||||||
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
||||||
g = self.hub.gpwd
|
g = self.hub.gpwd
|
||||||
if g.lim:
|
if g.lim:
|
||||||
@@ -139,6 +148,9 @@ class FtpFs(AbstractedFS):
|
|||||||
self.listdirinfo = self.listdir
|
self.listdirinfo = self.listdir
|
||||||
self.chdir(".")
|
self.chdir(".")
|
||||||
|
|
||||||
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
|
self.hub.log("ftpd", msg, c)
|
||||||
|
|
||||||
def v2a(
|
def v2a(
|
||||||
self,
|
self,
|
||||||
vpath: str,
|
vpath: str,
|
||||||
@@ -155,7 +167,7 @@ class FtpFs(AbstractedFS):
|
|||||||
t = "Unsupported characters in [{}]"
|
t = "Unsupported characters in [{}]"
|
||||||
raise FSE(t.format(vpath), 1)
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
fn = sanitize_fn(fn or "", "")
|
||||||
vpath = vjoin(rd, fn)
|
vpath = vjoin(rd, fn)
|
||||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||||
if not vfs.realpath:
|
if not vfs.realpath:
|
||||||
@@ -207,17 +219,37 @@ class FtpFs(AbstractedFS):
|
|||||||
w = "w" in mode or "a" in mode or "+" in mode
|
w = "w" in mode or "a" in mode or "+" in mode
|
||||||
|
|
||||||
ap = self.rv2a(filename, r, w)[0]
|
ap = self.rv2a(filename, r, w)[0]
|
||||||
|
self.validpath(ap)
|
||||||
if w:
|
if w:
|
||||||
try:
|
try:
|
||||||
st = bos.stat(ap)
|
st = bos.stat(ap)
|
||||||
td = time.time() - st.st_mtime
|
td = time.time() - st.st_mtime
|
||||||
|
need_unlink = True
|
||||||
except:
|
except:
|
||||||
|
need_unlink = False
|
||||||
td = 0
|
td = 0
|
||||||
|
|
||||||
if td < -1 or td > self.args.ftp_wt:
|
if w and need_unlink:
|
||||||
raise FSE("Cannot open existing file for writing")
|
if td >= -1 and td <= self.args.ftp_wt:
|
||||||
|
# within permitted timeframe; unlink and accept
|
||||||
|
do_it = True
|
||||||
|
elif self.args.no_del or self.args.ftp_no_ow:
|
||||||
|
# file too old, or overwrite not allowed; reject
|
||||||
|
do_it = False
|
||||||
|
else:
|
||||||
|
# allow overwrite if user has delete permission
|
||||||
|
# (avoids win2000 freaking out and deleting the server copy without uploading its own)
|
||||||
|
try:
|
||||||
|
self.rv2a(filename, False, True, False, True)
|
||||||
|
do_it = True
|
||||||
|
except:
|
||||||
|
do_it = False
|
||||||
|
|
||||||
|
if not do_it:
|
||||||
|
raise FSE("File already exists")
|
||||||
|
|
||||||
|
wunlink(self.log, ap, VF_CAREFUL)
|
||||||
|
|
||||||
self.validpath(ap)
|
|
||||||
return open(fsenc(ap), mode, self.args.iobuf)
|
return open(fsenc(ap), mode, self.args.iobuf)
|
||||||
|
|
||||||
def chdir(self, path: str) -> None:
|
def chdir(self, path: str) -> None:
|
||||||
@@ -264,6 +296,7 @@ class FtpFs(AbstractedFS):
|
|||||||
self.uname,
|
self.uname,
|
||||||
not self.args.no_scandir,
|
not self.args.no_scandir,
|
||||||
[[True, False], [False, True]],
|
[[True, False], [False, True]],
|
||||||
|
throw=True,
|
||||||
)
|
)
|
||||||
vfs_ls = [x[0] for x in vfs_ls1]
|
vfs_ls = [x[0] for x in vfs_ls1]
|
||||||
vfs_ls.extend(vfs_virt.keys())
|
vfs_ls.extend(vfs_virt.keys())
|
||||||
@@ -282,9 +315,20 @@ class FtpFs(AbstractedFS):
|
|||||||
# display write-only folders as empty
|
# display write-only folders as empty
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# return list of volumes
|
# return list of accessible volumes
|
||||||
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
|
ret = []
|
||||||
return list(sorted(list(r.keys())))
|
for vn in self.hub.asrv.vfs.all_vols.values():
|
||||||
|
if "/" in vn.vpath or not vn.vpath:
|
||||||
|
continue # only include toplevel-mounted vols
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.hub.asrv.vfs.get(vn.vpath, self.uname, True, False)
|
||||||
|
ret.append(vn.vpath)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
ret.sort()
|
||||||
|
return ret
|
||||||
|
|
||||||
def rmdir(self, path: str) -> None:
|
def rmdir(self, path: str) -> None:
|
||||||
ap = self.rv2a(path, d=True)[0]
|
ap = self.rv2a(path, d=True)[0]
|
||||||
@@ -314,7 +358,7 @@ class FtpFs(AbstractedFS):
|
|||||||
svp = join(self.cwd, src).lstrip("/")
|
svp = join(self.cwd, src).lstrip("/")
|
||||||
dvp = join(self.cwd, dst).lstrip("/")
|
dvp = join(self.cwd, dst).lstrip("/")
|
||||||
try:
|
try:
|
||||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
self.hub.up2k.handle_mv(self.uname, self.h.cli_ip, svp, dvp)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise FSE(str(ex))
|
raise FSE(str(ex))
|
||||||
|
|
||||||
@@ -432,15 +476,19 @@ class FtpHandler(FTPHandler):
|
|||||||
xbu = vfs.flags.get("xbu")
|
xbu = vfs.flags.get("xbu")
|
||||||
if xbu and not runhook(
|
if xbu and not runhook(
|
||||||
None,
|
None,
|
||||||
|
None,
|
||||||
|
self.hub.up2k,
|
||||||
|
"xbu.ftpd",
|
||||||
xbu,
|
xbu,
|
||||||
ap,
|
ap,
|
||||||
vfs.canonical(rem),
|
vp,
|
||||||
"",
|
"",
|
||||||
self.uname,
|
self.uname,
|
||||||
|
self.hub.asrv.vfs.get_perms(vp, self.uname),
|
||||||
0,
|
0,
|
||||||
0,
|
0,
|
||||||
self.cli_ip,
|
self.cli_ip,
|
||||||
0,
|
time.time(),
|
||||||
"",
|
"",
|
||||||
):
|
):
|
||||||
raise FSE("Upload blocked by xbu server config")
|
raise FSE("Upload blocked by xbu server config")
|
||||||
@@ -543,9 +591,15 @@ class Ftpd(object):
|
|||||||
if "::" in ips:
|
if "::" in ips:
|
||||||
ips.append("0.0.0.0")
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
|
ips = [x for x in ips if "unix:" not in x]
|
||||||
|
|
||||||
if self.args.ftp4:
|
if self.args.ftp4:
|
||||||
ips = [x for x in ips if ":" not in x]
|
ips = [x for x in ips if ":" not in x]
|
||||||
|
|
||||||
|
if not ips:
|
||||||
|
lgr.fatal("cannot start ftp-server; no compatible IPs in -i")
|
||||||
|
return
|
||||||
|
|
||||||
ips = list(ODict.fromkeys(ips)) # dedup
|
ips = list(ODict.fromkeys(ips)) # dedup
|
||||||
|
|
||||||
ioloop = IOLoop()
|
ioloop = IOLoop()
|
||||||
|
|||||||
2711
copyparty/httpcli.py
2711
copyparty/httpcli.py
File diff suppressed because it is too large
Load Diff
@@ -9,6 +9,9 @@ import threading # typechk
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_TLS"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
HAVE_SSL = True
|
HAVE_SSL = True
|
||||||
import ssl
|
import ssl
|
||||||
except:
|
except:
|
||||||
@@ -56,6 +59,8 @@ class HttpConn(object):
|
|||||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||||
self.pipes: Util.CachedDict = hsrv.pipes # mypy404
|
self.pipes: Util.CachedDict = hsrv.pipes # mypy404
|
||||||
|
self.ipu_iu: Optional[dict[str, str]] = hsrv.ipu_iu
|
||||||
|
self.ipu_nm: Optional[NetMap] = hsrv.ipu_nm
|
||||||
self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
|
self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
|
||||||
self.xff_nm: Optional[NetMap] = hsrv.xff_nm
|
self.xff_nm: Optional[NetMap] = hsrv.xff_nm
|
||||||
self.xff_lan: NetMap = hsrv.xff_lan # type: ignore
|
self.xff_lan: NetMap = hsrv.xff_lan # type: ignore
|
||||||
@@ -100,9 +105,6 @@ class HttpConn(object):
|
|||||||
self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
|
self.log_src = ("%s \033[%dm%d" % (ip, color, self.addr[1])).ljust(26)
|
||||||
return self.log_src
|
return self.log_src
|
||||||
|
|
||||||
def respath(self, res_name: str) -> str:
|
|
||||||
return os.path.join(self.E.mod, "web", res_name)
|
|
||||||
|
|
||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
self.log_func(self.log_src, msg, c)
|
self.log_func(self.log_src, msg, c)
|
||||||
|
|
||||||
@@ -162,6 +164,7 @@ class HttpConn(object):
|
|||||||
|
|
||||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||||
try:
|
try:
|
||||||
|
assert ssl # type: ignore # !rm
|
||||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||||
ctx.load_cert_chain(self.args.cert)
|
ctx.load_cert_chain(self.args.cert)
|
||||||
if self.args.ssl_ver:
|
if self.args.ssl_ver:
|
||||||
@@ -187,7 +190,7 @@ class HttpConn(object):
|
|||||||
|
|
||||||
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"):
|
||||||
ciphers = self.s.shared_ciphers()
|
ciphers = self.s.shared_ciphers()
|
||||||
assert ciphers
|
assert ciphers # !rm
|
||||||
overlap = [str(y[::-1]) for y in ciphers]
|
overlap = [str(y[::-1]) for y in ciphers]
|
||||||
self.log("TLS cipher overlap:" + "\n".join(overlap))
|
self.log("TLS cipher overlap:" + "\n".join(overlap))
|
||||||
for k, v in [
|
for k, v in [
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import base64
|
import hashlib
|
||||||
import math
|
import math
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -12,7 +12,7 @@ import time
|
|||||||
|
|
||||||
import queue
|
import queue
|
||||||
|
|
||||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
|
from .__init__ import ANYWIN, CORES, EXE, MACOS, PY2, TYPE_CHECKING, EnvParams, unicode
|
||||||
|
|
||||||
try:
|
try:
|
||||||
MNFE = ModuleNotFoundError
|
MNFE = ModuleNotFoundError
|
||||||
@@ -67,23 +67,38 @@ from .util import (
|
|||||||
Magician,
|
Magician,
|
||||||
Netdev,
|
Netdev,
|
||||||
NetMap,
|
NetMap,
|
||||||
absreal,
|
|
||||||
build_netmap,
|
build_netmap,
|
||||||
|
has_resource,
|
||||||
ipnorm,
|
ipnorm,
|
||||||
|
load_ipu,
|
||||||
|
load_resource,
|
||||||
min_ex,
|
min_ex,
|
||||||
shut_socket,
|
shut_socket,
|
||||||
spack,
|
spack,
|
||||||
start_log_thrs,
|
start_log_thrs,
|
||||||
start_stackmon,
|
start_stackmon,
|
||||||
|
ub64enc,
|
||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
from .authsrv import VFS
|
||||||
from .broker_util import BrokerCli
|
from .broker_util import BrokerCli
|
||||||
from .ssdp import SSDPr
|
from .ssdp import SSDPr
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
if not hasattr(socket, "AF_UNIX"):
|
||||||
|
setattr(socket, "AF_UNIX", -9001)
|
||||||
|
|
||||||
|
|
||||||
|
def load_jinja2_resource(E: EnvParams, name: str):
|
||||||
|
with load_resource(E, "web/" + name, "r") as f:
|
||||||
|
return f.read()
|
||||||
|
|
||||||
|
|
||||||
class HttpSrv(object):
|
class HttpSrv(object):
|
||||||
"""
|
"""
|
||||||
@@ -116,6 +131,12 @@ class HttpSrv(object):
|
|||||||
self.bans: dict[str, int] = {}
|
self.bans: dict[str, int] = {}
|
||||||
self.aclose: dict[str, int] = {}
|
self.aclose: dict[str, int] = {}
|
||||||
|
|
||||||
|
dli: dict[str, tuple[float, int, "VFS", str, str]] = {} # info
|
||||||
|
dls: dict[str, tuple[float, int]] = {} # state
|
||||||
|
self.dli = self.tdli = dli
|
||||||
|
self.dls = self.tdls = dls
|
||||||
|
self.iiam = '<img src="%s.cpr/iiam.gif?cache=i" />' % (self.args.SRS,)
|
||||||
|
|
||||||
self.bound: set[tuple[str, int]] = set()
|
self.bound: set[tuple[str, int]] = set()
|
||||||
self.name = "hsrv" + nsuf
|
self.name = "hsrv" + nsuf
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
@@ -131,6 +152,7 @@ class HttpSrv(object):
|
|||||||
self.t_periodic: Optional[threading.Thread] = None
|
self.t_periodic: Optional[threading.Thread] = None
|
||||||
|
|
||||||
self.u2fh = FHC()
|
self.u2fh = FHC()
|
||||||
|
self.u2sc: dict[str, tuple[int, "hashlib._Hash"]] = {}
|
||||||
self.pipes = CachedDict(0.2)
|
self.pipes = CachedDict(0.2)
|
||||||
self.metrics = Metrics(self)
|
self.metrics = Metrics(self)
|
||||||
self.nreq = 0
|
self.nreq = 0
|
||||||
@@ -146,23 +168,33 @@ class HttpSrv(object):
|
|||||||
self.u2idx_free: dict[str, U2idx] = {}
|
self.u2idx_free: dict[str, U2idx] = {}
|
||||||
self.u2idx_n = 0
|
self.u2idx_n = 0
|
||||||
|
|
||||||
|
assert jinja2 # type: ignore # !rm
|
||||||
env = jinja2.Environment()
|
env = jinja2.Environment()
|
||||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
env.loader = jinja2.FunctionLoader(lambda f: load_jinja2_resource(self.E, f))
|
||||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
jn = [
|
||||||
|
"browser",
|
||||||
|
"browser2",
|
||||||
|
"cf",
|
||||||
|
"md",
|
||||||
|
"mde",
|
||||||
|
"msg",
|
||||||
|
"rups",
|
||||||
|
"shares",
|
||||||
|
"splash",
|
||||||
|
"svcs",
|
||||||
|
]
|
||||||
self.j2 = {x: env.get_template(x + ".html") for x in jn}
|
self.j2 = {x: env.get_template(x + ".html") for x in jn}
|
||||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
self.prism = has_resource(self.E, "web/deps/prism.js.gz")
|
||||||
self.prism = os.path.exists(zs)
|
|
||||||
|
if self.args.ipu:
|
||||||
|
self.ipu_iu, self.ipu_nm = load_ipu(self.log, self.args.ipu)
|
||||||
|
else:
|
||||||
|
self.ipu_iu = self.ipu_nm = None
|
||||||
|
|
||||||
self.ipa_nm = build_netmap(self.args.ipa)
|
self.ipa_nm = build_netmap(self.args.ipa)
|
||||||
self.xff_nm = build_netmap(self.args.xff_src)
|
self.xff_nm = build_netmap(self.args.xff_src)
|
||||||
self.xff_lan = build_netmap("lan")
|
self.xff_lan = build_netmap("lan")
|
||||||
|
|
||||||
self.statics: set[str] = set()
|
|
||||||
self._build_statics()
|
|
||||||
|
|
||||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
|
||||||
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
|
|
||||||
|
|
||||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||||
if not self.args.no_dav:
|
if not self.args.no_dav:
|
||||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||||
@@ -177,6 +209,9 @@ class HttpSrv(object):
|
|||||||
self.start_threads(4)
|
self.start_threads(4)
|
||||||
|
|
||||||
if nid:
|
if nid:
|
||||||
|
self.tdli = {}
|
||||||
|
self.tdls = {}
|
||||||
|
|
||||||
if self.args.stackmon:
|
if self.args.stackmon:
|
||||||
start_stackmon(self.args.stackmon, nid)
|
start_stackmon(self.args.stackmon, nid)
|
||||||
|
|
||||||
@@ -193,14 +228,6 @@ class HttpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def _build_statics(self) -> None:
|
|
||||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
|
||||||
for fn in df:
|
|
||||||
ap = absreal(os.path.join(dp, fn))
|
|
||||||
self.statics.add(ap)
|
|
||||||
if ap.endswith(".gz"):
|
|
||||||
self.statics.add(ap[:-3])
|
|
||||||
|
|
||||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||||
ips = set()
|
ips = set()
|
||||||
for ip, _ in self.bound:
|
for ip, _ in self.bound:
|
||||||
@@ -221,7 +248,7 @@ class HttpSrv(object):
|
|||||||
if self.args.log_htp:
|
if self.args.log_htp:
|
||||||
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
|
||||||
|
|
||||||
assert self.tp_q
|
assert self.tp_q # !rm
|
||||||
for _ in range(n):
|
for _ in range(n):
|
||||||
self.tp_q.put(None)
|
self.tp_q.put(None)
|
||||||
|
|
||||||
@@ -240,15 +267,24 @@ class HttpSrv(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||||
|
tcp = sck.family != socket.AF_UNIX
|
||||||
|
|
||||||
if self.args.j != 1:
|
if self.args.j != 1:
|
||||||
# lost in the pickle; redefine
|
# lost in the pickle; redefine
|
||||||
if not ANYWIN or self.args.reuseaddr:
|
if not ANYWIN or self.args.reuseaddr:
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
|
||||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
if tcp:
|
||||||
|
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
|
||||||
sck.settimeout(None) # < does not inherit, ^ opts above do
|
sck.settimeout(None) # < does not inherit, ^ opts above do
|
||||||
|
|
||||||
ip, port = sck.getsockname()[:2]
|
if tcp:
|
||||||
|
ip, port = sck.getsockname()[:2]
|
||||||
|
else:
|
||||||
|
ip = re.sub(r"\.[0-9]+$", "", sck.getsockname().split("/")[-1])
|
||||||
|
port = 0
|
||||||
|
|
||||||
self.srvs.append(sck)
|
self.srvs.append(sck)
|
||||||
self.bound.add((ip, port))
|
self.bound.add((ip, port))
|
||||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||||
@@ -260,10 +296,19 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
def thr_listen(self, srv_sck: socket.socket) -> None:
|
def thr_listen(self, srv_sck: socket.socket) -> None:
|
||||||
"""listens on a shared tcp server"""
|
"""listens on a shared tcp server"""
|
||||||
ip, port = srv_sck.getsockname()[:2]
|
|
||||||
fno = srv_sck.fileno()
|
fno = srv_sck.fileno()
|
||||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
if srv_sck.family == socket.AF_UNIX:
|
||||||
msg = "subscribed @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
ip = re.sub(r"\.[0-9]+$", "", srv_sck.getsockname())
|
||||||
|
msg = "subscribed @ %s f%d p%d" % (ip, fno, os.getpid())
|
||||||
|
ip = ip.split("/")[-1]
|
||||||
|
port = 0
|
||||||
|
tcp = False
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ip, port = srv_sck.getsockname()[:2]
|
||||||
|
hip = "[%s]" % (ip,) if ":" in ip else ip
|
||||||
|
msg = "subscribed @ %s:%d f%d p%d" % (hip, port, fno, os.getpid())
|
||||||
|
|
||||||
self.log(self.name, msg)
|
self.log(self.name, msg)
|
||||||
|
|
||||||
Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
|
Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))
|
||||||
@@ -335,11 +380,13 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
sck, saddr = srv_sck.accept()
|
sck, saddr = srv_sck.accept()
|
||||||
cip, cport = saddr[:2]
|
if tcp:
|
||||||
if cip.startswith("::ffff:"):
|
cip = unicode(saddr[0])
|
||||||
cip = cip[7:]
|
if cip.startswith("::ffff:"):
|
||||||
|
cip = cip[7:]
|
||||||
addr = (cip, cport)
|
addr = (cip, saddr[1])
|
||||||
|
else:
|
||||||
|
addr = ("127.8.3.7", sck.fileno())
|
||||||
except (OSError, socket.error) as ex:
|
except (OSError, socket.error) as ex:
|
||||||
if self.stopping:
|
if self.stopping:
|
||||||
break
|
break
|
||||||
@@ -395,7 +442,7 @@ class HttpSrv(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def thr_poolw(self) -> None:
|
def thr_poolw(self) -> None:
|
||||||
assert self.tp_q
|
assert self.tp_q # !rm
|
||||||
while True:
|
while True:
|
||||||
task = self.tp_q.get()
|
task = self.tp_q.get()
|
||||||
if not task:
|
if not task:
|
||||||
@@ -507,8 +554,8 @@ class HttpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
|
# spack gives 4 lsb, take 3 lsb, get 4 ch
|
||||||
self.cb_v = v.decode("ascii")[-4:]
|
self.cb_v = ub64enc(spack(b">L", int(v))[1:]).decode("ascii")
|
||||||
self.cb_ts = time.time()
|
self.cb_ts = time.time()
|
||||||
return self.cb_v
|
return self.cb_v
|
||||||
|
|
||||||
@@ -539,3 +586,32 @@ class HttpSrv(object):
|
|||||||
ident += "a"
|
ident += "a"
|
||||||
|
|
||||||
self.u2idx_free[ident] = u2idx
|
self.u2idx_free[ident] = u2idx
|
||||||
|
|
||||||
|
def read_dls(
|
||||||
|
self,
|
||||||
|
) -> tuple[
|
||||||
|
dict[str, tuple[float, int, str, str, str]], dict[str, tuple[float, int]]
|
||||||
|
]:
|
||||||
|
"""
|
||||||
|
mp-broker asking for local dl-info + dl-state;
|
||||||
|
reduce overhead by sending just the vfs vpath
|
||||||
|
"""
|
||||||
|
dli = {k: (a, b, c.vpath, d, e) for k, (a, b, c, d, e) in self.dli.items()}
|
||||||
|
return (dli, self.dls)
|
||||||
|
|
||||||
|
def write_dls(
|
||||||
|
self,
|
||||||
|
sdli: dict[str, tuple[float, int, str, str, str]],
|
||||||
|
dls: dict[str, tuple[float, int]],
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
mp-broker pushing total dl-info + dl-state;
|
||||||
|
swap out the vfs vpath with the vfs node
|
||||||
|
"""
|
||||||
|
dli: dict[str, tuple[float, int, "VFS", str, str]] = {}
|
||||||
|
for k, (a, b, c, d, e) in sdli.items():
|
||||||
|
vn = self.asrv.vfs.all_nodes[c]
|
||||||
|
dli[k] = (a, b, vn, d, e)
|
||||||
|
|
||||||
|
self.tdli = dli
|
||||||
|
self.tdls = dls
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ class Ico(object):
|
|||||||
try:
|
try:
|
||||||
_, _, tw, th = pb.textbbox((0, 0), ext)
|
_, _, tw, th = pb.textbbox((0, 0), ext)
|
||||||
except:
|
except:
|
||||||
tw, th = pb.textsize(ext)
|
tw, th = pb.textsize(ext) # type: ignore
|
||||||
|
|
||||||
tw += len(ext)
|
tw += len(ext)
|
||||||
cw = tw // len(ext)
|
cw = tw // len(ext)
|
||||||
|
|||||||
@@ -25,6 +25,7 @@ from .stolen.dnslib import (
|
|||||||
DNSHeader,
|
DNSHeader,
|
||||||
DNSQuestion,
|
DNSQuestion,
|
||||||
DNSRecord,
|
DNSRecord,
|
||||||
|
set_avahi_379,
|
||||||
)
|
)
|
||||||
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
||||||
|
|
||||||
@@ -72,6 +73,9 @@ class MDNS(MCast):
|
|||||||
self.ngen = ngen
|
self.ngen = ngen
|
||||||
self.ttl = 300
|
self.ttl = 300
|
||||||
|
|
||||||
|
if not self.args.zm_nwa_1:
|
||||||
|
set_avahi_379()
|
||||||
|
|
||||||
zs = self.args.name + ".local."
|
zs = self.args.name + ".local."
|
||||||
zs = zs.encode("ascii", "replace").decode("ascii", "replace")
|
zs = zs.encode("ascii", "replace").decode("ascii", "replace")
|
||||||
self.hn = "-".join(x for x in zs.split("?") if x) or (
|
self.hn = "-".join(x for x in zs.split("?") if x) or (
|
||||||
@@ -292,6 +296,22 @@ class MDNS(MCast):
|
|||||||
def run2(self) -> None:
|
def run2(self) -> None:
|
||||||
last_hop = time.time()
|
last_hop = time.time()
|
||||||
ihop = self.args.mc_hop
|
ihop = self.args.mc_hop
|
||||||
|
|
||||||
|
try:
|
||||||
|
if self.args.no_poll:
|
||||||
|
raise Exception()
|
||||||
|
fd2sck = {}
|
||||||
|
srvpoll = select.poll()
|
||||||
|
for sck in self.srv:
|
||||||
|
fd = sck.fileno()
|
||||||
|
fd2sck[fd] = sck
|
||||||
|
srvpoll.register(fd, select.POLLIN)
|
||||||
|
except Exception as ex:
|
||||||
|
srvpoll = None
|
||||||
|
if not self.args.no_poll:
|
||||||
|
t = "WARNING: failed to poll(), will use select() instead: %r"
|
||||||
|
self.log(t % (ex,), 3)
|
||||||
|
|
||||||
while self.running:
|
while self.running:
|
||||||
timeout = (
|
timeout = (
|
||||||
0.02 + random.random() * 0.07
|
0.02 + random.random() * 0.07
|
||||||
@@ -300,8 +320,13 @@ class MDNS(MCast):
|
|||||||
if self.unsolicited
|
if self.unsolicited
|
||||||
else (last_hop + ihop if ihop else 180)
|
else (last_hop + ihop if ihop else 180)
|
||||||
)
|
)
|
||||||
rdy = select.select(self.srv, [], [], timeout)
|
if srvpoll:
|
||||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
pr = srvpoll.poll(timeout * 1000)
|
||||||
|
rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
|
||||||
|
else:
|
||||||
|
rdy = select.select(self.srv, [], [], timeout)
|
||||||
|
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||||
|
|
||||||
self.rx4.cln()
|
self.rx4.cln()
|
||||||
self.rx6.cln()
|
self.rx6.cln()
|
||||||
buf = b""
|
buf = b""
|
||||||
@@ -315,6 +340,9 @@ class MDNS(MCast):
|
|||||||
self.log("stopped", 2)
|
self.log("stopped", 2)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if self.args.zm_no_pe:
|
||||||
|
continue
|
||||||
|
|
||||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||||
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
||||||
)
|
)
|
||||||
@@ -340,7 +368,7 @@ class MDNS(MCast):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
self.srv = {}
|
self.srv.clear()
|
||||||
|
|
||||||
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
||||||
cip = addr[0]
|
cip = addr[0]
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ class Metrics(object):
|
|||||||
|
|
||||||
def tx(self, cli: "HttpCli") -> bool:
|
def tx(self, cli: "HttpCli") -> bool:
|
||||||
if not cli.avol:
|
if not cli.avol:
|
||||||
raise Pebkac(403, "not allowed for user " + cli.uname)
|
raise Pebkac(403, "'stats' not allowed for user " + cli.uname)
|
||||||
|
|
||||||
args = cli.args
|
args = cli.args
|
||||||
if not args.stats:
|
if not args.stats:
|
||||||
@@ -72,6 +72,9 @@ class Metrics(object):
|
|||||||
v = "{:.3f}".format(self.hsrv.t0)
|
v = "{:.3f}".format(self.hsrv.t0)
|
||||||
addug("cpp_boot_unixtime", "seconds", v, t)
|
addug("cpp_boot_unixtime", "seconds", v, t)
|
||||||
|
|
||||||
|
t = "number of active downloads"
|
||||||
|
addg("cpp_active_dl", str(len(self.hsrv.tdls)), t)
|
||||||
|
|
||||||
t = "number of open http(s) client connections"
|
t = "number of open http(s) client connections"
|
||||||
addg("cpp_http_conns", str(self.hsrv.ncli), t)
|
addg("cpp_http_conns", str(self.hsrv.ncli), t)
|
||||||
|
|
||||||
@@ -88,7 +91,7 @@ class Metrics(object):
|
|||||||
addg("cpp_total_bans", str(self.hsrv.nban), t)
|
addg("cpp_total_bans", str(self.hsrv.nban), t)
|
||||||
|
|
||||||
if not args.nos_vst:
|
if not args.nos_vst:
|
||||||
x = self.hsrv.broker.ask("up2k.get_state")
|
x = self.hsrv.broker.ask("up2k.get_state", True, "")
|
||||||
vs = json.loads(x.get())
|
vs = json.loads(x.get())
|
||||||
|
|
||||||
nvidle = 0
|
nvidle = 0
|
||||||
@@ -128,7 +131,7 @@ class Metrics(object):
|
|||||||
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
||||||
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
||||||
for vpath, vol in allvols:
|
for vpath, vol in allvols:
|
||||||
free, total = get_df(vol.realpath)
|
free, total, _ = get_df(vol.realpath, False)
|
||||||
if free is None or total is None:
|
if free is None or total is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
|||||||
import argparse
|
import argparse
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
import sys
|
import sys
|
||||||
@@ -32,6 +33,17 @@ if True: # pylint: disable=using-constant-test
|
|||||||
from .util import NamedLogger, RootLogger
|
from .util import NamedLogger, RootLogger
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_MUTAGEN"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
from mutagen import version # noqa: F401
|
||||||
|
|
||||||
|
HAVE_MUTAGEN = True
|
||||||
|
except:
|
||||||
|
HAVE_MUTAGEN = False
|
||||||
|
|
||||||
|
|
||||||
def have_ff(scmd: str) -> bool:
|
def have_ff(scmd: str) -> bool:
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
scmd += ".exe"
|
scmd += ".exe"
|
||||||
@@ -48,8 +60,11 @@ def have_ff(scmd: str) -> bool:
|
|||||||
return bool(shutil.which(scmd))
|
return bool(shutil.which(scmd))
|
||||||
|
|
||||||
|
|
||||||
HAVE_FFMPEG = have_ff("ffmpeg")
|
HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
|
||||||
HAVE_FFPROBE = have_ff("ffprobe")
|
HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")
|
||||||
|
|
||||||
|
CBZ_PICS = set("png jpg jpeg gif bmp tga tif tiff webp avif".split())
|
||||||
|
CBZ_01 = re.compile(r"(^|[^0-9v])0+[01]\b")
|
||||||
|
|
||||||
|
|
||||||
class MParser(object):
|
class MParser(object):
|
||||||
@@ -111,8 +126,11 @@ class MParser(object):
|
|||||||
raise Exception()
|
raise Exception()
|
||||||
|
|
||||||
|
|
||||||
def au_unpk(log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None) -> str:
|
def au_unpk(
|
||||||
|
log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
|
||||||
|
) -> str:
|
||||||
ret = ""
|
ret = ""
|
||||||
|
maxsz = 1024 * 1024 * 64
|
||||||
try:
|
try:
|
||||||
ext = abspath.split(".")[-1].lower()
|
ext = abspath.split(".")[-1].lower()
|
||||||
au, pk = fmt_map[ext].split(".")
|
au, pk = fmt_map[ext].split(".")
|
||||||
@@ -135,21 +153,48 @@ def au_unpk(log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optio
|
|||||||
zf = zipfile.ZipFile(abspath, "r")
|
zf = zipfile.ZipFile(abspath, "r")
|
||||||
zil = zf.infolist()
|
zil = zf.infolist()
|
||||||
zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
|
zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
|
||||||
|
if not zil:
|
||||||
|
raise Exception("no audio inside zip")
|
||||||
fi = zf.open(zil[0])
|
fi = zf.open(zil[0])
|
||||||
|
|
||||||
|
elif pk == "cbz":
|
||||||
|
import zipfile
|
||||||
|
|
||||||
|
zf = zipfile.ZipFile(abspath, "r")
|
||||||
|
znil = [(x.filename.lower(), x) for x in zf.infolist()]
|
||||||
|
nf = len(znil)
|
||||||
|
znil = [x for x in znil if x[0].split(".")[-1] in CBZ_PICS]
|
||||||
|
znil = [x for x in znil if "cover" in x[0]] or znil
|
||||||
|
znil = [x for x in znil if CBZ_01.search(x[0])] or znil
|
||||||
|
t = "cbz: %d files, %d hits" % (nf, len(znil))
|
||||||
|
if znil:
|
||||||
|
t += ", using " + znil[0][1].filename
|
||||||
|
log(t)
|
||||||
|
if not znil:
|
||||||
|
raise Exception("no images inside cbz")
|
||||||
|
fi = zf.open(znil[0][1])
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise Exception("unknown compression %s" % (pk,))
|
||||||
|
|
||||||
|
fsz = 0
|
||||||
with os.fdopen(fd, "wb") as fo:
|
with os.fdopen(fd, "wb") as fo:
|
||||||
while True:
|
while True:
|
||||||
buf = fi.read(32768)
|
buf = fi.read(32768)
|
||||||
if not buf:
|
if not buf:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
fsz += len(buf)
|
||||||
|
if fsz > maxsz:
|
||||||
|
raise Exception("zipbomb defused")
|
||||||
|
|
||||||
fo.write(buf)
|
fo.write(buf)
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
if ret:
|
if ret:
|
||||||
t = "failed to decompress audio file [%s]: %r"
|
t = "failed to decompress audio file %r: %r"
|
||||||
log(t % (abspath, ex))
|
log(t % (abspath, ex))
|
||||||
wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
|
wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
|
||||||
|
|
||||||
@@ -331,9 +376,7 @@ class MTag(object):
|
|||||||
|
|
||||||
if self.backend == "mutagen":
|
if self.backend == "mutagen":
|
||||||
self._get = self.get_mutagen
|
self._get = self.get_mutagen
|
||||||
try:
|
if not HAVE_MUTAGEN:
|
||||||
from mutagen import version # noqa: F401
|
|
||||||
except:
|
|
||||||
self.log("could not load Mutagen, trying FFprobe instead", c=3)
|
self.log("could not load Mutagen, trying FFprobe instead", c=3)
|
||||||
self.backend = "ffprobe"
|
self.backend = "ffprobe"
|
||||||
|
|
||||||
@@ -459,7 +502,7 @@ class MTag(object):
|
|||||||
sv = str(zv).split("/")[0].strip().lstrip("0")
|
sv = str(zv).split("/")[0].strip().lstrip("0")
|
||||||
ret[sk] = sv or 0
|
ret[sk] = sv or 0
|
||||||
|
|
||||||
# normalize key notation to rkeobo
|
# normalize key notation to rekobo
|
||||||
okey = ret.get("key")
|
okey = ret.get("key")
|
||||||
if okey:
|
if okey:
|
||||||
key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m")
|
key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m")
|
||||||
@@ -539,7 +582,7 @@ class MTag(object):
|
|||||||
raise Exception()
|
raise Exception()
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
if self.args.mtag_v:
|
if self.args.mtag_v:
|
||||||
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
|
self.log("mutagen-err [%s] @ %r" % (ex, abspath), "90")
|
||||||
|
|
||||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||||
|
|
||||||
@@ -573,7 +616,7 @@ class MTag(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if k == ".aq":
|
if k == ".aq":
|
||||||
v /= 1000
|
v /= 1000 # type: ignore
|
||||||
|
|
||||||
if k == "ac" and v.startswith("mp4a.40."):
|
if k == "ac" and v.startswith("mp4a.40."):
|
||||||
v = "aac"
|
v = "aac"
|
||||||
@@ -656,8 +699,8 @@ class MTag(object):
|
|||||||
ret[tag] = zj[tag]
|
ret[tag] = zj[tag]
|
||||||
except:
|
except:
|
||||||
if self.args.mtag_v:
|
if self.args.mtag_v:
|
||||||
t = "mtag error: tagname {}, parser {}, file {} => {}"
|
t = "mtag error: tagname %r, parser %r, file %r => %r"
|
||||||
self.log(t.format(tagname, parser.bin, abspath, min_ex()))
|
self.log(t % (tagname, parser.bin, abspath, min_ex()), 6)
|
||||||
|
|
||||||
if ap != abspath:
|
if ap != abspath:
|
||||||
wunlink(self.log, ap, VF_CAREFUL)
|
wunlink(self.log, ap, VF_CAREFUL)
|
||||||
|
|||||||
@@ -163,6 +163,7 @@ class MCast(object):
|
|||||||
sck.settimeout(None)
|
sck.settimeout(None)
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
try:
|
try:
|
||||||
|
# safe for this purpose; https://lwn.net/Articles/853637/
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -4,27 +4,33 @@ from __future__ import print_function, unicode_literals
|
|||||||
import argparse
|
import argparse
|
||||||
import base64
|
import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
from .__init__ import unicode
|
from .__init__ import unicode
|
||||||
|
|
||||||
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_ARGON2"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
HAVE_ARGON2 = True
|
||||||
|
from argon2 import __version__ as argon2ver
|
||||||
|
except:
|
||||||
|
HAVE_ARGON2 = False
|
||||||
|
|
||||||
|
|
||||||
class PWHash(object):
|
class PWHash(object):
|
||||||
def __init__(self, args: argparse.Namespace):
|
def __init__(self, args: argparse.Namespace):
|
||||||
self.args = args
|
self.args = args
|
||||||
|
|
||||||
try:
|
zsl = args.ah_alg.split(",")
|
||||||
alg, ac = args.ah_alg.split(",")
|
alg = zsl[0]
|
||||||
except:
|
|
||||||
alg = args.ah_alg
|
|
||||||
ac = {}
|
|
||||||
|
|
||||||
if alg == "none":
|
if alg == "none":
|
||||||
alg = ""
|
alg = ""
|
||||||
|
|
||||||
self.alg = alg
|
self.alg = alg
|
||||||
self.ac = ac
|
self.ac = zsl[1:]
|
||||||
if not alg:
|
if not alg:
|
||||||
self.on = False
|
self.on = False
|
||||||
self.hash = unicode
|
self.hash = unicode
|
||||||
@@ -80,17 +86,23 @@ class PWHash(object):
|
|||||||
its = 2
|
its = 2
|
||||||
blksz = 8
|
blksz = 8
|
||||||
para = 4
|
para = 4
|
||||||
|
ramcap = 0 # openssl 1.1 = 32 MiB
|
||||||
try:
|
try:
|
||||||
cost = 2 << int(self.ac[0])
|
cost = 2 << int(self.ac[0])
|
||||||
its = int(self.ac[1])
|
its = int(self.ac[1])
|
||||||
blksz = int(self.ac[2])
|
blksz = int(self.ac[2])
|
||||||
para = int(self.ac[3])
|
para = int(self.ac[3])
|
||||||
|
ramcap = int(self.ac[4]) * 1024 * 1024
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
cfg = {"salt": self.salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
|
||||||
|
if ramcap:
|
||||||
|
cfg["maxmem"] = ramcap
|
||||||
|
|
||||||
ret = plain.encode("utf-8")
|
ret = plain.encode("utf-8")
|
||||||
for _ in range(its):
|
for _ in range(its):
|
||||||
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
|
ret = hashlib.scrypt(ret, **cfg)
|
||||||
|
|
||||||
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
|
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ from types import SimpleNamespace
|
|||||||
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
|
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
|
||||||
from .authsrv import LEELOO_DALLAS, VFS
|
from .authsrv import LEELOO_DALLAS, VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import Daemon, min_ex, pybin, runhook
|
from .util import Daemon, absreal, min_ex, pybin, runhook, vjoin
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
@@ -151,6 +151,8 @@ class SMB(object):
|
|||||||
def _uname(self) -> str:
|
def _uname(self) -> str:
|
||||||
if self.noacc:
|
if self.noacc:
|
||||||
return LEELOO_DALLAS
|
return LEELOO_DALLAS
|
||||||
|
if not self.asrv.acct:
|
||||||
|
return "*"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# you found it! my single worst bit of code so far
|
# you found it! my single worst bit of code so far
|
||||||
@@ -187,7 +189,9 @@ class SMB(object):
|
|||||||
|
|
||||||
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
|
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
|
||||||
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
|
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
|
||||||
return vfs, vfs.canonical(rem)
|
if not vfs.realpath:
|
||||||
|
raise Exception("unmapped vfs")
|
||||||
|
return vfs, vjoin(vfs.realpath, rem)
|
||||||
|
|
||||||
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
|
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
|
||||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||||
@@ -195,6 +199,8 @@ class SMB(object):
|
|||||||
uname = self._uname()
|
uname = self._uname()
|
||||||
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
|
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
|
||||||
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
|
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
|
||||||
|
if not vfs.realpath:
|
||||||
|
raise Exception("unmapped vfs")
|
||||||
_, vfs_ls, vfs_virt = vfs.ls(
|
_, vfs_ls, vfs_virt = vfs.ls(
|
||||||
rem, uname, not self.args.no_scandir, [[False, False]]
|
rem, uname, not self.args.no_scandir, [[False, False]]
|
||||||
)
|
)
|
||||||
@@ -209,7 +215,7 @@ class SMB(object):
|
|||||||
sz = 112 * 2 # ['.', '..']
|
sz = 112 * 2 # ['.', '..']
|
||||||
for n, fn in enumerate(ls):
|
for n, fn in enumerate(ls):
|
||||||
if sz >= 64000:
|
if sz >= 64000:
|
||||||
t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
|
t = "listing only %d of %d files (%d byte) in /%s for performance; see --smb-nwa-1"
|
||||||
warning(t, n, len(ls), sz, vpath)
|
warning(t, n, len(ls), sz, vpath)
|
||||||
break
|
break
|
||||||
|
|
||||||
@@ -238,11 +244,26 @@ class SMB(object):
|
|||||||
t = "blocked write (no-write-acc %s): /%s @%s"
|
t = "blocked write (no-write-acc %s): /%s @%s"
|
||||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||||
|
|
||||||
|
ap = absreal(ap)
|
||||||
xbu = vfs.flags.get("xbu")
|
xbu = vfs.flags.get("xbu")
|
||||||
if xbu and not runhook(
|
if xbu and not runhook(
|
||||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
|
self.nlog,
|
||||||
|
None,
|
||||||
|
self.hub.up2k,
|
||||||
|
"xbu.smb",
|
||||||
|
xbu,
|
||||||
|
ap,
|
||||||
|
vpath,
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
"1.7.6.2",
|
||||||
|
time.time(),
|
||||||
|
"",
|
||||||
):
|
):
|
||||||
yeet("blocked by xbu server config: " + vpath)
|
yeet("blocked by xbu server config: %r" % (vpath,))
|
||||||
|
|
||||||
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
|
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
|
||||||
if wr:
|
if wr:
|
||||||
@@ -297,7 +318,7 @@ class SMB(object):
|
|||||||
t = "blocked rename (no-move-acc %s): /%s @%s"
|
t = "blocked rename (no-move-acc %s): /%s @%s"
|
||||||
yeet(t % (vfs1.axs.umove, vp1, uname))
|
yeet(t % (vfs1.axs.umove, vp1, uname))
|
||||||
|
|
||||||
self.hub.up2k.handle_mv(uname, vp1, vp2)
|
self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
|
||||||
try:
|
try:
|
||||||
bos.makedirs(ap2)
|
bos.makedirs(ap2)
|
||||||
except:
|
except:
|
||||||
|
|||||||
@@ -5,11 +5,11 @@ import errno
|
|||||||
import re
|
import re
|
||||||
import select
|
import select
|
||||||
import socket
|
import socket
|
||||||
from email.utils import formatdate
|
import time
|
||||||
|
|
||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .multicast import MC_Sck, MCast
|
from .multicast import MC_Sck, MCast
|
||||||
from .util import CachedSet, html_escape, min_ex
|
from .util import CachedSet, formatdate, html_escape, min_ex
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .broker_util import BrokerCli
|
from .broker_util import BrokerCli
|
||||||
@@ -84,7 +84,7 @@ class SSDPr(object):
|
|||||||
name = self.args.doctitle
|
name = self.args.doctitle
|
||||||
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
||||||
hc.reply(zs.encode("utf-8", "replace"))
|
hc.reply(zs.encode("utf-8", "replace"))
|
||||||
return False # close connectino
|
return False # close connection
|
||||||
|
|
||||||
|
|
||||||
class SSDPd(MCast):
|
class SSDPd(MCast):
|
||||||
@@ -141,9 +141,29 @@ class SSDPd(MCast):
|
|||||||
self.log("stopped", 2)
|
self.log("stopped", 2)
|
||||||
|
|
||||||
def run2(self) -> None:
|
def run2(self) -> None:
|
||||||
|
try:
|
||||||
|
if self.args.no_poll:
|
||||||
|
raise Exception()
|
||||||
|
fd2sck = {}
|
||||||
|
srvpoll = select.poll()
|
||||||
|
for sck in self.srv:
|
||||||
|
fd = sck.fileno()
|
||||||
|
fd2sck[fd] = sck
|
||||||
|
srvpoll.register(fd, select.POLLIN)
|
||||||
|
except Exception as ex:
|
||||||
|
srvpoll = None
|
||||||
|
if not self.args.no_poll:
|
||||||
|
t = "WARNING: failed to poll(), will use select() instead: %r"
|
||||||
|
self.log(t % (ex,), 3)
|
||||||
|
|
||||||
while self.running:
|
while self.running:
|
||||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
if srvpoll:
|
||||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
pr = srvpoll.poll((self.args.z_chk or 180) * 1000)
|
||||||
|
rx = [fd2sck[x[0]] for x in pr if x[1] & select.POLLIN]
|
||||||
|
else:
|
||||||
|
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||||
|
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||||
|
|
||||||
self.rxc.cln()
|
self.rxc.cln()
|
||||||
buf = b""
|
buf = b""
|
||||||
addr = ("0", 0)
|
addr = ("0", 0)
|
||||||
@@ -168,7 +188,7 @@ class SSDPd(MCast):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
self.srv = {}
|
self.srv.clear()
|
||||||
|
|
||||||
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
||||||
cip = addr[0]
|
cip = addr[0]
|
||||||
@@ -209,7 +229,7 @@ CONFIGID.UPNP.ORG: 1
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
v4 = srv.ip.replace("::ffff:", "")
|
v4 = srv.ip.replace("::ffff:", "")
|
||||||
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
|
zs = zs.format(formatdate(), v4, srv.hport, self.args.zsid)
|
||||||
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
||||||
srv.sck.sendto(zb, addr[:2])
|
srv.sck.sendto(zb, addr[:2])
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
|
||||||
import re
|
import re
|
||||||
import stat
|
import stat
|
||||||
import tarfile
|
import tarfile
|
||||||
|
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
|
||||||
|
from .authsrv import AuthSrv
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .sutil import StreamArc, errdesc
|
from .sutil import StreamArc, errdesc
|
||||||
from .util import Daemon, fsenc, min_ex
|
from .util import Daemon, fsenc, min_ex
|
||||||
@@ -45,12 +45,12 @@ class StreamTar(StreamArc):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
log: "NamedLogger",
|
log: "NamedLogger",
|
||||||
args: argparse.Namespace,
|
asrv: AuthSrv,
|
||||||
fgen: Generator[dict[str, Any], None, None],
|
fgen: Generator[dict[str, Any], None, None],
|
||||||
cmp: str = "",
|
cmp: str = "",
|
||||||
**kwargs: Any
|
**kwargs: Any
|
||||||
):
|
):
|
||||||
super(StreamTar, self).__init__(log, args, fgen)
|
super(StreamTar, self).__init__(log, asrv, fgen)
|
||||||
|
|
||||||
self.ci = 0
|
self.ci = 0
|
||||||
self.co = 0
|
self.co = 0
|
||||||
@@ -148,7 +148,7 @@ class StreamTar(StreamArc):
|
|||||||
errors.append((f["vp"], ex))
|
errors.append((f["vp"], ex))
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
self.errf, txt = errdesc(errors)
|
self.errf, txt = errdesc(self.asrv.vfs, errors)
|
||||||
self.log("\n".join(([repr(self.errf)] + txt[1:])))
|
self.log("\n".join(([repr(self.errf)] + txt[1:])))
|
||||||
self.ser(self.errf)
|
self.ser(self.errf)
|
||||||
|
|
||||||
|
|||||||
@@ -8,10 +8,16 @@ from itertools import chain
|
|||||||
from .bimap import Bimap, BimapError
|
from .bimap import Bimap, BimapError
|
||||||
from .bit import get_bits, set_bits
|
from .bit import get_bits, set_bits
|
||||||
from .buffer import BufferError
|
from .buffer import BufferError
|
||||||
from .label import DNSBuffer, DNSLabel
|
from .label import DNSBuffer, DNSLabel, set_avahi_379
|
||||||
from .ranges import IP4, IP6, H, I, check_bytes
|
from .ranges import IP4, IP6, H, I, check_bytes
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
range = xrange
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class DNSError(Exception):
|
class DNSError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -420,7 +426,7 @@ class RR(object):
|
|||||||
if rdlength:
|
if rdlength:
|
||||||
rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
|
rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
|
||||||
else:
|
else:
|
||||||
rdata = ""
|
rdata = RD(b"a")
|
||||||
return cls(rname, rtype, rclass, ttl, rdata)
|
return cls(rname, rtype, rclass, ttl, rdata)
|
||||||
except (BufferError, BimapError) as e:
|
except (BufferError, BimapError) as e:
|
||||||
raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
|
raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
|
||||||
|
|||||||
@@ -11,6 +11,23 @@ LDH = set(range(33, 127))
|
|||||||
ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")
|
ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")
|
||||||
|
|
||||||
|
|
||||||
|
avahi_379 = 0
|
||||||
|
|
||||||
|
|
||||||
|
def set_avahi_379():
|
||||||
|
global avahi_379
|
||||||
|
avahi_379 = 1
|
||||||
|
|
||||||
|
|
||||||
|
def log_avahi_379(args):
|
||||||
|
global avahi_379
|
||||||
|
if avahi_379 == 2:
|
||||||
|
return
|
||||||
|
avahi_379 = 2
|
||||||
|
t = "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d];\n\033[35m NOTE: this is probably avahi-bug #379, packet corruption in Avahi's mDNS-reflection feature. Copyparty has a workaround and is OK, but other devices need either --zm4 or --zm6"
|
||||||
|
raise BufferError(t % args)
|
||||||
|
|
||||||
|
|
||||||
class DNSLabelError(Exception):
|
class DNSLabelError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -96,8 +113,11 @@ class DNSBuffer(Buffer):
|
|||||||
)
|
)
|
||||||
if pointer < self.offset:
|
if pointer < self.offset:
|
||||||
self.offset = pointer
|
self.offset = pointer
|
||||||
|
elif avahi_379:
|
||||||
|
log_avahi_379((self.offset, pointer, len(self.data)))
|
||||||
|
label.extend(b"a")
|
||||||
|
break
|
||||||
else:
|
else:
|
||||||
|
|
||||||
raise BufferError(
|
raise BufferError(
|
||||||
"Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
|
"Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
|
||||||
% (self.offset, pointer, len(self.data))
|
% (self.offset, pointer, len(self.data))
|
||||||
|
|||||||
@@ -11,7 +11,21 @@ import os
|
|||||||
|
|
||||||
from ._shared import IP, Adapter
|
from ._shared import IP, Adapter
|
||||||
|
|
||||||
if os.name == "nt":
|
|
||||||
|
def nope(include_unconfigured=False):
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
S390X = os.uname().machine == "s390x"
|
||||||
|
except:
|
||||||
|
S390X = False
|
||||||
|
|
||||||
|
|
||||||
|
if os.environ.get("PRTY_NO_IFADDR") or S390X:
|
||||||
|
# s390x deadlocks at libc.getifaddrs
|
||||||
|
get_adapters = nope
|
||||||
|
elif os.name == "nt":
|
||||||
from ._win32 import get_adapters
|
from ._win32 import get_adapters
|
||||||
elif os.name == "posix":
|
elif os.name == "posix":
|
||||||
from ._posix import get_adapters
|
from ._posix import get_adapters
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ if not PY2:
|
|||||||
U: Callable[[str], str] = str
|
U: Callable[[str], str] = str
|
||||||
else:
|
else:
|
||||||
U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
U = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||||
|
range = xrange # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||||
|
|
||||||
|
|
||||||
class Adapter(object):
|
class Adapter(object):
|
||||||
|
|||||||
@@ -16,6 +16,11 @@ if True: # pylint: disable=using-constant-test
|
|||||||
|
|
||||||
from typing import Callable, List, Optional, Tuple, Union
|
from typing import Callable, List, Optional, Tuple, Union
|
||||||
|
|
||||||
|
try:
|
||||||
|
range = xrange
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def num_char_count_bits(ver: int) -> int:
|
def num_char_count_bits(ver: int) -> int:
|
||||||
return 16 if (ver + 7) // 17 else 8
|
return 16 if (ver + 7) // 17 else 8
|
||||||
@@ -589,3 +594,20 @@ def _get_bit(x: int, i: int) -> bool:
|
|||||||
|
|
||||||
class DataTooLongError(ValueError):
|
class DataTooLongError(ValueError):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def qr2svg(qr: QrCode, border: int) -> str:
|
||||||
|
parts: list[str] = []
|
||||||
|
for y in range(qr.size):
|
||||||
|
sy = border + y
|
||||||
|
for x in range(qr.size):
|
||||||
|
if qr.modules[y][x]:
|
||||||
|
parts.append("M%d,%dh1v1h-1z" % (border + x, sy))
|
||||||
|
t = """\
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" viewBox="0 0 {0} {0}" stroke="none">
|
||||||
|
<rect width="100%" height="100%" fill="#F7F7F7"/>
|
||||||
|
<path d="{1}" fill="#111111"/>
|
||||||
|
</svg>
|
||||||
|
"""
|
||||||
|
return t.format(qr.size + border * 2, " ".join(parts))
|
||||||
|
|||||||
@@ -1,15 +1,15 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
|
||||||
import os
|
import os
|
||||||
import tempfile
|
import tempfile
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from .__init__ import CORES
|
from .__init__ import CORES
|
||||||
|
from .authsrv import VFS, AuthSrv
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .th_cli import ThumbCli
|
from .th_cli import ThumbCli
|
||||||
from .util import UTC, vjoin
|
from .util import UTC, vjoin, vol_san
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Generator, Optional
|
from typing import Any, Generator, Optional
|
||||||
@@ -21,12 +21,13 @@ class StreamArc(object):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
log: "NamedLogger",
|
log: "NamedLogger",
|
||||||
args: argparse.Namespace,
|
asrv: AuthSrv,
|
||||||
fgen: Generator[dict[str, Any], None, None],
|
fgen: Generator[dict[str, Any], None, None],
|
||||||
**kwargs: Any
|
**kwargs: Any
|
||||||
):
|
):
|
||||||
self.log = log
|
self.log = log
|
||||||
self.args = args
|
self.asrv = asrv
|
||||||
|
self.args = asrv.args
|
||||||
self.fgen = fgen
|
self.fgen = fgen
|
||||||
self.stopped = False
|
self.stopped = False
|
||||||
|
|
||||||
@@ -103,15 +104,20 @@ def enthumb(
|
|||||||
return f
|
return f
|
||||||
|
|
||||||
|
|
||||||
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
def errdesc(
|
||||||
|
vfs: VFS, errors: list[tuple[str, str]]
|
||||||
|
) -> tuple[dict[str, Any], list[str]]:
|
||||||
report = ["copyparty failed to add the following files to the archive:", ""]
|
report = ["copyparty failed to add the following files to the archive:", ""]
|
||||||
|
|
||||||
for fn, err in errors:
|
for fn, err in errors:
|
||||||
report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
|
report.extend([" file: %r" % (fn,), "error: %s" % (err,), ""])
|
||||||
|
|
||||||
|
btxt = "\r\n".join(report).encode("utf-8", "replace")
|
||||||
|
btxt = vol_san(list(vfs.all_vols.values()), btxt)
|
||||||
|
|
||||||
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
|
with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
|
||||||
tf_path = tf.name
|
tf_path = tf.name
|
||||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
tf.write(btxt)
|
||||||
|
|
||||||
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
|
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
|
||||||
|
|
||||||
|
|||||||
@@ -2,8 +2,6 @@
|
|||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import base64
|
|
||||||
import calendar
|
|
||||||
import errno
|
import errno
|
||||||
import gzip
|
import gzip
|
||||||
import logging
|
import logging
|
||||||
@@ -16,7 +14,7 @@ import string
|
|||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime
|
||||||
|
|
||||||
# from inspect import currentframe
|
# from inspect import currentframe
|
||||||
# print(currentframe().f_lineno)
|
# print(currentframe().f_lineno)
|
||||||
@@ -28,18 +26,32 @@ if True: # pylint: disable=using-constant-test
|
|||||||
import typing
|
import typing
|
||||||
from typing import Any, Optional, Union
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, E, EnvParams, unicode
|
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
||||||
from .authsrv import BAD_CFG, AuthSrv
|
from .authsrv import BAD_CFG, AuthSrv
|
||||||
from .cert import ensure_cert
|
from .cert import ensure_cert
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
||||||
|
from .pwhash import HAVE_ARGON2
|
||||||
from .tcpsrv import TcpSrv
|
from .tcpsrv import TcpSrv
|
||||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
from .th_srv import (
|
||||||
|
HAVE_AVIF,
|
||||||
|
HAVE_FFMPEG,
|
||||||
|
HAVE_FFPROBE,
|
||||||
|
HAVE_HEIF,
|
||||||
|
HAVE_PIL,
|
||||||
|
HAVE_VIPS,
|
||||||
|
HAVE_WEBP,
|
||||||
|
ThumbSrv,
|
||||||
|
)
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
from .util import (
|
from .util import (
|
||||||
DEF_EXP,
|
DEF_EXP,
|
||||||
DEF_MTE,
|
DEF_MTE,
|
||||||
DEF_MTH,
|
DEF_MTH,
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
|
HAVE_PSUTIL,
|
||||||
|
HAVE_SQLITE3,
|
||||||
|
HAVE_ZMQ,
|
||||||
|
URL_BUG,
|
||||||
UTC,
|
UTC,
|
||||||
VERSIONS,
|
VERSIONS,
|
||||||
Daemon,
|
Daemon,
|
||||||
@@ -50,12 +62,15 @@ from .util import (
|
|||||||
alltrace,
|
alltrace,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
build_netmap,
|
build_netmap,
|
||||||
|
expat_ver,
|
||||||
|
load_ipu,
|
||||||
min_ex,
|
min_ex,
|
||||||
mp,
|
mp,
|
||||||
odfusion,
|
odfusion,
|
||||||
pybin,
|
pybin,
|
||||||
start_log_thrs,
|
start_log_thrs,
|
||||||
start_stackmon,
|
start_stackmon,
|
||||||
|
ub64enc,
|
||||||
)
|
)
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -65,6 +80,9 @@ if TYPE_CHECKING:
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class SvcHub(object):
|
class SvcHub(object):
|
||||||
"""
|
"""
|
||||||
@@ -89,20 +107,23 @@ class SvcHub(object):
|
|||||||
self.argv = argv
|
self.argv = argv
|
||||||
self.E: EnvParams = args.E
|
self.E: EnvParams = args.E
|
||||||
self.no_ansi = args.no_ansi
|
self.no_ansi = args.no_ansi
|
||||||
|
self.tz = UTC if args.log_utc else None
|
||||||
self.logf: Optional[typing.TextIO] = None
|
self.logf: Optional[typing.TextIO] = None
|
||||||
self.logf_base_fn = ""
|
self.logf_base_fn = ""
|
||||||
|
self.is_dut = False # running in unittest; always False
|
||||||
self.stop_req = False
|
self.stop_req = False
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.stopped = False
|
self.stopped = False
|
||||||
self.reload_req = False
|
self.reload_req = False
|
||||||
self.reloading = 0
|
self.reload_mutex = threading.Lock()
|
||||||
self.stop_cond = threading.Condition()
|
self.stop_cond = threading.Condition()
|
||||||
self.nsigs = 3
|
self.nsigs = 3
|
||||||
self.retcode = 0
|
self.retcode = 0
|
||||||
self.httpsrv_up = 0
|
self.httpsrv_up = 0
|
||||||
|
|
||||||
self.log_mutex = threading.Lock()
|
self.log_mutex = threading.Lock()
|
||||||
self.next_day = 0
|
self.cday = 0
|
||||||
|
self.cmon = 0
|
||||||
self.tstack = 0.0
|
self.tstack = 0.0
|
||||||
|
|
||||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||||
@@ -193,6 +214,38 @@ class SvcHub(object):
|
|||||||
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
|
||||||
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
self.log("root", t % (args.s_rd_sz, args.iobuf), 3)
|
||||||
|
|
||||||
|
zs = ""
|
||||||
|
if args.th_ram_max < 0.22:
|
||||||
|
zs = "generate thumbnails"
|
||||||
|
elif args.th_ram_max < 1:
|
||||||
|
zs = "generate audio waveforms or spectrograms"
|
||||||
|
if zs:
|
||||||
|
t = "WARNING: --th-ram-max is very small (%.2f GiB); will not be able to %s"
|
||||||
|
self.log("root", t % (args.th_ram_max, zs), 3)
|
||||||
|
|
||||||
|
if args.chpw and args.idp_h_usr:
|
||||||
|
t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
|
||||||
|
self.log("root", t, 1)
|
||||||
|
raise Exception(t)
|
||||||
|
|
||||||
|
noch = set()
|
||||||
|
for zs in args.chpw_no or []:
|
||||||
|
zsl = [x.strip() for x in zs.split(",")]
|
||||||
|
noch.update([x for x in zsl if x])
|
||||||
|
args.chpw_no = noch
|
||||||
|
|
||||||
|
if args.ipu:
|
||||||
|
iu, nm = load_ipu(self.log, args.ipu, True)
|
||||||
|
setattr(args, "ipu_iu", iu)
|
||||||
|
setattr(args, "ipu_nm", nm)
|
||||||
|
|
||||||
|
if not self.args.no_ses:
|
||||||
|
self.setup_session_db()
|
||||||
|
|
||||||
|
args.shr1 = ""
|
||||||
|
if args.shr:
|
||||||
|
self.setup_share_db()
|
||||||
|
|
||||||
bri = "zy"[args.theme % 2 :][:1]
|
bri = "zy"[args.theme % 2 :][:1]
|
||||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||||
@@ -232,6 +285,8 @@ class SvcHub(object):
|
|||||||
|
|
||||||
self.up2k = Up2k(self)
|
self.up2k = Up2k(self)
|
||||||
|
|
||||||
|
self._feature_test()
|
||||||
|
|
||||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||||
if not HAVE_VIPS:
|
if not HAVE_VIPS:
|
||||||
decs.pop("vips", None)
|
decs.pop("vips", None)
|
||||||
@@ -336,6 +391,160 @@ class SvcHub(object):
|
|||||||
|
|
||||||
self.broker = Broker(self)
|
self.broker = Broker(self)
|
||||||
|
|
||||||
|
# create netmaps early to avoid firewall gaps,
|
||||||
|
# but the mutex blocks multiprocessing startup
|
||||||
|
for zs in "ipu_iu ftp_ipa_nm tftp_ipa_nm".split():
|
||||||
|
try:
|
||||||
|
getattr(args, zs).mutex = threading.Lock()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def setup_session_db(self) -> None:
|
||||||
|
if not HAVE_SQLITE3:
|
||||||
|
self.args.no_ses = True
|
||||||
|
t = "WARNING: sqlite3 not available; disabling sessions, will use plaintext passwords in cookies"
|
||||||
|
self.log("root", t, 3)
|
||||||
|
return
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
create = True
|
||||||
|
db_path = self.args.ses_db
|
||||||
|
self.log("root", "opening sessions-db %s" % (db_path,))
|
||||||
|
for n in range(2):
|
||||||
|
try:
|
||||||
|
db = sqlite3.connect(db_path)
|
||||||
|
cur = db.cursor()
|
||||||
|
try:
|
||||||
|
cur.execute("select count(*) from us").fetchone()
|
||||||
|
create = False
|
||||||
|
break
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
except Exception as ex:
|
||||||
|
if n:
|
||||||
|
raise
|
||||||
|
t = "sessions-db corrupt; deleting and recreating: %r"
|
||||||
|
self.log("root", t % (ex,), 3)
|
||||||
|
try:
|
||||||
|
cur.close() # type: ignore
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
db.close() # type: ignore
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
os.unlink(db_path)
|
||||||
|
|
||||||
|
sch = [
|
||||||
|
r"create table kv (k text, v int)",
|
||||||
|
r"create table us (un text, si text, t0 int)",
|
||||||
|
# username, session-id, creation-time
|
||||||
|
r"create index us_un on us(un)",
|
||||||
|
r"create index us_si on us(si)",
|
||||||
|
r"create index us_t0 on us(t0)",
|
||||||
|
r"insert into kv values ('sver', 1)",
|
||||||
|
]
|
||||||
|
|
||||||
|
assert db # type: ignore # !rm
|
||||||
|
assert cur # type: ignore # !rm
|
||||||
|
if create:
|
||||||
|
for cmd in sch:
|
||||||
|
cur.execute(cmd)
|
||||||
|
self.log("root", "created new sessions-db")
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
cur.close()
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
def setup_share_db(self) -> None:
|
||||||
|
al = self.args
|
||||||
|
if not HAVE_SQLITE3:
|
||||||
|
self.log("root", "sqlite3 not available; disabling --shr", 1)
|
||||||
|
al.shr = ""
|
||||||
|
return
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
al.shr = al.shr.strip("/")
|
||||||
|
if "/" in al.shr or not al.shr:
|
||||||
|
t = "config error: --shr must be the name of a virtual toplevel directory to put shares inside"
|
||||||
|
self.log("root", t, 1)
|
||||||
|
raise Exception(t)
|
||||||
|
|
||||||
|
al.shr = "/%s/" % (al.shr,)
|
||||||
|
al.shr1 = al.shr[1:]
|
||||||
|
|
||||||
|
create = True
|
||||||
|
modified = False
|
||||||
|
db_path = self.args.shr_db
|
||||||
|
self.log("root", "opening shares-db %s" % (db_path,))
|
||||||
|
for n in range(2):
|
||||||
|
try:
|
||||||
|
db = sqlite3.connect(db_path)
|
||||||
|
cur = db.cursor()
|
||||||
|
try:
|
||||||
|
cur.execute("select count(*) from sh").fetchone()
|
||||||
|
create = False
|
||||||
|
break
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
except Exception as ex:
|
||||||
|
if n:
|
||||||
|
raise
|
||||||
|
t = "shares-db corrupt; deleting and recreating: %r"
|
||||||
|
self.log("root", t % (ex,), 3)
|
||||||
|
try:
|
||||||
|
cur.close() # type: ignore
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
db.close() # type: ignore
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
os.unlink(db_path)
|
||||||
|
|
||||||
|
sch1 = [
|
||||||
|
r"create table kv (k text, v int)",
|
||||||
|
r"create table sh (k text, pw text, vp text, pr text, st int, un text, t0 int, t1 int)",
|
||||||
|
# sharekey, password, src, perms, numFiles, owner, created, expires
|
||||||
|
]
|
||||||
|
sch2 = [
|
||||||
|
r"create table sf (k text, vp text)",
|
||||||
|
r"create index sf_k on sf(k)",
|
||||||
|
r"create index sh_k on sh(k)",
|
||||||
|
r"create index sh_t1 on sh(t1)",
|
||||||
|
]
|
||||||
|
|
||||||
|
assert db # type: ignore # !rm
|
||||||
|
assert cur # type: ignore # !rm
|
||||||
|
if create:
|
||||||
|
dver = 2
|
||||||
|
modified = True
|
||||||
|
for cmd in sch1 + sch2:
|
||||||
|
cur.execute(cmd)
|
||||||
|
self.log("root", "created new shares-db")
|
||||||
|
else:
|
||||||
|
(dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]
|
||||||
|
|
||||||
|
if dver == 1:
|
||||||
|
modified = True
|
||||||
|
for cmd in sch2:
|
||||||
|
cur.execute(cmd)
|
||||||
|
cur.execute("update sh set st = 0")
|
||||||
|
self.log("root", "shares-db schema upgrade ok")
|
||||||
|
|
||||||
|
if modified:
|
||||||
|
for cmd in [
|
||||||
|
r"delete from kv where k = 'sver'",
|
||||||
|
r"insert into kv values ('sver', %d)" % (2,),
|
||||||
|
]:
|
||||||
|
cur.execute(cmd)
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
cur.close()
|
||||||
|
db.close()
|
||||||
|
|
||||||
def start_ftpd(self) -> None:
|
def start_ftpd(self) -> None:
|
||||||
time.sleep(30)
|
time.sleep(30)
|
||||||
|
|
||||||
@@ -420,6 +629,59 @@ class SvcHub(object):
|
|||||||
|
|
||||||
Daemon(self.sd_notify, "sd-notify")
|
Daemon(self.sd_notify, "sd-notify")
|
||||||
|
|
||||||
|
def _feature_test(self) -> None:
|
||||||
|
fok = []
|
||||||
|
fng = []
|
||||||
|
t_ff = "transcode audio, create spectrograms, video thumbnails"
|
||||||
|
to_check = [
|
||||||
|
(HAVE_SQLITE3, "sqlite", "sessions and file/media indexing"),
|
||||||
|
(HAVE_PIL, "pillow", "image thumbnails (plenty fast)"),
|
||||||
|
(HAVE_VIPS, "vips", "image thumbnails (faster, eats more ram)"),
|
||||||
|
(HAVE_WEBP, "pillow-webp", "create thumbnails as webp files"),
|
||||||
|
(HAVE_FFMPEG, "ffmpeg", t_ff + ", good-but-slow image thumbnails"),
|
||||||
|
(HAVE_FFPROBE, "ffprobe", t_ff + ", read audio/media tags"),
|
||||||
|
(HAVE_MUTAGEN, "mutagen", "read audio tags (ffprobe is better but slower)"),
|
||||||
|
(HAVE_ARGON2, "argon2", "secure password hashing (advanced users only)"),
|
||||||
|
(HAVE_ZMQ, "pyzmq", "send zeromq messages from event-hooks"),
|
||||||
|
(HAVE_HEIF, "pillow-heif", "read .heif images with pillow (rarely useful)"),
|
||||||
|
(HAVE_AVIF, "pillow-avif", "read .avif images with pillow (rarely useful)"),
|
||||||
|
]
|
||||||
|
if ANYWIN:
|
||||||
|
to_check += [
|
||||||
|
(HAVE_PSUTIL, "psutil", "improved plugin cleanup (rarely useful)")
|
||||||
|
]
|
||||||
|
|
||||||
|
verbose = self.args.deps
|
||||||
|
if verbose:
|
||||||
|
self.log("dependencies", "")
|
||||||
|
|
||||||
|
for have, feat, what in to_check:
|
||||||
|
lst = fok if have else fng
|
||||||
|
lst.append((feat, what))
|
||||||
|
if verbose:
|
||||||
|
zi = 2 if have else 5
|
||||||
|
sgot = "found" if have else "missing"
|
||||||
|
t = "%7s: %s \033[36m(%s)"
|
||||||
|
self.log("dependencies", t % (sgot, feat, what), zi)
|
||||||
|
|
||||||
|
if verbose:
|
||||||
|
self.log("dependencies", "")
|
||||||
|
return
|
||||||
|
|
||||||
|
sok = ", ".join(x[0] for x in fok)
|
||||||
|
sng = ", ".join(x[0] for x in fng)
|
||||||
|
|
||||||
|
t = ""
|
||||||
|
if sok:
|
||||||
|
t += "OK: \033[32m" + sok
|
||||||
|
if sng:
|
||||||
|
if t:
|
||||||
|
t += ", "
|
||||||
|
t += "\033[0mNG: \033[35m" + sng
|
||||||
|
|
||||||
|
t += "\033[0m, see --deps"
|
||||||
|
self.log("dependencies", t, 6)
|
||||||
|
|
||||||
def _check_env(self) -> None:
|
def _check_env(self) -> None:
|
||||||
try:
|
try:
|
||||||
files = os.listdir(E.cfg)
|
files = os.listdir(E.cfg)
|
||||||
@@ -437,6 +699,15 @@ class SvcHub(object):
|
|||||||
if self.args.bauth_last:
|
if self.args.bauth_last:
|
||||||
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
||||||
|
|
||||||
|
if not self.args.no_dav:
|
||||||
|
from .dxml import DXML_OK
|
||||||
|
|
||||||
|
if not DXML_OK:
|
||||||
|
if not self.args.no_dav:
|
||||||
|
self.args.no_dav = True
|
||||||
|
t = "WARNING:\nDisabling WebDAV support because dxml selftest failed. Please report this bug;\n%s\n...and include the following information in the bug-report:\n%s | expat %s\n"
|
||||||
|
self.log("root", t % (URL_BUG, VERSIONS, expat_ver()), 1)
|
||||||
|
|
||||||
def _process_config(self) -> bool:
|
def _process_config(self) -> bool:
|
||||||
al = self.args
|
al = self.args
|
||||||
|
|
||||||
@@ -479,8 +750,10 @@ class SvcHub(object):
|
|||||||
zsl = al.th_covers.split(",")
|
zsl = al.th_covers.split(",")
|
||||||
zsl = [x.strip() for x in zsl]
|
zsl = [x.strip() for x in zsl]
|
||||||
zsl = [x for x in zsl if x]
|
zsl = [x for x in zsl if x]
|
||||||
al.th_covers = set(zsl)
|
al.th_covers = zsl
|
||||||
al.th_coversd = set(zsl + ["." + x for x in zsl])
|
al.th_coversd = zsl + ["." + x for x in zsl]
|
||||||
|
al.th_covers_set = set(al.th_covers)
|
||||||
|
al.th_coversd_set = set(al.th_coversd)
|
||||||
|
|
||||||
for k in "c".split(" "):
|
for k in "c".split(" "):
|
||||||
vl = getattr(al, k)
|
vl = getattr(al, k)
|
||||||
@@ -496,7 +769,7 @@ class SvcHub(object):
|
|||||||
vs = os.path.expandvars(os.path.expanduser(vs))
|
vs = os.path.expandvars(os.path.expanduser(vs))
|
||||||
setattr(al, k, vs)
|
setattr(al, k, vs)
|
||||||
|
|
||||||
for k in "sus_urls nonsus_urls".split(" "):
|
for k in "dav_ua1 sus_urls nonsus_urls".split(" "):
|
||||||
vs = getattr(al, k)
|
vs = getattr(al, k)
|
||||||
if not vs or vs == "no":
|
if not vs or vs == "no":
|
||||||
setattr(al, k, None)
|
setattr(al, k, None)
|
||||||
@@ -520,8 +793,8 @@ class SvcHub(object):
|
|||||||
al.idp_h_grp = al.idp_h_grp.lower()
|
al.idp_h_grp = al.idp_h_grp.lower()
|
||||||
al.idp_h_key = al.idp_h_key.lower()
|
al.idp_h_key = al.idp_h_key.lower()
|
||||||
|
|
||||||
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
|
al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa, True)
|
||||||
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
|
al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa, True)
|
||||||
|
|
||||||
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||||
al.mte = odfusion(mte, al.mte)
|
al.mte = odfusion(mte, al.mte)
|
||||||
@@ -533,7 +806,7 @@ class SvcHub(object):
|
|||||||
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||||
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||||
|
|
||||||
for k in ["no_hash", "no_idx", "og_ua"]:
|
for k in ["no_hash", "no_idx", "og_ua", "srch_excl"]:
|
||||||
ptn = getattr(self.args, k)
|
ptn = getattr(self.args, k)
|
||||||
if ptn:
|
if ptn:
|
||||||
setattr(self.args, k, re.compile(ptn))
|
setattr(self.args, k, re.compile(ptn))
|
||||||
@@ -568,6 +841,24 @@ class SvcHub(object):
|
|||||||
if len(al.tcolor) == 3: # fc5 => ffcc55
|
if len(al.tcolor) == 3: # fc5 => ffcc55
|
||||||
al.tcolor = "".join([x * 2 for x in al.tcolor])
|
al.tcolor = "".join([x * 2 for x in al.tcolor])
|
||||||
|
|
||||||
|
zs = al.u2sz
|
||||||
|
zsl = zs.split(",")
|
||||||
|
if len(zsl) not in (1, 3):
|
||||||
|
t = "invalid --u2sz; must be either one number, or a comma-separated list of three numbers (min,default,max)"
|
||||||
|
raise Exception(t)
|
||||||
|
if len(zsl) < 3:
|
||||||
|
zsl = ["1", zs, zs]
|
||||||
|
zi2 = 1
|
||||||
|
for zs in zsl:
|
||||||
|
zi = int(zs)
|
||||||
|
# arbitrary constraint (anything above 2 GiB is probably unintended)
|
||||||
|
if zi < 1 or zi > 2047:
|
||||||
|
raise Exception("invalid --u2sz; minimum is 1, max is 2047")
|
||||||
|
if zi < zi2:
|
||||||
|
raise Exception("invalid --u2sz; values must be equal or ascending")
|
||||||
|
zi2 = zi
|
||||||
|
al.u2sz = ",".join(zsl)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||||
@@ -618,7 +909,7 @@ class SvcHub(object):
|
|||||||
self.args.nc = min(self.args.nc, soft // 2)
|
self.args.nc = min(self.args.nc, soft // 2)
|
||||||
|
|
||||||
def _logname(self) -> str:
|
def _logname(self) -> str:
|
||||||
dt = datetime.now(UTC)
|
dt = datetime.now(self.tz)
|
||||||
fn = str(self.args.lo)
|
fn = str(self.args.lo)
|
||||||
for fs in "YmdHMS":
|
for fs in "YmdHMS":
|
||||||
fs = "%" + fs
|
fs = "%" + fs
|
||||||
@@ -735,38 +1026,23 @@ class SvcHub(object):
|
|||||||
except:
|
except:
|
||||||
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
||||||
|
|
||||||
def reload(self) -> str:
|
def reload(self, rescan_all_vols: bool, up2k: bool) -> str:
|
||||||
with self.up2k.mutex:
|
t = "config has been reloaded"
|
||||||
if self.reloading:
|
with self.reload_mutex:
|
||||||
return "cannot reload; already in progress"
|
|
||||||
self.reloading = 1
|
|
||||||
|
|
||||||
Daemon(self._reload, "reloading")
|
|
||||||
return "reload initiated"
|
|
||||||
|
|
||||||
def _reload(self, rescan_all_vols: bool = True) -> None:
|
|
||||||
with self.up2k.mutex:
|
|
||||||
if self.reloading != 1:
|
|
||||||
return
|
|
||||||
self.reloading = 2
|
|
||||||
self.log("root", "reloading config")
|
self.log("root", "reloading config")
|
||||||
self.asrv.reload()
|
self.asrv.reload(9 if up2k else 4)
|
||||||
self.up2k.reload(rescan_all_vols)
|
if up2k:
|
||||||
|
self.up2k.reload(rescan_all_vols)
|
||||||
|
t += "; volumes are now reinitializing"
|
||||||
|
else:
|
||||||
|
self.log("root", "reload done")
|
||||||
self.broker.reload()
|
self.broker.reload()
|
||||||
self.reloading = 0
|
return t
|
||||||
|
|
||||||
def _reload_blocking(self, rescan_all_vols: bool = True) -> None:
|
def _reload_sessions(self) -> None:
|
||||||
while True:
|
with self.asrv.mutex:
|
||||||
with self.up2k.mutex:
|
self.asrv.load_sessions(True)
|
||||||
if self.reloading < 2:
|
self.broker.reload_sessions()
|
||||||
self.reloading = 1
|
|
||||||
break
|
|
||||||
time.sleep(0.05)
|
|
||||||
|
|
||||||
# try to handle multiple pending IdP reloads at once:
|
|
||||||
time.sleep(0.2)
|
|
||||||
|
|
||||||
self._reload(rescan_all_vols=rescan_all_vols)
|
|
||||||
|
|
||||||
def stop_thr(self) -> None:
|
def stop_thr(self) -> None:
|
||||||
while not self.stop_req:
|
while not self.stop_req:
|
||||||
@@ -775,7 +1051,7 @@ class SvcHub(object):
|
|||||||
|
|
||||||
if self.reload_req:
|
if self.reload_req:
|
||||||
self.reload_req = False
|
self.reload_req = False
|
||||||
self.reload()
|
self.reload(True, True)
|
||||||
|
|
||||||
self.shutdown()
|
self.shutdown()
|
||||||
|
|
||||||
@@ -888,12 +1164,12 @@ class SvcHub(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
with self.log_mutex:
|
with self.log_mutex:
|
||||||
zd = datetime.now(UTC)
|
dt = datetime.now(self.tz)
|
||||||
ts = self.log_dfmt % (
|
ts = self.log_dfmt % (
|
||||||
zd.year,
|
dt.year,
|
||||||
zd.month * 100 + zd.day,
|
dt.month * 100 + dt.day,
|
||||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
(dt.hour * 100 + dt.minute) * 100 + dt.second,
|
||||||
zd.microsecond // self.log_div,
|
dt.microsecond // self.log_div,
|
||||||
)
|
)
|
||||||
|
|
||||||
if c and not self.args.no_ansi:
|
if c and not self.args.no_ansi:
|
||||||
@@ -914,41 +1190,26 @@ class SvcHub(object):
|
|||||||
if not self.args.no_logflush:
|
if not self.args.no_logflush:
|
||||||
self.logf.flush()
|
self.logf.flush()
|
||||||
|
|
||||||
now = time.time()
|
if dt.day != self.cday or dt.month != self.cmon:
|
||||||
if int(now) >= self.next_day:
|
self._set_next_day(dt)
|
||||||
self._set_next_day()
|
|
||||||
|
|
||||||
def _set_next_day(self) -> None:
|
def _set_next_day(self, dt: datetime) -> None:
|
||||||
if self.next_day and self.logf and self.logf_base_fn != self._logname():
|
if self.cday and self.logf and self.logf_base_fn != self._logname():
|
||||||
self.logf.close()
|
self.logf.close()
|
||||||
self._setup_logfile("")
|
self._setup_logfile("")
|
||||||
|
|
||||||
dt = datetime.now(UTC)
|
self.cday = dt.day
|
||||||
|
self.cmon = dt.month
|
||||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
|
||||||
day_now = dt.day
|
|
||||||
while dt.day == day_now:
|
|
||||||
dt += timedelta(hours=12)
|
|
||||||
|
|
||||||
dt = dt.replace(hour=0, minute=0, second=0)
|
|
||||||
try:
|
|
||||||
tt = dt.utctimetuple()
|
|
||||||
except:
|
|
||||||
# still makes me hella uncomfortable
|
|
||||||
tt = dt.timetuple()
|
|
||||||
|
|
||||||
self.next_day = calendar.timegm(tt)
|
|
||||||
|
|
||||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
"""handles logging from all components"""
|
"""handles logging from all components"""
|
||||||
with self.log_mutex:
|
with self.log_mutex:
|
||||||
now = time.time()
|
dt = datetime.now(self.tz)
|
||||||
if int(now) >= self.next_day:
|
if dt.day != self.cday or dt.month != self.cmon:
|
||||||
dt = datetime.fromtimestamp(now, UTC)
|
|
||||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||||
print(zs, end="")
|
print(zs, end="")
|
||||||
self._set_next_day()
|
self._set_next_day(dt)
|
||||||
if self.logf:
|
if self.logf:
|
||||||
self.logf.write(zs)
|
self.logf.write(zs)
|
||||||
|
|
||||||
@@ -967,12 +1228,11 @@ class SvcHub(object):
|
|||||||
else:
|
else:
|
||||||
msg = "%s%s\033[0m" % (c, msg)
|
msg = "%s%s\033[0m" % (c, msg)
|
||||||
|
|
||||||
zd = datetime.fromtimestamp(now, UTC)
|
|
||||||
ts = self.log_efmt % (
|
ts = self.log_efmt % (
|
||||||
zd.hour,
|
dt.hour,
|
||||||
zd.minute,
|
dt.minute,
|
||||||
zd.second,
|
dt.second,
|
||||||
zd.microsecond // self.log_div,
|
dt.microsecond // self.log_div,
|
||||||
)
|
)
|
||||||
msg = fmt % (ts, src, msg)
|
msg = fmt % (ts, src, msg)
|
||||||
try:
|
try:
|
||||||
@@ -1070,5 +1330,5 @@ class SvcHub(object):
|
|||||||
zs = "{}\n{}".format(VERSIONS, alltrace())
|
zs = "{}\n{}".format(VERSIONS, alltrace())
|
||||||
zb = zs.encode("utf-8", "replace")
|
zb = zs.encode("utf-8", "replace")
|
||||||
zb = gzip.compress(zb)
|
zb = gzip.compress(zb)
|
||||||
zs = base64.b64encode(zb).decode("ascii")
|
zs = ub64enc(zb).decode("ascii")
|
||||||
self.log("stacks", zs)
|
self.log("stacks", zs)
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
|
||||||
import calendar
|
import calendar
|
||||||
import stat
|
import stat
|
||||||
import time
|
import time
|
||||||
import zlib
|
import zlib
|
||||||
|
|
||||||
|
from .authsrv import AuthSrv
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .sutil import StreamArc, errdesc
|
from .sutil import StreamArc, errdesc
|
||||||
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
|
from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
|
||||||
@@ -37,9 +37,7 @@ def dostime2unix(buf: bytes) -> int:
|
|||||||
|
|
||||||
|
|
||||||
def unixtime2dos(ts: int) -> bytes:
|
def unixtime2dos(ts: int) -> bytes:
|
||||||
tt = time.gmtime(ts + 1)
|
dy, dm, dd, th, tm, ts, _, _, _ = time.gmtime(ts + 1)
|
||||||
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
|
||||||
|
|
||||||
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||||
bt = (th << 11) + (tm << 5) + ts // 2
|
bt = (th << 11) + (tm << 5) + ts // 2
|
||||||
try:
|
try:
|
||||||
@@ -102,12 +100,12 @@ def gen_hdr(
|
|||||||
|
|
||||||
# spec says to put zeros when !crc if bit3 (streaming)
|
# spec says to put zeros when !crc if bit3 (streaming)
|
||||||
# however infozip does actual sz and it even works on winxp
|
# however infozip does actual sz and it even works on winxp
|
||||||
# (same reasning for z64 extradata later)
|
# (same reasoning for z64 extradata later)
|
||||||
vsz = 0xFFFFFFFF if z64 else sz
|
vsz = 0xFFFFFFFF if z64 else sz
|
||||||
ret += spack(b"<LL", vsz, vsz)
|
ret += spack(b"<LL", vsz, vsz)
|
||||||
|
|
||||||
# windows support (the "?" replace below too)
|
# windows support (the "?" replace below too)
|
||||||
fn = sanitize_fn(fn, "/", [])
|
fn = sanitize_fn(fn, "/")
|
||||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||||
|
|
||||||
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
|
# add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
|
||||||
@@ -219,13 +217,13 @@ class StreamZip(StreamArc):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
log: "NamedLogger",
|
log: "NamedLogger",
|
||||||
args: argparse.Namespace,
|
asrv: AuthSrv,
|
||||||
fgen: Generator[dict[str, Any], None, None],
|
fgen: Generator[dict[str, Any], None, None],
|
||||||
utf8: bool = False,
|
utf8: bool = False,
|
||||||
pre_crc: bool = False,
|
pre_crc: bool = False,
|
||||||
**kwargs: Any
|
**kwargs: Any
|
||||||
) -> None:
|
) -> None:
|
||||||
super(StreamZip, self).__init__(log, args, fgen)
|
super(StreamZip, self).__init__(log, asrv, fgen)
|
||||||
|
|
||||||
self.utf8 = utf8
|
self.utf8 = utf8
|
||||||
self.pre_crc = pre_crc
|
self.pre_crc = pre_crc
|
||||||
@@ -302,7 +300,7 @@ class StreamZip(StreamArc):
|
|||||||
mbuf = b""
|
mbuf = b""
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
errf, txt = errdesc(errors)
|
errf, txt = errdesc(self.asrv.vfs, errors)
|
||||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||||
for x in self.ser(errf):
|
for x in self.ser(errf):
|
||||||
yield x
|
yield x
|
||||||
|
|||||||
@@ -15,19 +15,25 @@ from .util import (
|
|||||||
E_ADDR_IN_USE,
|
E_ADDR_IN_USE,
|
||||||
E_ADDR_NOT_AVAIL,
|
E_ADDR_NOT_AVAIL,
|
||||||
E_UNREACH,
|
E_UNREACH,
|
||||||
|
HAVE_IPV6,
|
||||||
IP6ALL,
|
IP6ALL,
|
||||||
|
VF_CAREFUL,
|
||||||
Netdev,
|
Netdev,
|
||||||
|
atomic_move,
|
||||||
min_ex,
|
min_ex,
|
||||||
sunpack,
|
sunpack,
|
||||||
termsize,
|
termsize,
|
||||||
)
|
)
|
||||||
|
|
||||||
if True:
|
if True:
|
||||||
from typing import Generator
|
from typing import Generator, Union
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if not hasattr(socket, "AF_UNIX"):
|
||||||
|
setattr(socket, "AF_UNIX", -9001)
|
||||||
|
|
||||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||||
setattr(socket, "IPPROTO_IPV6", 41)
|
setattr(socket, "IPPROTO_IPV6", 41)
|
||||||
|
|
||||||
@@ -89,7 +95,7 @@ class TcpSrv(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
# binding 0.0.0.0 after :: fails on dualstack
|
# binding 0.0.0.0 after :: fails on dualstack
|
||||||
# but is necessary on non-dualstakc
|
# but is necessary on non-dualstack
|
||||||
if successful_binds:
|
if successful_binds:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -111,8 +117,10 @@ class TcpSrv(object):
|
|||||||
|
|
||||||
eps = {
|
eps = {
|
||||||
"127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
|
"127.0.0.1": Netdev("127.0.0.1", 0, "", "local only"),
|
||||||
"::1": Netdev("::1", 0, "", "local only"),
|
|
||||||
}
|
}
|
||||||
|
if HAVE_IPV6:
|
||||||
|
eps["::1"] = Netdev("::1", 0, "", "local only")
|
||||||
|
|
||||||
nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
|
nonlocals = [x for x in self.args.i if x not in [k.split("/")[0] for k in eps]]
|
||||||
if nonlocals:
|
if nonlocals:
|
||||||
try:
|
try:
|
||||||
@@ -214,14 +222,41 @@ class TcpSrv(object):
|
|||||||
if self.args.qr or self.args.qrs:
|
if self.args.qr or self.args.qrs:
|
||||||
self.qr = self._qr(qr1, qr2)
|
self.qr = self._qr(qr1, qr2)
|
||||||
|
|
||||||
|
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
|
self.log("tcpsrv", msg, c)
|
||||||
|
|
||||||
def _listen(self, ip: str, port: int) -> None:
|
def _listen(self, ip: str, port: int) -> None:
|
||||||
ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
uds_perm = uds_gid = -1
|
||||||
|
if "unix:" in ip:
|
||||||
|
tcp = False
|
||||||
|
ipv = socket.AF_UNIX
|
||||||
|
uds = ip.split(":")
|
||||||
|
ip = uds[-1]
|
||||||
|
if len(uds) > 2:
|
||||||
|
uds_perm = int(uds[1], 8)
|
||||||
|
if len(uds) > 3:
|
||||||
|
try:
|
||||||
|
uds_gid = int(uds[2])
|
||||||
|
except:
|
||||||
|
import grp
|
||||||
|
|
||||||
|
uds_gid = grp.getgrnam(uds[2]).gr_gid
|
||||||
|
|
||||||
|
elif ":" in ip:
|
||||||
|
tcp = True
|
||||||
|
ipv = socket.AF_INET6
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ipv = socket.AF_INET
|
||||||
|
|
||||||
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
||||||
|
|
||||||
if not ANYWIN or self.args.reuseaddr:
|
if not ANYWIN or self.args.reuseaddr:
|
||||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
|
||||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
if tcp:
|
||||||
|
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
|
||||||
srv.settimeout(None) # < does not inherit, ^ opts above do
|
srv.settimeout(None) # < does not inherit, ^ opts above do
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -233,8 +268,25 @@ class TcpSrv(object):
|
|||||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
srv.bind((ip, port))
|
if tcp:
|
||||||
sport = srv.getsockname()[1]
|
srv.bind((ip, port))
|
||||||
|
else:
|
||||||
|
if ANYWIN or self.args.rm_sck:
|
||||||
|
if os.path.exists(ip):
|
||||||
|
os.unlink(ip)
|
||||||
|
srv.bind(ip)
|
||||||
|
else:
|
||||||
|
tf = "%s.%d" % (ip, os.getpid())
|
||||||
|
if os.path.exists(tf):
|
||||||
|
os.unlink(tf)
|
||||||
|
srv.bind(tf)
|
||||||
|
if uds_gid != -1:
|
||||||
|
os.chown(tf, -1, uds_gid)
|
||||||
|
if uds_perm != -1:
|
||||||
|
os.chmod(tf, uds_perm)
|
||||||
|
atomic_move(self.nlog, tf, ip, VF_CAREFUL)
|
||||||
|
|
||||||
|
sport = srv.getsockname()[1] if tcp else port
|
||||||
if port != sport:
|
if port != sport:
|
||||||
# linux 6.0.16 lets you bind a port which is in use
|
# linux 6.0.16 lets you bind a port which is in use
|
||||||
# except it just gives you a random port instead
|
# except it just gives you a random port instead
|
||||||
@@ -246,12 +298,23 @@ class TcpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
e = ""
|
||||||
if ex.errno in E_ADDR_IN_USE:
|
if ex.errno in E_ADDR_IN_USE:
|
||||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||||
|
if not tcp:
|
||||||
|
e = "\033[1;31munix-socket {} is busy\033[0m".format(ip)
|
||||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||||
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
e = "\033[1;31minterface {} does not exist\033[0m".format(ip)
|
||||||
else:
|
|
||||||
|
if not e:
|
||||||
|
if not tcp:
|
||||||
|
t = "\n\n\n NOTE: this crash may be due to a unix-socket bug; try --rm-sck\n"
|
||||||
|
self.log("tcpsrv", t, 2)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
if not tcp and not self.args.rm_sck:
|
||||||
|
e += "; maybe this is a bug? try --rm-sck"
|
||||||
|
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
@@ -259,7 +322,14 @@ class TcpSrv(object):
|
|||||||
bound: list[tuple[str, int]] = []
|
bound: list[tuple[str, int]] = []
|
||||||
srvs: list[socket.socket] = []
|
srvs: list[socket.socket] = []
|
||||||
for srv in self.srv:
|
for srv in self.srv:
|
||||||
ip, port = srv.getsockname()[:2]
|
if srv.family == socket.AF_UNIX:
|
||||||
|
tcp = False
|
||||||
|
ip = re.sub(r"\.[0-9]+$", "", srv.getsockname())
|
||||||
|
port = 0
|
||||||
|
else:
|
||||||
|
tcp = True
|
||||||
|
ip, port = srv.getsockname()[:2]
|
||||||
|
|
||||||
if ip == IP6ALL:
|
if ip == IP6ALL:
|
||||||
ip = "::" # jython
|
ip = "::" # jython
|
||||||
|
|
||||||
@@ -291,13 +361,17 @@ class TcpSrv(object):
|
|||||||
bound.append((ip, port))
|
bound.append((ip, port))
|
||||||
srvs.append(srv)
|
srvs.append(srv)
|
||||||
fno = srv.fileno()
|
fno = srv.fileno()
|
||||||
hip = "[{}]".format(ip) if ":" in ip else ip
|
if tcp:
|
||||||
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
hip = "[{}]".format(ip) if ":" in ip else ip
|
||||||
|
msg = "listening @ {}:{} f{} p{}".format(hip, port, fno, os.getpid())
|
||||||
|
else:
|
||||||
|
msg = "listening @ {} f{} p{}".format(ip, fno, os.getpid())
|
||||||
|
|
||||||
self.log("tcpsrv", msg)
|
self.log("tcpsrv", msg)
|
||||||
if self.args.q:
|
if self.args.q:
|
||||||
print(msg)
|
print(msg)
|
||||||
|
|
||||||
self.hub.broker.say("listen", srv)
|
self.hub.broker.say("httpsrv.listen", srv)
|
||||||
|
|
||||||
self.srv = srvs
|
self.srv = srvs
|
||||||
self.bound = bound
|
self.bound = bound
|
||||||
@@ -305,7 +379,7 @@ class TcpSrv(object):
|
|||||||
self._distribute_netdevs()
|
self._distribute_netdevs()
|
||||||
|
|
||||||
def _distribute_netdevs(self):
|
def _distribute_netdevs(self):
|
||||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
self.hub.broker.say("httpsrv.set_netdevs", self.netdevs)
|
||||||
self.hub.start_zeroconf()
|
self.hub.start_zeroconf()
|
||||||
gencert(self.log, self.args, self.netdevs)
|
gencert(self.log, self.args, self.netdevs)
|
||||||
self.hub.restart_ftpd()
|
self.hub.restart_ftpd()
|
||||||
@@ -328,23 +402,25 @@ class TcpSrv(object):
|
|||||||
if not netdevs:
|
if not netdevs:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
added = "nothing"
|
add = []
|
||||||
removed = "nothing"
|
rem = []
|
||||||
for k, v in netdevs.items():
|
for k, v in netdevs.items():
|
||||||
if k not in self.netdevs:
|
if k not in self.netdevs:
|
||||||
added = "{} = {}".format(k, v)
|
add.append("\n\033[32m added %s = %s" % (k, v))
|
||||||
for k, v in self.netdevs.items():
|
for k, v in self.netdevs.items():
|
||||||
if k not in netdevs:
|
if k not in netdevs:
|
||||||
removed = "{} = {}".format(k, v)
|
rem.append("\n\033[33mremoved %s = %s" % (k, v))
|
||||||
|
|
||||||
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
|
t = "network change detected:%s%s"
|
||||||
self.log("tcpsrv", t.format(added, removed), 3)
|
self.log("tcpsrv", t % ("".join(add), "".join(rem)), 3)
|
||||||
self.netdevs = netdevs
|
self.netdevs = netdevs
|
||||||
self._distribute_netdevs()
|
self._distribute_netdevs()
|
||||||
|
|
||||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||||
from .stolen.ifaddr import get_adapters
|
from .stolen.ifaddr import get_adapters
|
||||||
|
|
||||||
|
listen_ips = [x for x in listen_ips if "unix:" not in x]
|
||||||
|
|
||||||
nics = get_adapters(True)
|
nics = get_adapters(True)
|
||||||
eps: dict[str, Netdev] = {}
|
eps: dict[str, Netdev] = {}
|
||||||
for nic in nics:
|
for nic in nics:
|
||||||
|
|||||||
@@ -33,10 +33,10 @@ from partftpy import (
|
|||||||
)
|
)
|
||||||
from partftpy.TftpShared import TftpException
|
from partftpy.TftpShared import TftpException
|
||||||
|
|
||||||
from .__init__ import EXE, TYPE_CHECKING
|
from .__init__ import EXE, PY2, TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
@@ -44,6 +44,9 @@ if True: # pylint: disable=using-constant-test
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
lg = logging.getLogger("tftp")
|
lg = logging.getLogger("tftp")
|
||||||
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|
||||||
@@ -95,7 +98,7 @@ class Tftpd(object):
|
|||||||
TftpServer,
|
TftpServer,
|
||||||
]
|
]
|
||||||
cbak = []
|
cbak = []
|
||||||
if not self.args.tftp_no_fast and not EXE:
|
if not self.args.tftp_no_fast and not EXE and not PY2:
|
||||||
try:
|
try:
|
||||||
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
ptn = re.compile(r"(^\s*)log\.debug\(.*\)$")
|
||||||
for C in Cs:
|
for C in Cs:
|
||||||
@@ -105,7 +108,7 @@ class Tftpd(object):
|
|||||||
cfn = C.__spec__.origin
|
cfn = C.__spec__.origin
|
||||||
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
exec (compile(src2, filename=cfn, mode="exec"), C.__dict__)
|
||||||
except Exception:
|
except Exception:
|
||||||
t = "failed to optimize tftp code; run with --tftp-noopt if there are issues:\n"
|
t = "failed to optimize tftp code; run with --tftp-no-fast if there are issues:\n"
|
||||||
self.log("tftp", t + min_ex(), 3)
|
self.log("tftp", t + min_ex(), 3)
|
||||||
for n, zd in enumerate(cbak):
|
for n, zd in enumerate(cbak):
|
||||||
Cs[n].__dict__ = zd
|
Cs[n].__dict__ = zd
|
||||||
@@ -150,11 +153,6 @@ class Tftpd(object):
|
|||||||
|
|
||||||
self._disarm(fos)
|
self._disarm(fos)
|
||||||
|
|
||||||
ip = next((x for x in self.args.i if ":" not in x), None)
|
|
||||||
if not ip:
|
|
||||||
self.log("tftp", "IPv6 not supported for tftp; listening on 0.0.0.0", 3)
|
|
||||||
ip = "0.0.0.0"
|
|
||||||
|
|
||||||
self.port = int(self.args.tftp)
|
self.port = int(self.args.tftp)
|
||||||
self.srv = []
|
self.srv = []
|
||||||
self.ips = []
|
self.ips = []
|
||||||
@@ -168,9 +166,16 @@ class Tftpd(object):
|
|||||||
if "::" in ips:
|
if "::" in ips:
|
||||||
ips.append("0.0.0.0")
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
if self.args.ftp4:
|
ips = [x for x in ips if "unix:" not in x]
|
||||||
|
|
||||||
|
if self.args.tftp4:
|
||||||
ips = [x for x in ips if ":" not in x]
|
ips = [x for x in ips if ":" not in x]
|
||||||
|
|
||||||
|
if not ips:
|
||||||
|
t = "cannot start tftp-server; no compatible IPs in -i"
|
||||||
|
self.nlog(t, 1)
|
||||||
|
return
|
||||||
|
|
||||||
ips = list(ODict.fromkeys(ips)) # dedup
|
ips = list(ODict.fromkeys(ips)) # dedup
|
||||||
|
|
||||||
for ip in ips:
|
for ip in ips:
|
||||||
@@ -246,6 +251,8 @@ class Tftpd(object):
|
|||||||
|
|
||||||
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
|
||||||
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
|
||||||
|
if not vfs.realpath:
|
||||||
|
raise Exception("unmapped vfs")
|
||||||
return vfs, vfs.canonical(rem)
|
return vfs, vfs.canonical(rem)
|
||||||
|
|
||||||
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
|
||||||
@@ -262,12 +269,13 @@ class Tftpd(object):
|
|||||||
"*",
|
"*",
|
||||||
not self.args.no_scandir,
|
not self.args.no_scandir,
|
||||||
[[True, False]],
|
[[True, False]],
|
||||||
|
throw=True,
|
||||||
)
|
)
|
||||||
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
|
||||||
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
|
||||||
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
|
||||||
real1 = dirs1 + fils1
|
real1 = dirs1 + fils1
|
||||||
realt = [(datetime.fromtimestamp(mt), sz, fn) for mt, sz, fn in real1]
|
realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
|
||||||
reals = [
|
reals = [
|
||||||
(
|
(
|
||||||
"%04d-%02d-%02d %02d:%02d:%02d"
|
"%04d-%02d-%02d %02d:%02d:%02d"
|
||||||
@@ -333,15 +341,29 @@ class Tftpd(object):
|
|||||||
|
|
||||||
xbu = vfs.flags.get("xbu")
|
xbu = vfs.flags.get("xbu")
|
||||||
if xbu and not runhook(
|
if xbu and not runhook(
|
||||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "8.3.8.7", 0, ""
|
self.nlog,
|
||||||
|
None,
|
||||||
|
self.hub.up2k,
|
||||||
|
"xbu.tftpd",
|
||||||
|
xbu,
|
||||||
|
ap,
|
||||||
|
vpath,
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
"",
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
"8.3.8.7",
|
||||||
|
time.time(),
|
||||||
|
"",
|
||||||
):
|
):
|
||||||
yeet("blocked by xbu server config: " + vpath)
|
yeet("blocked by xbu server config: %r" % (vpath,))
|
||||||
|
|
||||||
if not self.args.tftp_nols and bos.path.isdir(ap):
|
if not self.args.tftp_nols and bos.path.isdir(ap):
|
||||||
return self._ls(vpath, "", 0, True)
|
return self._ls(vpath, "", 0, True)
|
||||||
|
|
||||||
if not a:
|
if not a:
|
||||||
a = [self.args.iobuf]
|
a = (self.args.iobuf,)
|
||||||
|
|
||||||
return open(ap, mode, *a, **ka)
|
return open(ap, mode, *a, **ka)
|
||||||
|
|
||||||
@@ -382,7 +404,7 @@ class Tftpd(object):
|
|||||||
bos.stat(ap)
|
bos.stat(ap)
|
||||||
return True
|
return True
|
||||||
except:
|
except:
|
||||||
return False
|
return vpath == "/"
|
||||||
|
|
||||||
def _p_isdir(self, vpath: str) -> bool:
|
def _p_isdir(self, vpath: str) -> bool:
|
||||||
try:
|
try:
|
||||||
@@ -390,7 +412,7 @@ class Tftpd(object):
|
|||||||
ret = stat.S_ISDIR(st.st_mode)
|
ret = stat.S_ISDIR(st.st_mode)
|
||||||
return ret
|
return ret
|
||||||
except:
|
except:
|
||||||
return False
|
return vpath == "/"
|
||||||
|
|
||||||
def _hook(self, *a: Any, **ka: Any) -> None:
|
def _hook(self, *a: Any, **ka: Any) -> None:
|
||||||
src = inspect.currentframe().f_back.f_code.co_name
|
src = inspect.currentframe().f_back.f_code.co_name
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import os
|
|||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .th_srv import HAVE_WEBP, thumb_path
|
from .th_srv import EXTS_AC, HAVE_WEBP, thumb_path
|
||||||
from .util import Cooldown
|
from .util import Cooldown
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
@@ -57,12 +57,17 @@ class ThumbCli(object):
|
|||||||
if is_vid and "dvthumb" in dbv.flags:
|
if is_vid and "dvthumb" in dbv.flags:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
want_opus = fmt in ("opus", "caf", "mp3")
|
want_opus = fmt in EXTS_AC
|
||||||
is_au = ext in self.fmt_ffa
|
is_au = ext in self.fmt_ffa
|
||||||
if is_au:
|
is_vau = want_opus and ext in self.fmt_ffv
|
||||||
|
if is_au or is_vau:
|
||||||
if want_opus:
|
if want_opus:
|
||||||
if self.args.no_acode:
|
if self.args.no_acode:
|
||||||
return None
|
return None
|
||||||
|
elif fmt == "caf" and self.args.no_caf:
|
||||||
|
fmt = "mp3"
|
||||||
|
elif fmt == "owa" and self.args.no_owa:
|
||||||
|
fmt = "mp3"
|
||||||
else:
|
else:
|
||||||
if "dathumb" in dbv.flags:
|
if "dathumb" in dbv.flags:
|
||||||
return None
|
return None
|
||||||
@@ -106,15 +111,15 @@ class ThumbCli(object):
|
|||||||
sfmt += "3" if "3" in fmt else ""
|
sfmt += "3" if "3" in fmt else ""
|
||||||
|
|
||||||
fmt = sfmt
|
fmt = sfmt
|
||||||
|
|
||||||
elif fmt[:1] == "p" and not is_au:
|
elif fmt[:1] == "p" and not is_au and not is_vid:
|
||||||
t = "cannot thumbnail [%s]: png only allowed for waveforms"
|
t = "cannot thumbnail %r: png only allowed for waveforms"
|
||||||
self.log(t % (rem), 6)
|
self.log(t % (rem,), 6)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
if not histpath:
|
if not histpath:
|
||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for %r" % (ptop,))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import base64
|
|
||||||
import hashlib
|
import hashlib
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
@@ -12,7 +11,7 @@ import time
|
|||||||
|
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
|
||||||
@@ -21,46 +20,64 @@ from .util import (
|
|||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
Cooldown,
|
Cooldown,
|
||||||
Daemon,
|
Daemon,
|
||||||
Pebkac,
|
|
||||||
afsenc,
|
afsenc,
|
||||||
fsenc,
|
fsenc,
|
||||||
min_ex,
|
min_ex,
|
||||||
runcmd,
|
runcmd,
|
||||||
statdir,
|
statdir,
|
||||||
|
ub64enc,
|
||||||
vsplit,
|
vsplit,
|
||||||
wrename,
|
wrename,
|
||||||
wunlink,
|
wunlink,
|
||||||
)
|
)
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Optional, Union
|
from typing import Any, Optional, Union
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
HAVE_PIL = False
|
HAVE_PIL = False
|
||||||
HAVE_PILF = False
|
HAVE_PILF = False
|
||||||
HAVE_HEIF = False
|
HAVE_HEIF = False
|
||||||
HAVE_AVIF = False
|
HAVE_AVIF = False
|
||||||
HAVE_WEBP = False
|
HAVE_WEBP = False
|
||||||
|
|
||||||
|
EXTS_TH = set(["jpg", "webp", "png"])
|
||||||
|
EXTS_AC = set(["opus", "owa", "caf", "mp3"])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||||
|
|
||||||
HAVE_PIL = True
|
HAVE_PIL = True
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PILF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
ImageFont.load_default(size=16)
|
ImageFont.load_default(size=16)
|
||||||
HAVE_PILF = True
|
HAVE_PILF = True
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_WEBP"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||||
HAVE_WEBP = True
|
HAVE_WEBP = True
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_HEIF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
from pyheif_pillow_opener import register_heif_opener
|
from pyheif_pillow_opener import register_heif_opener
|
||||||
|
|
||||||
register_heif_opener()
|
register_heif_opener()
|
||||||
@@ -69,6 +86,9 @@ try:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_PIL_AVIF"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
import pillow_avif # noqa: F401 # pylint: disable=unused-import
|
||||||
|
|
||||||
HAVE_AVIF = True
|
HAVE_AVIF = True
|
||||||
@@ -80,6 +100,9 @@ except:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_VIPS"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
HAVE_VIPS = True
|
HAVE_VIPS = True
|
||||||
import pyvips
|
import pyvips
|
||||||
|
|
||||||
@@ -88,6 +111,9 @@ except:
|
|||||||
HAVE_VIPS = False
|
HAVE_VIPS = False
|
||||||
|
|
||||||
|
|
||||||
|
th_dir_cache = {}
|
||||||
|
|
||||||
|
|
||||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||||
# base16 = 16 = 256
|
# base16 = 16 = 256
|
||||||
# b64-lc = 38 = 1444
|
# b64-lc = 38 = 1444
|
||||||
@@ -101,16 +127,22 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -
|
|||||||
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
if ext in ffa and fmt[:2] in ("wf", "jf"):
|
||||||
fmt = fmt.replace("f", "")
|
fmt = fmt.replace("f", "")
|
||||||
|
|
||||||
rd += "\n" + fmt
|
dcache = th_dir_cache
|
||||||
h = hashlib.sha512(afsenc(rd)).digest()
|
rd_key = rd + "\n" + fmt
|
||||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
rd = dcache.get(rd_key)
|
||||||
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
if not rd:
|
||||||
|
h = hashlib.sha512(afsenc(rd_key)).digest()
|
||||||
|
b64 = ub64enc(h).decode("ascii")[:24]
|
||||||
|
rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
|
||||||
|
if len(dcache) > 9001:
|
||||||
|
dcache.clear()
|
||||||
|
dcache[rd_key] = rd
|
||||||
|
|
||||||
# could keep original filenames but this is safer re pathlen
|
# could keep original filenames but this is safer re pathlen
|
||||||
h = hashlib.sha512(afsenc(fn)).digest()
|
h = hashlib.sha512(afsenc(fn)).digest()
|
||||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
fn = ub64enc(h).decode("ascii")[:24]
|
||||||
|
|
||||||
if fmt in ("opus", "caf", "mp3"):
|
if fmt in EXTS_AC:
|
||||||
cat = "ac"
|
cat = "ac"
|
||||||
else:
|
else:
|
||||||
fc = fmt[:1]
|
fc = fmt[:1]
|
||||||
@@ -134,6 +166,7 @@ class ThumbSrv(object):
|
|||||||
self.ram: dict[str, float] = {}
|
self.ram: dict[str, float] = {}
|
||||||
self.memcond = threading.Condition(self.mutex)
|
self.memcond = threading.Condition(self.mutex)
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
self.rm_nullthumbs = True # forget failed conversions on startup
|
||||||
self.nthr = max(1, self.args.th_mt)
|
self.nthr = max(1, self.args.th_mt)
|
||||||
|
|
||||||
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||||
@@ -209,7 +242,7 @@ class ThumbSrv(object):
|
|||||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
if not histpath:
|
if not histpath:
|
||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for %r" % (ptop,))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
@@ -219,7 +252,7 @@ class ThumbSrv(object):
|
|||||||
with self.mutex:
|
with self.mutex:
|
||||||
try:
|
try:
|
||||||
self.busy[tpath].append(cond)
|
self.busy[tpath].append(cond)
|
||||||
self.log("joined waiting room for %s" % (tpath,))
|
self.log("joined waiting room for %r" % (tpath,))
|
||||||
except:
|
except:
|
||||||
thdir = os.path.dirname(tpath)
|
thdir = os.path.dirname(tpath)
|
||||||
bos.makedirs(os.path.join(thdir, "w"))
|
bos.makedirs(os.path.join(thdir, "w"))
|
||||||
@@ -236,11 +269,11 @@ class ThumbSrv(object):
|
|||||||
allvols = list(self.asrv.vfs.all_vols.values())
|
allvols = list(self.asrv.vfs.all_vols.values())
|
||||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||||
if not vn:
|
if not vn:
|
||||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
self.log("ptop %r not in %s" % (ptop, allvols), 3)
|
||||||
vn = self.asrv.vfs.all_aps[0][1]
|
vn = self.asrv.vfs.all_aps[0][1]
|
||||||
|
|
||||||
self.q.put((abspath, tpath, fmt, vn))
|
self.q.put((abspath, tpath, fmt, vn))
|
||||||
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)
|
||||||
|
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -304,23 +337,32 @@ class ThumbSrv(object):
|
|||||||
ap_unpk = abspath
|
ap_unpk = abspath
|
||||||
|
|
||||||
if not bos.path.exists(tpath):
|
if not bos.path.exists(tpath):
|
||||||
|
tex = tpath.rsplit(".", 1)[-1]
|
||||||
|
want_mp3 = tex == "mp3"
|
||||||
|
want_opus = tex in ("opus", "owa", "caf")
|
||||||
|
want_png = tex == "png"
|
||||||
|
want_au = want_mp3 or want_opus
|
||||||
for lib in self.args.th_dec:
|
for lib in self.args.th_dec:
|
||||||
|
can_au = lib == "ff" and (
|
||||||
|
ext in self.fmt_ffa or ext in self.fmt_ffv
|
||||||
|
)
|
||||||
|
|
||||||
if lib == "pil" and ext in self.fmt_pil:
|
if lib == "pil" and ext in self.fmt_pil:
|
||||||
funs.append(self.conv_pil)
|
funs.append(self.conv_pil)
|
||||||
elif lib == "vips" and ext in self.fmt_vips:
|
elif lib == "vips" and ext in self.fmt_vips:
|
||||||
funs.append(self.conv_vips)
|
funs.append(self.conv_vips)
|
||||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
elif can_au and (want_png or want_au):
|
||||||
funs.append(self.conv_ffmpeg)
|
if want_opus:
|
||||||
elif lib == "ff" and ext in self.fmt_ffa:
|
|
||||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
|
||||||
funs.append(self.conv_opus)
|
funs.append(self.conv_opus)
|
||||||
elif tpath.endswith(".mp3"):
|
elif want_mp3:
|
||||||
funs.append(self.conv_mp3)
|
funs.append(self.conv_mp3)
|
||||||
elif tpath.endswith(".png"):
|
elif want_png:
|
||||||
funs.append(self.conv_waves)
|
funs.append(self.conv_waves)
|
||||||
png_ok = True
|
png_ok = True
|
||||||
else:
|
elif lib == "ff" and (ext in self.fmt_ffi or ext in self.fmt_ffv):
|
||||||
funs.append(self.conv_spec)
|
funs.append(self.conv_ffmpeg)
|
||||||
|
elif lib == "ff" and ext in self.fmt_ffa and not want_au:
|
||||||
|
funs.append(self.conv_spec)
|
||||||
|
|
||||||
tdir, tfn = os.path.split(tpath)
|
tdir, tfn = os.path.split(tpath)
|
||||||
ttpath = os.path.join(tdir, "w", tfn)
|
ttpath = os.path.join(tdir, "w", tfn)
|
||||||
@@ -337,8 +379,8 @@ class ThumbSrv(object):
|
|||||||
fun(ap_unpk, ttpath, fmt, vn)
|
fun(ap_unpk, ttpath, fmt, vn)
|
||||||
break
|
break
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
msg = "{} could not create thumbnail of {}\n{}"
|
msg = "%s could not create thumbnail of %r\n%s"
|
||||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
msg = msg % (fun.__name__, abspath, min_ex())
|
||||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
||||||
self.log(msg, c)
|
self.log(msg, c)
|
||||||
if getattr(ex, "returncode", 0) != 321:
|
if getattr(ex, "returncode", 0) != 321:
|
||||||
@@ -450,7 +492,7 @@ class ThumbSrv(object):
|
|||||||
if c == crops[-1]:
|
if c == crops[-1]:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
assert img # type: ignore
|
assert img # type: ignore # !rm
|
||||||
img.write_to_file(tpath, Q=40)
|
img.write_to_file(tpath, Q=40)
|
||||||
|
|
||||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
@@ -599,13 +641,14 @@ class ThumbSrv(object):
|
|||||||
b"pngquant",
|
b"pngquant",
|
||||||
b"--strip",
|
b"--strip",
|
||||||
b"--nofs",
|
b"--nofs",
|
||||||
b"--output", fsenc(wtpath),
|
b"--output",
|
||||||
fsenc(tpath)
|
fsenc(wtpath),
|
||||||
|
fsenc(tpath),
|
||||||
]
|
]
|
||||||
ret = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=400)[0]
|
ret = runcmd(cmd, timeout=vn.flags["convt"], nice=True, oom=400)[0]
|
||||||
if ret:
|
if ret:
|
||||||
try:
|
try:
|
||||||
wunlink(self.log, wtpath, vn.flags)
|
wunlink(self.log, wtpath, vn.flags)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
@@ -673,8 +716,8 @@ class ThumbSrv(object):
|
|||||||
raise Exception("disabled in server config")
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
self.wait4ram(0.2, tpath)
|
self.wait4ram(0.2, tpath)
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in tags:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
|
|
||||||
if quality.endswith("k"):
|
if quality.endswith("k"):
|
||||||
@@ -695,7 +738,7 @@ class ThumbSrv(object):
|
|||||||
b"-v", b"error",
|
b"-v", b"error",
|
||||||
b"-hide_banner",
|
b"-hide_banner",
|
||||||
b"-i", fsenc(abspath),
|
b"-i", fsenc(abspath),
|
||||||
b"-map_metadata", b"-1",
|
] + self.big_tags(rawtags) + [
|
||||||
b"-map", b"0:a:0",
|
b"-map", b"0:a:0",
|
||||||
b"-ar", b"44100",
|
b"-ar", b"44100",
|
||||||
b"-ac", b"2",
|
b"-ac", b"2",
|
||||||
@@ -711,51 +754,106 @@ class ThumbSrv(object):
|
|||||||
raise Exception("disabled in server config")
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
self.wait4ram(0.2, tpath)
|
self.wait4ram(0.2, tpath)
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
tags, rawtags = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in tags:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
|
|
||||||
|
sq = "%dk" % (self.args.q_opus,)
|
||||||
|
bq = sq.encode("ascii")
|
||||||
|
if tags["ac"][1] == "opus":
|
||||||
|
enc = "-c:a copy"
|
||||||
|
else:
|
||||||
|
enc = "-c:a libopus -b:a " + sq
|
||||||
|
|
||||||
|
fun = self._conv_caf if fmt == "caf" else self._conv_owa
|
||||||
|
|
||||||
|
fun(abspath, tpath, tags, rawtags, enc, bq, vn)
|
||||||
|
|
||||||
|
def _conv_owa(
|
||||||
|
self,
|
||||||
|
abspath: str,
|
||||||
|
tpath: str,
|
||||||
|
tags: dict[str, tuple[int, Any]],
|
||||||
|
rawtags: dict[str, list[Any]],
|
||||||
|
enc: str,
|
||||||
|
bq: bytes,
|
||||||
|
vn: VFS,
|
||||||
|
) -> None:
|
||||||
|
if tpath.endswith(".owa"):
|
||||||
|
container = b"webm"
|
||||||
|
tagset = [b"-map_metadata", b"-1"]
|
||||||
|
else:
|
||||||
|
container = b"opus"
|
||||||
|
tagset = self.big_tags(rawtags)
|
||||||
|
|
||||||
|
self.log("conv2 %s [%s]" % (container, enc), 6)
|
||||||
|
benc = enc.encode("ascii").split(b" ")
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-v", b"error",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-i", fsenc(abspath),
|
||||||
|
] + tagset + [
|
||||||
|
b"-map", b"0:a:0",
|
||||||
|
] + benc + [
|
||||||
|
b"-f", container,
|
||||||
|
fsenc(tpath)
|
||||||
|
]
|
||||||
|
# fmt: on
|
||||||
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
|
def _conv_caf(
|
||||||
|
self,
|
||||||
|
abspath: str,
|
||||||
|
tpath: str,
|
||||||
|
tags: dict[str, tuple[int, Any]],
|
||||||
|
rawtags: dict[str, list[Any]],
|
||||||
|
enc: str,
|
||||||
|
bq: bytes,
|
||||||
|
vn: VFS,
|
||||||
|
) -> None:
|
||||||
|
tmp_opus = tpath + ".opus"
|
||||||
try:
|
try:
|
||||||
dur = ret[".dur"][1]
|
wunlink(self.log, tmp_opus, vn.flags)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
dur = tags[".dur"][1]
|
||||||
except:
|
except:
|
||||||
dur = 0
|
dur = 0
|
||||||
|
|
||||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
self.log("conv2 caf-tmp [%s]" % (enc,), 6)
|
||||||
want_caf = tpath.endswith(".caf")
|
benc = enc.encode("ascii").split(b" ")
|
||||||
tmp_opus = tpath
|
|
||||||
if want_caf:
|
|
||||||
tmp_opus = tpath + ".opus"
|
|
||||||
try:
|
|
||||||
wunlink(self.log, tmp_opus, vn.flags)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
caf_src = abspath if src_opus else tmp_opus
|
# fmt: off
|
||||||
bq = ("%dk" % (self.args.q_opus,)).encode("ascii")
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
if not want_caf or not src_opus:
|
b"-nostdin",
|
||||||
# fmt: off
|
b"-v", b"error",
|
||||||
cmd = [
|
b"-hide_banner",
|
||||||
b"ffmpeg",
|
b"-i", fsenc(abspath),
|
||||||
b"-nostdin",
|
b"-map_metadata", b"-1",
|
||||||
b"-v", b"error",
|
b"-map", b"0:a:0",
|
||||||
b"-hide_banner",
|
] + benc + [
|
||||||
b"-i", fsenc(abspath),
|
b"-f", b"opus",
|
||||||
b"-map_metadata", b"-1",
|
fsenc(tmp_opus)
|
||||||
b"-map", b"0:a:0",
|
]
|
||||||
b"-c:a", b"libopus",
|
# fmt: on
|
||||||
b"-b:a", bq,
|
self._run_ff(cmd, vn, oom=300)
|
||||||
fsenc(tmp_opus)
|
|
||||||
]
|
|
||||||
# fmt: on
|
|
||||||
self._run_ff(cmd, vn, oom=300)
|
|
||||||
|
|
||||||
# iOS fails to play some "insufficiently complex" files
|
# iOS fails to play some "insufficiently complex" files
|
||||||
# (average file shorter than 8 seconds), so of course we
|
# (average file shorter than 8 seconds), so of course we
|
||||||
# fix that by mixing in some inaudible pink noise :^)
|
# fix that by mixing in some inaudible pink noise :^)
|
||||||
# 6.3 sec seems like the cutoff so lets do 7, and
|
# 6.3 sec seems like the cutoff so lets do 7, and
|
||||||
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
|
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
|
||||||
if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
|
sz = bos.path.getsize(tmp_opus)
|
||||||
|
if dur < 20 or sz < 256 * 1024:
|
||||||
|
zs = bq.decode("ascii")
|
||||||
|
self.log("conv2 caf-transcode; dur=%d sz=%d q=%s" % (dur, sz, zs), 6)
|
||||||
# fmt: off
|
# fmt: off
|
||||||
cmd = [
|
cmd = [
|
||||||
b"ffmpeg",
|
b"ffmpeg",
|
||||||
@@ -774,15 +872,16 @@ class ThumbSrv(object):
|
|||||||
# fmt: on
|
# fmt: on
|
||||||
self._run_ff(cmd, vn, oom=300)
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
elif want_caf:
|
else:
|
||||||
# simple remux should be safe
|
# simple remux should be safe
|
||||||
|
self.log("conv2 caf-remux; dur=%d sz=%d" % (dur, sz), 6)
|
||||||
# fmt: off
|
# fmt: off
|
||||||
cmd = [
|
cmd = [
|
||||||
b"ffmpeg",
|
b"ffmpeg",
|
||||||
b"-nostdin",
|
b"-nostdin",
|
||||||
b"-v", b"error",
|
b"-v", b"error",
|
||||||
b"-hide_banner",
|
b"-hide_banner",
|
||||||
b"-i", fsenc(abspath if src_opus else tmp_opus),
|
b"-i", fsenc(tmp_opus),
|
||||||
b"-map_metadata", b"-1",
|
b"-map_metadata", b"-1",
|
||||||
b"-map", b"0:a:0",
|
b"-map", b"0:a:0",
|
||||||
b"-c:a", b"copy",
|
b"-c:a", b"copy",
|
||||||
@@ -792,11 +891,20 @@ class ThumbSrv(object):
|
|||||||
# fmt: on
|
# fmt: on
|
||||||
self._run_ff(cmd, vn, oom=300)
|
self._run_ff(cmd, vn, oom=300)
|
||||||
|
|
||||||
if tmp_opus != tpath:
|
try:
|
||||||
try:
|
wunlink(self.log, tmp_opus, vn.flags)
|
||||||
wunlink(self.log, tmp_opus, vn.flags)
|
except:
|
||||||
except:
|
pass
|
||||||
pass
|
|
||||||
|
def big_tags(self, raw_tags: dict[str, list[str]]) -> list[bytes]:
|
||||||
|
ret = []
|
||||||
|
for k, vs in raw_tags.items():
|
||||||
|
for v in vs:
|
||||||
|
if len(str(v)) >= 1024:
|
||||||
|
bv = k.encode("utf-8", "replace")
|
||||||
|
ret += [b"-metadata", bv + b"="]
|
||||||
|
break
|
||||||
|
return ret
|
||||||
|
|
||||||
def poke(self, tdir: str) -> None:
|
def poke(self, tdir: str) -> None:
|
||||||
if not self.poke_cd.poke(tdir):
|
if not self.poke_cd.poke(tdir):
|
||||||
@@ -813,7 +921,6 @@ class ThumbSrv(object):
|
|||||||
def cleaner(self) -> None:
|
def cleaner(self) -> None:
|
||||||
interval = self.args.th_clean
|
interval = self.args.th_clean
|
||||||
while True:
|
while True:
|
||||||
time.sleep(interval)
|
|
||||||
ndirs = 0
|
ndirs = 0
|
||||||
for vol, histpath in self.asrv.vfs.histtab.items():
|
for vol, histpath in self.asrv.vfs.histtab.items():
|
||||||
if histpath.startswith(vol):
|
if histpath.startswith(vol):
|
||||||
@@ -827,6 +934,8 @@ class ThumbSrv(object):
|
|||||||
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
|
self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)
|
||||||
|
|
||||||
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
|
||||||
|
self.rm_nullthumbs = False
|
||||||
|
time.sleep(interval)
|
||||||
|
|
||||||
def clean(self, histpath: str) -> int:
|
def clean(self, histpath: str) -> int:
|
||||||
ret = 0
|
ret = 0
|
||||||
@@ -841,13 +950,15 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
def _clean(self, cat: str, thumbpath: str) -> int:
|
def _clean(self, cat: str, thumbpath: str) -> int:
|
||||||
# self.log("cln {}".format(thumbpath))
|
# self.log("cln {}".format(thumbpath))
|
||||||
exts = ["jpg", "webp", "png"] if cat == "th" else ["opus", "caf", "mp3"]
|
exts = EXTS_TH if cat == "th" else EXTS_AC
|
||||||
maxage = getattr(self.args, cat + "_maxage")
|
maxage = getattr(self.args, cat + "_maxage")
|
||||||
now = time.time()
|
now = time.time()
|
||||||
prev_b64 = None
|
prev_b64 = None
|
||||||
prev_fp = ""
|
prev_fp = ""
|
||||||
try:
|
try:
|
||||||
t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
|
t1 = statdir(
|
||||||
|
self.log_func, not self.args.no_scandir, False, thumbpath, False
|
||||||
|
)
|
||||||
ents = sorted(list(t1))
|
ents = sorted(list(t1))
|
||||||
except:
|
except:
|
||||||
return 0
|
return 0
|
||||||
@@ -888,6 +999,10 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
if self.rm_nullthumbs and not inf.st_size:
|
||||||
|
bos.unlink(fp)
|
||||||
|
continue
|
||||||
|
|
||||||
if b64 == prev_b64:
|
if b64 == prev_b64:
|
||||||
self.log("rm replaced [{}]".format(fp))
|
self.log("rm replaced [{}]".format(fp))
|
||||||
bos.unlink(prev_fp)
|
bos.unlink(prev_fp)
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import threading
|
|||||||
import time
|
import time
|
||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||||
from .authsrv import LEELOO_DALLAS, VFS
|
from .authsrv import LEELOO_DALLAS, VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .up2k import up2k_wark_from_hashlist
|
from .up2k import up2k_wark_from_hashlist
|
||||||
@@ -38,6 +38,9 @@ if True: # pylint: disable=using-constant-test
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .httpsrv import HttpSrv
|
from .httpsrv import HttpSrv
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class U2idx(object):
|
class U2idx(object):
|
||||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||||
@@ -50,12 +53,16 @@ class U2idx(object):
|
|||||||
self.log("your python does not have sqlite3; searching will be disabled")
|
self.log("your python does not have sqlite3; searching will be disabled")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
assert sqlite3 # type: ignore # !rm
|
||||||
|
|
||||||
self.active_id = ""
|
self.active_id = ""
|
||||||
self.active_cur: Optional["sqlite3.Cursor"] = None
|
self.active_cur: Optional["sqlite3.Cursor"] = None
|
||||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||||
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
||||||
self.mem_cur.execute(r"create table a (b text)")
|
self.mem_cur.execute(r"create table a (b text)")
|
||||||
|
|
||||||
|
self.sh_cur: Optional["sqlite3.Cursor"] = None
|
||||||
|
|
||||||
self.p_end = 0.0
|
self.p_end = 0.0
|
||||||
self.p_dur = 0.0
|
self.p_dur = 0.0
|
||||||
|
|
||||||
@@ -63,6 +70,9 @@ class U2idx(object):
|
|||||||
self.log_func("u2idx", msg, c)
|
self.log_func("u2idx", msg, c)
|
||||||
|
|
||||||
def shutdown(self) -> None:
|
def shutdown(self) -> None:
|
||||||
|
if not HAVE_SQLITE3:
|
||||||
|
return
|
||||||
|
|
||||||
for cur in self.cur.values():
|
for cur in self.cur.values():
|
||||||
db = cur.connection
|
db = cur.connection
|
||||||
try:
|
try:
|
||||||
@@ -73,6 +83,12 @@ class U2idx(object):
|
|||||||
cur.close()
|
cur.close()
|
||||||
db.close()
|
db.close()
|
||||||
|
|
||||||
|
for cur in (self.mem_cur, self.sh_cur):
|
||||||
|
if cur:
|
||||||
|
db = cur.connection
|
||||||
|
cur.close()
|
||||||
|
db.close()
|
||||||
|
|
||||||
def fsearch(
|
def fsearch(
|
||||||
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
||||||
) -> list[dict[str, Any]]:
|
) -> list[dict[str, Any]]:
|
||||||
@@ -88,25 +104,39 @@ class U2idx(object):
|
|||||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
|
||||||
except:
|
except:
|
||||||
raise Pebkac(500, min_ex())
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
|
def get_shr(self) -> Optional["sqlite3.Cursor"]:
|
||||||
if not HAVE_SQLITE3:
|
if self.sh_cur:
|
||||||
|
return self.sh_cur
|
||||||
|
|
||||||
|
if not HAVE_SQLITE3 or not self.args.shr:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
assert sqlite3 # type: ignore # !rm
|
||||||
|
|
||||||
|
db = sqlite3.connect(self.args.shr_db, timeout=2, check_same_thread=False)
|
||||||
|
cur = db.cursor()
|
||||||
|
cur.execute('pragma table_info("sh")').fetchall()
|
||||||
|
self.sh_cur = cur
|
||||||
|
return cur
|
||||||
|
|
||||||
|
def get_cur(self, vn: VFS) -> Optional["sqlite3.Cursor"]:
|
||||||
cur = self.cur.get(vn.realpath)
|
cur = self.cur.get(vn.realpath)
|
||||||
if cur:
|
if cur:
|
||||||
return cur
|
return cur
|
||||||
|
|
||||||
if "e2d" not in vn.flags:
|
if not HAVE_SQLITE3 or "e2d" not in vn.flags:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
assert sqlite3 # type: ignore # !rm
|
||||||
|
|
||||||
ptop = vn.realpath
|
ptop = vn.realpath
|
||||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||||
if not histpath:
|
if not histpath:
|
||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for %r" % (ptop,))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
db_path = os.path.join(histpath, "up2k.db")
|
db_path = os.path.join(histpath, "up2k.db")
|
||||||
@@ -121,7 +151,7 @@ class U2idx(object):
|
|||||||
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
|
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
|
||||||
cur = db.cursor()
|
cur = db.cursor()
|
||||||
cur.execute('pragma table_info("up")').fetchone()
|
cur.execute('pragma table_info("up")').fetchone()
|
||||||
self.log("ro: {}".format(db_path))
|
self.log("ro: %r" % (db_path,))
|
||||||
except:
|
except:
|
||||||
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
|
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
|
||||||
# may not fail until the pragma so unset it
|
# may not fail until the pragma so unset it
|
||||||
@@ -131,7 +161,7 @@ class U2idx(object):
|
|||||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||||
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
|
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
|
||||||
self.log("opened {}".format(db_path))
|
self.log("opened %r" % (db_path,))
|
||||||
|
|
||||||
self.cur[ptop] = cur
|
self.cur[ptop] = cur
|
||||||
return cur
|
return cur
|
||||||
@@ -280,7 +310,7 @@ class U2idx(object):
|
|||||||
q += " lower({}) {} ? ) ".format(field, oper)
|
q += " lower({}) {} ? ) ".format(field, oper)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self.run_query(uname, vols, q, va, have_mt, lim)
|
return self.run_query(uname, vols, q, va, have_mt, True, lim)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise Pebkac(500, repr(ex))
|
raise Pebkac(500, repr(ex))
|
||||||
|
|
||||||
@@ -291,9 +321,11 @@ class U2idx(object):
|
|||||||
uq: str,
|
uq: str,
|
||||||
uv: list[Union[str, int]],
|
uv: list[Union[str, int]],
|
||||||
have_mt: bool,
|
have_mt: bool,
|
||||||
|
sort: bool,
|
||||||
lim: int,
|
lim: int,
|
||||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||||
if self.args.srch_dbg:
|
dbg = self.args.srch_dbg
|
||||||
|
if dbg:
|
||||||
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
|
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
|
||||||
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
|
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
|
||||||
self.log(t % (len(vols), zs), 5)
|
self.log(t % (len(vols), zs), 5)
|
||||||
@@ -336,14 +368,14 @@ class U2idx(object):
|
|||||||
if not cur:
|
if not cur:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
excl = []
|
dots = flags.get("dotsrch") and uname in vol.axs.udot
|
||||||
for vp2 in self.asrv.vfs.all_vols.keys():
|
zs = "srch_re_dots" if dots else "srch_re_nodot"
|
||||||
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
|
rex: re.Pattern = flags.get(zs) # type: ignore
|
||||||
excl.append(vp2[len(vtop) :].lstrip("/"))
|
|
||||||
|
|
||||||
if self.args.srch_dbg:
|
if dbg:
|
||||||
t = "searching in volume /%s (%s), excludelist %s"
|
t = "searching in volume /%s (%s), excluding %s"
|
||||||
self.log(t % (vtop, ptop, excl), 5)
|
self.log(t % (vtop, ptop, rex.pattern), 5)
|
||||||
|
rex_cfg: Optional[re.Pattern] = flags.get("srch_excl")
|
||||||
|
|
||||||
self.active_cur = cur
|
self.active_cur = cur
|
||||||
|
|
||||||
@@ -356,7 +388,6 @@ class U2idx(object):
|
|||||||
|
|
||||||
sret = []
|
sret = []
|
||||||
fk = flags.get("fk")
|
fk = flags.get("fk")
|
||||||
dots = flags.get("dotsrch") and uname in vol.axs.udot
|
|
||||||
fk_alg = 2 if "fka" in flags else 1
|
fk_alg = 2 if "fka" in flags else 1
|
||||||
c = cur.execute(uq, tuple(vuv))
|
c = cur.execute(uq, tuple(vuv))
|
||||||
for hit in c:
|
for hit in c:
|
||||||
@@ -365,20 +396,23 @@ class U2idx(object):
|
|||||||
if rd.startswith("//") or fn.startswith("//"):
|
if rd.startswith("//") or fn.startswith("//"):
|
||||||
rd, fn = s3dec(rd, fn)
|
rd, fn = s3dec(rd, fn)
|
||||||
|
|
||||||
if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
|
vp = vjoin(vjoin(vtop, rd), fn)
|
||||||
if self.args.srch_dbg:
|
|
||||||
zs = vjoin(vjoin(vtop, rd), fn)
|
if vp in seen_rps:
|
||||||
t = "database inconsistency in volume '/%s'; ignoring: %s"
|
|
||||||
self.log(t % (vtop, zs), 1)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
if rex.search(vp):
|
||||||
if not dots and "/." in ("/" + rp):
|
if dbg:
|
||||||
continue
|
if rex_cfg and rex_cfg.search(vp): # type: ignore
|
||||||
|
self.log("filtered by srch_excl: %s" % (vp,), 6)
|
||||||
if rp in seen_rps:
|
elif not dots and "/." in ("/" + vp):
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
t = "database inconsistency in volume '/%s'; ignoring: %s"
|
||||||
|
self.log(t % (vtop, vp), 1)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
rp = quotep(vp)
|
||||||
if not fk:
|
if not fk:
|
||||||
suf = ""
|
suf = ""
|
||||||
else:
|
else:
|
||||||
@@ -400,7 +434,7 @@ class U2idx(object):
|
|||||||
if lim < 0:
|
if lim < 0:
|
||||||
break
|
break
|
||||||
|
|
||||||
if self.args.srch_dbg:
|
if dbg:
|
||||||
t = "in volume '/%s': hit: %s"
|
t = "in volume '/%s': hit: %s"
|
||||||
self.log(t % (vtop, rp), 5)
|
self.log(t % (vtop, rp), 5)
|
||||||
|
|
||||||
@@ -430,14 +464,15 @@ class U2idx(object):
|
|||||||
ret.extend(sret)
|
ret.extend(sret)
|
||||||
# print("[{}] {}".format(ptop, sret))
|
# print("[{}] {}".format(ptop, sret))
|
||||||
|
|
||||||
if self.args.srch_dbg:
|
if dbg:
|
||||||
t = "in volume '/%s': got %d hits, %d total so far"
|
t = "in volume '/%s': got %d hits, %d total so far"
|
||||||
self.log(t % (vtop, len(sret), len(ret)), 5)
|
self.log(t % (vtop, len(sret), len(ret)), 5)
|
||||||
|
|
||||||
done_flag.append(True)
|
done_flag.append(True)
|
||||||
self.active_id = ""
|
self.active_id = ""
|
||||||
|
|
||||||
ret.sort(key=itemgetter("rp"))
|
if sort:
|
||||||
|
ret.sort(key=itemgetter("rp"))
|
||||||
|
|
||||||
return ret, list(taglist.keys()), lim < 0 and not clamped
|
return ret, list(taglist.keys()), lim < 0 and not clamped
|
||||||
|
|
||||||
@@ -448,5 +483,5 @@ class U2idx(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if identifier == self.active_id:
|
if identifier == self.active_id:
|
||||||
assert self.active_cur
|
assert self.active_cur # !rm
|
||||||
self.active_cur.connection.interrupt()
|
self.active_cur.connection.interrupt()
|
||||||
|
|||||||
1894
copyparty/up2k.py
1894
copyparty/up2k.py
File diff suppressed because it is too large
Load Diff
1034
copyparty/util.py
1034
copyparty/util.py
File diff suppressed because it is too large
Load Diff
@@ -29,9 +29,10 @@ window.baguetteBox = (function () {
|
|||||||
isOverlayVisible = false,
|
isOverlayVisible = false,
|
||||||
touch = {}, // start-pos
|
touch = {}, // start-pos
|
||||||
touchFlag = false, // busy
|
touchFlag = false, // busy
|
||||||
|
scrollCSS = ['', ''],
|
||||||
scrollTimer = 0,
|
scrollTimer = 0,
|
||||||
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||||
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
|
re_v = /^[^?]+\.(webm|mkv|mp4|m4v|mov)(\?|$)/i,
|
||||||
anims = ['slideIn', 'fadeIn', 'none'],
|
anims = ['slideIn', 'fadeIn', 'none'],
|
||||||
data = {}, // all galleries
|
data = {}, // all galleries
|
||||||
imagesElements = [],
|
imagesElements = [],
|
||||||
@@ -567,6 +568,12 @@ window.baguetteBox = (function () {
|
|||||||
|
|
||||||
function showOverlay(chosenImageIndex) {
|
function showOverlay(chosenImageIndex) {
|
||||||
if (options.noScrollbars) {
|
if (options.noScrollbars) {
|
||||||
|
var a = document.documentElement.style.overflowY,
|
||||||
|
b = document.body.style.overflowY;
|
||||||
|
|
||||||
|
if (a != 'hidden' || b != 'scroll')
|
||||||
|
scrollCSS = [a, b];
|
||||||
|
|
||||||
document.documentElement.style.overflowY = 'hidden';
|
document.documentElement.style.overflowY = 'hidden';
|
||||||
document.body.style.overflowY = 'scroll';
|
document.body.style.overflowY = 'scroll';
|
||||||
}
|
}
|
||||||
@@ -615,8 +622,8 @@ window.baguetteBox = (function () {
|
|||||||
playvid(false);
|
playvid(false);
|
||||||
removeFromCache('#files');
|
removeFromCache('#files');
|
||||||
if (options.noScrollbars) {
|
if (options.noScrollbars) {
|
||||||
document.documentElement.style.overflowY = 'auto';
|
document.documentElement.style.overflowY = scrollCSS[0];
|
||||||
document.body.style.overflowY = 'auto';
|
document.body.style.overflowY = scrollCSS[1];
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -626,6 +633,9 @@ window.baguetteBox = (function () {
|
|||||||
catch (ex) { }
|
catch (ex) { }
|
||||||
isFullscreen = false;
|
isFullscreen = false;
|
||||||
|
|
||||||
|
if (toast.tag == 'bb-ded')
|
||||||
|
toast.hide();
|
||||||
|
|
||||||
if (dtor || overlay.style.display === 'none')
|
if (dtor || overlay.style.display === 'none')
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@@ -661,6 +671,7 @@ window.baguetteBox = (function () {
|
|||||||
if (v == keep)
|
if (v == keep)
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
|
unbind(v, 'error', lerr);
|
||||||
v.src = '';
|
v.src = '';
|
||||||
v.load();
|
v.load();
|
||||||
|
|
||||||
@@ -688,6 +699,28 @@ window.baguetteBox = (function () {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function lerr() {
|
||||||
|
var t;
|
||||||
|
try {
|
||||||
|
t = this.getAttribute('src');
|
||||||
|
t = uricom_dec(t.split('/').pop().split('?')[0]);
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
|
||||||
|
t = 'Failed to open ' + (t?t:'file');
|
||||||
|
console.log('bb-ded', t);
|
||||||
|
t += '\n\nEither the file is corrupt, or your browser does not understand the file format or codec';
|
||||||
|
|
||||||
|
try {
|
||||||
|
t += "\n\nerr#" + this.error.code + ", " + this.error.message;
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
|
||||||
|
this.ded = esc(t);
|
||||||
|
if (this === vidimg())
|
||||||
|
toast.err(20, this.ded, 'bb-ded');
|
||||||
|
}
|
||||||
|
|
||||||
function loadImage(index, callback) {
|
function loadImage(index, callback) {
|
||||||
var imageContainer = imagesElements[index];
|
var imageContainer = imagesElements[index];
|
||||||
var galleryItem = currentGallery[index];
|
var galleryItem = currentGallery[index];
|
||||||
@@ -732,7 +765,8 @@ window.baguetteBox = (function () {
|
|||||||
var image = mknod(is_vid ? 'video' : 'img');
|
var image = mknod(is_vid ? 'video' : 'img');
|
||||||
clmod(imageContainer, 'vid', is_vid);
|
clmod(imageContainer, 'vid', is_vid);
|
||||||
|
|
||||||
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
bind(image, 'error', lerr);
|
||||||
|
bind(image, is_vid ? 'loadedmetadata' : 'load', function () {
|
||||||
// Remove loader element
|
// Remove loader element
|
||||||
qsr('#baguette-img-' + index + ' .bbox-spinner');
|
qsr('#baguette-img-' + index + ' .bbox-spinner');
|
||||||
if (!options.async && callback)
|
if (!options.async && callback)
|
||||||
@@ -743,6 +777,8 @@ window.baguetteBox = (function () {
|
|||||||
image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
|
image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
|
||||||
image.setAttribute('controls', 'controls');
|
image.setAttribute('controls', 'controls');
|
||||||
image.onended = vidEnd;
|
image.onended = vidEnd;
|
||||||
|
image.onplay = function () { show_buttons(1); };
|
||||||
|
image.onpause = function () { show_buttons(); };
|
||||||
}
|
}
|
||||||
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
||||||
if (options.titleTag && imageCaption)
|
if (options.titleTag && imageCaption)
|
||||||
@@ -807,6 +843,12 @@ window.baguetteBox = (function () {
|
|||||||
});
|
});
|
||||||
updateOffset();
|
updateOffset();
|
||||||
|
|
||||||
|
var im = vidimg();
|
||||||
|
if (im && im.ded)
|
||||||
|
toast.err(20, im.ded, 'bb-ded');
|
||||||
|
else if (toast.tag == 'bb-ded')
|
||||||
|
toast.hide();
|
||||||
|
|
||||||
if (options.animation == 'none')
|
if (options.animation == 'none')
|
||||||
unvid(vid());
|
unvid(vid());
|
||||||
else
|
else
|
||||||
@@ -988,6 +1030,12 @@ window.baguetteBox = (function () {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function show_buttons(v) {
|
||||||
|
clmod(ebi('bbox-btns'), 'off', v);
|
||||||
|
clmod(btnPrev, 'off', v);
|
||||||
|
clmod(btnNext, 'off', v);
|
||||||
|
}
|
||||||
|
|
||||||
function bounceAnimation(direction) {
|
function bounceAnimation(direction) {
|
||||||
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
|
slider.className = options.animation == 'slideIn' ? 'bounce-from-' + direction : 'eog';
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
@@ -1051,9 +1099,7 @@ window.baguetteBox = (function () {
|
|||||||
if (fx > 0.7)
|
if (fx > 0.7)
|
||||||
return showNextImage();
|
return showNextImage();
|
||||||
|
|
||||||
clmod(ebi('bbox-btns'), 'off', 't');
|
show_buttons('t');
|
||||||
clmod(btnPrev, 'off', 't');
|
|
||||||
clmod(btnNext, 'off', 't');
|
|
||||||
|
|
||||||
if (Date.now() - ctime <= 500 && !IPHONE)
|
if (Date.now() - ctime <= 500 && !IPHONE)
|
||||||
tglfull();
|
tglfull();
|
||||||
|
|||||||
@@ -10,7 +10,6 @@
|
|||||||
--fg2-max: #fff;
|
--fg2-max: #fff;
|
||||||
--fg-weak: #bbb;
|
--fg-weak: #bbb;
|
||||||
|
|
||||||
--bg-u7: #555;
|
|
||||||
--bg-u6: #4c4c4c;
|
--bg-u6: #4c4c4c;
|
||||||
--bg-u5: #444;
|
--bg-u5: #444;
|
||||||
--bg-u4: #383838;
|
--bg-u4: #383838;
|
||||||
@@ -43,8 +42,14 @@
|
|||||||
--btn-h-bg: #805;
|
--btn-h-bg: #805;
|
||||||
--btn-1-fg: #400;
|
--btn-1-fg: #400;
|
||||||
--btn-1-bg: var(--a);
|
--btn-1-bg: var(--a);
|
||||||
|
--btn-h-bs: var(--btn-bs);
|
||||||
|
--btn-h-bb: var(--btn-bb);
|
||||||
|
--btn-1-bs: var(--btn-bs);
|
||||||
|
--btn-1-bb: var(--btn-bb);
|
||||||
--btn-1h-fg: var(--btn-1-fg);
|
--btn-1h-fg: var(--btn-1-fg);
|
||||||
--btn-1h-bg: #fe8;
|
--btn-1h-bg: #fe8;
|
||||||
|
--btn-1h-bs: var(--btn-1-bs);
|
||||||
|
--btn-1h-bb: var(--btn-1-bb);
|
||||||
--chk-fg: var(--tab-alt);
|
--chk-fg: var(--tab-alt);
|
||||||
--txt-sh: var(--bg-d2);
|
--txt-sh: var(--bg-d2);
|
||||||
--txt-bg: var(--btn-bg);
|
--txt-bg: var(--btn-bg);
|
||||||
@@ -59,7 +64,7 @@
|
|||||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||||
--u2-tab-b1: rgba(128,128,128,0.8);
|
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||||
--u2-tab-1-fg: #fd7;
|
--u2-tab-1-fg: #fd7;
|
||||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
--u2-tab-1-bg: linear-gradient(to bottom, #353, var(--bg) 80%);
|
||||||
--u2-tab-1-b1: #7c5;
|
--u2-tab-1-b1: #7c5;
|
||||||
--u2-tab-1-b2: #583;
|
--u2-tab-1-b2: #583;
|
||||||
--u2-tab-1-sh: #280;
|
--u2-tab-1-sh: #280;
|
||||||
@@ -183,7 +188,6 @@ html.y {
|
|||||||
--srv-1: #555;
|
--srv-1: #555;
|
||||||
--srv-2: #c83;
|
--srv-2: #c83;
|
||||||
--srv-3: #c0a;
|
--srv-3: #c0a;
|
||||||
--srv-3b: rgba(255,68,204,0.6);
|
|
||||||
|
|
||||||
--tree-bg: #fff;
|
--tree-bg: #fff;
|
||||||
|
|
||||||
@@ -212,22 +216,19 @@ html.y {
|
|||||||
html.a {
|
html.a {
|
||||||
--op-aa-sh: 0 0 .2em var(--bg-d3) inset;
|
--op-aa-sh: 0 0 .2em var(--bg-d3) inset;
|
||||||
|
|
||||||
--u2-o-bg: #603;
|
--btn-bs: 0 0 .2em var(--bg-d3);
|
||||||
--u2-o-b1: #a16;
|
}
|
||||||
--u2-o-sh: #a00;
|
html.az {
|
||||||
--u2-o-h-bg: var(--u2-o-bg);
|
--btn-1-bs: 0 0 .1em var(--fg) inset;
|
||||||
--u2-o-h-b1: #fb0;
|
|
||||||
--u2-o-h-sh: #fb0;
|
|
||||||
--u2-o-1-bg: #6a1;
|
|
||||||
--u2-o-1-b1: #efa;
|
|
||||||
--u2-o-1-sh: #0c0;
|
|
||||||
--u2-o-1h-bg: var(--u2-o-1-bg);
|
|
||||||
}
|
}
|
||||||
html.ay {
|
html.ay {
|
||||||
--op-aa-sh: 0 .1em .2em #ccc;
|
--op-aa-sh: 0 .1em .2em #ccc;
|
||||||
--op-aa-bg: var(--bg-max);
|
--op-aa-bg: var(--bg-max);
|
||||||
}
|
}
|
||||||
html.b {
|
html.b {
|
||||||
|
--btn-bs: 0 .05em 0 var(--bg-d3) inset;
|
||||||
|
--btn-1-bs: 0 .05em 0 var(--btn-1h-bg) inset;
|
||||||
|
|
||||||
--tree-bg: var(--bg);
|
--tree-bg: var(--bg);
|
||||||
|
|
||||||
--g-bg: var(--bg);
|
--g-bg: var(--bg);
|
||||||
@@ -244,17 +245,13 @@ html.b {
|
|||||||
--u2-b1-bg: rgba(128,128,128,0.15);
|
--u2-b1-bg: rgba(128,128,128,0.15);
|
||||||
--u2-b2-bg: var(--u2-b1-bg);
|
--u2-b2-bg: var(--u2-b1-bg);
|
||||||
|
|
||||||
--u2-o-bg: var(--btn-bg);
|
|
||||||
--u2-o-h-bg: var(--btn-h-bg);
|
|
||||||
--u2-o-1-bg: var(--a);
|
|
||||||
--u2-o-1h-bg: var(--a-hil);
|
|
||||||
|
|
||||||
--f-sh1: 0.1;
|
--f-sh1: 0.1;
|
||||||
--mp-b-bg: transparent;
|
--mp-b-bg: transparent;
|
||||||
}
|
}
|
||||||
html.bz {
|
html.bz {
|
||||||
--fg: #cce;
|
--fg: #cce;
|
||||||
--fg-weak: #bbd;
|
--fg-weak: #bbd;
|
||||||
|
|
||||||
--bg-u5: #3b3f58;
|
--bg-u5: #3b3f58;
|
||||||
--bg-u4: #1e2130;
|
--bg-u4: #1e2130;
|
||||||
--bg-u3: #1e2130;
|
--bg-u3: #1e2130;
|
||||||
@@ -266,11 +263,14 @@ html.bz {
|
|||||||
|
|
||||||
--row-alt: #181a27;
|
--row-alt: #181a27;
|
||||||
|
|
||||||
|
--a-b: #fb4;
|
||||||
|
|
||||||
--btn-bg: #202231;
|
--btn-bg: #202231;
|
||||||
--btn-h-bg: #2d2f45;
|
--btn-h-bg: #2d2f45;
|
||||||
--btn-1-bg: #ba2959;
|
--btn-1-bg: #eb6;
|
||||||
--btn-1-fg: #fff;
|
--btn-1-fg: #000;
|
||||||
--btn-1h-fg: #000;
|
--btn-1h-fg: #000;
|
||||||
|
--btn-1h-bg: #ff9;
|
||||||
--txt-sh: a;
|
--txt-sh: a;
|
||||||
|
|
||||||
--u2-tab-b1: var(--bg-u5);
|
--u2-tab-b1: var(--bg-u5);
|
||||||
@@ -285,6 +285,7 @@ html.bz {
|
|||||||
--f-h-b1: #34384e;
|
--f-h-b1: #34384e;
|
||||||
--mp-sh: #11121d;
|
--mp-sh: #11121d;
|
||||||
/*--mp-b-bg: #2c3044;*/
|
/*--mp-b-bg: #2c3044;*/
|
||||||
|
--f-play-bg: var(--btn-1-bg);
|
||||||
}
|
}
|
||||||
html.by {
|
html.by {
|
||||||
--bg: #f2f2f2;
|
--bg: #f2f2f2;
|
||||||
@@ -305,6 +306,7 @@ html.by {
|
|||||||
}
|
}
|
||||||
html.c {
|
html.c {
|
||||||
font-weight: bold;
|
font-weight: bold;
|
||||||
|
|
||||||
--fg: #fff;
|
--fg: #fff;
|
||||||
--fg-weak: #cef;
|
--fg-weak: #cef;
|
||||||
--bg-u5: #409;
|
--bg-u5: #409;
|
||||||
@@ -325,17 +327,25 @@ html.c {
|
|||||||
--chk-fg: #d90;
|
--chk-fg: #d90;
|
||||||
|
|
||||||
--op-aa-bg: #f9dd22;
|
--op-aa-bg: #f9dd22;
|
||||||
--u2-o-1-bg: #4cf;
|
|
||||||
|
|
||||||
--srv-1: #ea0;
|
--srv-1: #ea0;
|
||||||
--mp-b-bg: transparent;
|
--mp-b-bg: transparent;
|
||||||
}
|
}
|
||||||
html.cz {
|
html.cz {
|
||||||
--bgg: var(--bg-u2);
|
--bgg: var(--bg-u2);
|
||||||
|
|
||||||
--sel-bg: var(--bg-u5);
|
--sel-bg: var(--bg-u5);
|
||||||
--sel-fg: var(--fg);
|
--sel-fg: var(--fg);
|
||||||
|
|
||||||
|
--btn-bb: .2em solid #709;
|
||||||
|
--btn-bs: 0 .1em .6em rgba(255,0,185,0.5);
|
||||||
|
--btn-1-bb: .2em solid #e90;
|
||||||
|
--btn-1-bs: 0 .1em .8em rgba(255,205,0,0.9);
|
||||||
|
|
||||||
--srv-3: #fff;
|
--srv-3: #fff;
|
||||||
|
|
||||||
--u2-tab-b1: var(--bg-d3);
|
--u2-tab-b1: var(--bg-d3);
|
||||||
|
--u2-tab-1-bg: a;
|
||||||
}
|
}
|
||||||
html.cy {
|
html.cy {
|
||||||
--fg: #fff;
|
--fg: #fff;
|
||||||
@@ -362,24 +372,25 @@ html.cy {
|
|||||||
--btn-h-fg: #fff;
|
--btn-h-fg: #fff;
|
||||||
--btn-1-bg: #ff0;
|
--btn-1-bg: #ff0;
|
||||||
--btn-1-fg: #000;
|
--btn-1-fg: #000;
|
||||||
|
--btn-bs: 0 .25em 0 #f00;
|
||||||
--chk-fg: #fd0;
|
--chk-fg: #fd0;
|
||||||
|
|
||||||
|
--txt-bg: #000;
|
||||||
--srv-1: #f00;
|
--srv-1: #f00;
|
||||||
--srv-3: #fff;
|
--srv-3: #fff;
|
||||||
--op-aa-bg: #fff;
|
--op-aa-bg: #fff;
|
||||||
|
|
||||||
--u2-b1-bg: #f00;
|
--u2-b1-bg: #f00;
|
||||||
--u2-b2-bg: #f00;
|
--u2-b2-bg: #f00;
|
||||||
--u2-o-bg: #ff0;
|
|
||||||
--u2-o-1-bg: #f00;
|
--g-sel-fg: #fff;
|
||||||
|
--g-sel-bg: #aaa;
|
||||||
|
--g-fsel-bg: #aaa;
|
||||||
}
|
}
|
||||||
html.dz {
|
html.dz {
|
||||||
--fg: #4d4;
|
--fg: #4d4;
|
||||||
--fg-max: #fff;
|
|
||||||
--fg2-max: #fff;
|
|
||||||
--fg-weak: #2a2;
|
--fg-weak: #2a2;
|
||||||
|
|
||||||
--bg-u7: #020;
|
|
||||||
--bg-u6: #020;
|
--bg-u6: #020;
|
||||||
--bg-u5: #050;
|
--bg-u5: #050;
|
||||||
--bg-u4: #020;
|
--bg-u4: #020;
|
||||||
@@ -387,11 +398,9 @@ html.dz {
|
|||||||
--bg-u2: #020;
|
--bg-u2: #020;
|
||||||
--bg-u1: #020;
|
--bg-u1: #020;
|
||||||
--bg: #010;
|
--bg: #010;
|
||||||
--bgg: var(--bg);
|
|
||||||
--bg-d1: #000;
|
--bg-d1: #000;
|
||||||
--bg-d2: #020;
|
--bg-d2: #020;
|
||||||
--bg-d3: #000;
|
--bg-d3: #000;
|
||||||
--bg-max: #000;
|
|
||||||
|
|
||||||
--tab-alt: #6f6;
|
--tab-alt: #6f6;
|
||||||
--row-alt: #030;
|
--row-alt: #030;
|
||||||
@@ -404,48 +413,21 @@ html.dz {
|
|||||||
--a-dark: #afa;
|
--a-dark: #afa;
|
||||||
--a-gray: #2a2;
|
--a-gray: #2a2;
|
||||||
|
|
||||||
--btn-fg: var(--a);
|
|
||||||
--btn-bg: rgba(64,128,64,0.15);
|
--btn-bg: rgba(64,128,64,0.15);
|
||||||
--btn-h-fg: var(--a-hil);
|
|
||||||
--btn-h-bg: #050;
|
--btn-h-bg: #050;
|
||||||
--btn-1-fg: #000;
|
--btn-1-fg: #000;
|
||||||
--btn-1-bg: #4f4;
|
--btn-1-bg: #4f4;
|
||||||
--btn-1h-fg: var(--btn-1-fg);
|
|
||||||
--btn-1h-bg: #3f3;
|
--btn-1h-bg: #3f3;
|
||||||
--chk-fg: var(--tab-alt);
|
--btn-bs: 0 0 0 .1em #080 inset;
|
||||||
--txt-sh: var(--bg-d2);
|
--btn-1-bs: a;
|
||||||
--txt-bg: var(--btn-bg);
|
|
||||||
|
|
||||||
--op-aa-fg: var(--a);
|
|
||||||
--op-aa-bg: var(--bg-d2);
|
|
||||||
--op-a-sh: rgba(0,0,0,0.5);
|
|
||||||
|
|
||||||
--u2-btn-b1: var(--fg-weak);
|
--u2-btn-b1: var(--fg-weak);
|
||||||
--u2-sbtn-b1: var(--fg-weak);
|
--u2-sbtn-b1: var(--fg-weak);
|
||||||
--u2-txt-bg: var(--bg-u5);
|
|
||||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
|
||||||
--u2-tab-b1: var(--fg-weak);
|
--u2-tab-b1: var(--fg-weak);
|
||||||
--u2-tab-1-fg: #fff;
|
--u2-tab-1-fg: #fff;
|
||||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
--u2-tab-1-bg: linear-gradient(to bottom, #151, var(--bg) 80%);
|
||||||
--u2-tab-1-b1: #7c5;
|
|
||||||
--u2-tab-1-b2: #583;
|
|
||||||
--u2-tab-1-sh: #280;
|
|
||||||
--u2-b-fg: #fff;
|
|
||||||
--u2-b1-bg: #3a3;
|
--u2-b1-bg: #3a3;
|
||||||
--u2-b2-bg: #3a3;
|
--u2-b2-bg: #3a3;
|
||||||
--u2-o-bg: var(--btn-bg);
|
|
||||||
--u2-o-b1: var(--bg-u5);
|
|
||||||
--u2-o-h-bg: var(--fg-weak);
|
|
||||||
--u2-o-1-bg: var(--fg-weak);
|
|
||||||
--u2-o-1-b1: var(--a);
|
|
||||||
--u2-o-1h-bg: var(--a);
|
|
||||||
--u2-inf-bg: #07a;
|
|
||||||
--u2-inf-b1: #0be;
|
|
||||||
--u2-ok-bg: #380;
|
|
||||||
--u2-ok-b1: #8e4;
|
|
||||||
--u2-err-bg: #900;
|
|
||||||
--u2-err-b1: #d06;
|
|
||||||
--ud-b1: #888;
|
|
||||||
|
|
||||||
--sort-1: #fff;
|
--sort-1: #fff;
|
||||||
--sort-2: #3f3;
|
--sort-2: #3f3;
|
||||||
@@ -457,47 +439,12 @@ html.dz {
|
|||||||
|
|
||||||
--tree-bg: #010;
|
--tree-bg: #010;
|
||||||
|
|
||||||
--g-play-bg: #750;
|
|
||||||
--g-play-b1: #c90;
|
|
||||||
--g-play-b2: #da4;
|
|
||||||
--g-play-sh: #b83;
|
|
||||||
|
|
||||||
--g-sel-fg: #fff;
|
|
||||||
--g-sel-bg: #925;
|
|
||||||
--g-sel-b1: #c37;
|
--g-sel-b1: #c37;
|
||||||
--g-sel-sh: #b36;
|
--g-sel-sh: #b36;
|
||||||
--g-fsel-bg: #d39;
|
|
||||||
--g-fsel-b1: #d48;
|
--g-fsel-b1: #d48;
|
||||||
--g-fsel-ts: #804;
|
|
||||||
--g-fg: var(--a-hil);
|
|
||||||
--g-bg: var(--bg-u2);
|
|
||||||
--g-b1: var(--bg-u4);
|
|
||||||
--g-b2: var(--bg-u5);
|
|
||||||
--g-g1: var(--bg-u2);
|
|
||||||
--g-g2: var(--bg-u5);
|
|
||||||
--g-f-bg: var(--bg-u4);
|
|
||||||
--g-f-b1: var(--bg-u5);
|
|
||||||
--g-f-fg: var(--a-hil);
|
|
||||||
--g-sh: rgba(0,0,0,0.3);
|
|
||||||
|
|
||||||
--f-sh1: 0.33;
|
|
||||||
--f-sh2: 0.02;
|
|
||||||
--f-sh3: 0.2;
|
|
||||||
--f-h-b1: #3b3;
|
--f-h-b1: #3b3;
|
||||||
|
|
||||||
--f-play-bg: #fc5;
|
|
||||||
--f-play-fg: #000;
|
|
||||||
--f-sel-sh: #fc0;
|
|
||||||
--f-gray: #999;
|
|
||||||
|
|
||||||
--fm-off: #f6c;
|
|
||||||
--mp-sh: var(--bg-d3);
|
|
||||||
|
|
||||||
--err-fg: #fff;
|
|
||||||
--err-bg: #a20;
|
|
||||||
--err-b1: #f00;
|
|
||||||
--err-ts: #500;
|
|
||||||
|
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
font-family: 'scp', monospace, monospace;
|
font-family: 'scp', monospace, monospace;
|
||||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||||
@@ -550,10 +497,6 @@ html.dy {
|
|||||||
--u2-tab-1-bg: a;
|
--u2-tab-1-bg: a;
|
||||||
--u2-b1-bg: #000;
|
--u2-b1-bg: #000;
|
||||||
--u2-b2-bg: #000;
|
--u2-b2-bg: #000;
|
||||||
--u2-o-h-bg: #999;
|
|
||||||
--u2-o-1h-bg: #999;
|
|
||||||
--u2-o-bg: #eee;
|
|
||||||
--u2-o-1-bg: #000;
|
|
||||||
|
|
||||||
--ud-b1: a;
|
--ud-b1: a;
|
||||||
|
|
||||||
@@ -598,7 +541,7 @@ html.dy {
|
|||||||
background: var(--sel-bg);
|
background: var(--sel-bg);
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
html,body,tr,th,td,#files,a {
|
html,body,tr,th,td,#files,a,#blogout {
|
||||||
color: inherit;
|
color: inherit;
|
||||||
background: none;
|
background: none;
|
||||||
font-weight: inherit;
|
font-weight: inherit;
|
||||||
@@ -626,6 +569,7 @@ pre, code, tt, #doc, #doc>code {
|
|||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
width: 0;
|
width: 0;
|
||||||
height: 0;
|
height: 0;
|
||||||
|
color: var(--bg);
|
||||||
}
|
}
|
||||||
html .ayjump:focus {
|
html .ayjump:focus {
|
||||||
z-index: 80386;
|
z-index: 80386;
|
||||||
@@ -680,11 +624,15 @@ html.y #path {
|
|||||||
#files tbody div a {
|
#files tbody div a {
|
||||||
color: var(--tab-alt);
|
color: var(--tab-alt);
|
||||||
}
|
}
|
||||||
a, #files tbody div a:last-child {
|
a, #blogout, #files tbody div a:last-child {
|
||||||
color: var(--a);
|
color: var(--a);
|
||||||
padding: .2em;
|
padding: .2em;
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
}
|
}
|
||||||
|
#blogout {
|
||||||
|
margin: -.2em;
|
||||||
|
}
|
||||||
|
#blogout:hover,
|
||||||
a:hover {
|
a:hover {
|
||||||
color: var(--a-hil);
|
color: var(--a-hil);
|
||||||
background: var(--a-h-bg);
|
background: var(--a-h-bg);
|
||||||
@@ -885,7 +833,7 @@ html.y #path a:hover {
|
|||||||
max-width: 52em;
|
max-width: 52em;
|
||||||
}
|
}
|
||||||
.mdo.sb,
|
.mdo.sb,
|
||||||
#epi.logue.mdo>iframe {
|
.logue.mdo>iframe {
|
||||||
max-width: 54em;
|
max-width: 54em;
|
||||||
}
|
}
|
||||||
.mdo,
|
.mdo,
|
||||||
@@ -928,6 +876,9 @@ html.y #path a:hover {
|
|||||||
color: var(--srv-3);
|
color: var(--srv-3);
|
||||||
border-bottom: 1px solid var(--srv-3b);
|
border-bottom: 1px solid var(--srv-3b);
|
||||||
}
|
}
|
||||||
|
#flogout {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
#goh+span {
|
#goh+span {
|
||||||
color: var(--bg-u5);
|
color: var(--bg-u5);
|
||||||
padding-left: .5em;
|
padding-left: .5em;
|
||||||
@@ -962,6 +913,8 @@ html.y #path a:hover {
|
|||||||
#files tbody tr.play a:hover {
|
#files tbody tr.play a:hover {
|
||||||
color: var(--btn-1h-fg);
|
color: var(--btn-1h-fg);
|
||||||
background: var(--btn-1h-bg);
|
background: var(--btn-1h-bg);
|
||||||
|
box-shadow: var(--btn-1h-bs);
|
||||||
|
border-bottom: var(--btn-1h-bb);
|
||||||
}
|
}
|
||||||
#ggrid {
|
#ggrid {
|
||||||
margin: -.2em -.5em;
|
margin: -.2em -.5em;
|
||||||
@@ -970,6 +923,7 @@ html.y #path a:hover {
|
|||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
display: block;
|
display: block;
|
||||||
display: -webkit-box;
|
display: -webkit-box;
|
||||||
|
line-clamp: var(--grid-ln);
|
||||||
-webkit-line-clamp: var(--grid-ln);
|
-webkit-line-clamp: var(--grid-ln);
|
||||||
-webkit-box-orient: vertical;
|
-webkit-box-orient: vertical;
|
||||||
padding-top: .3em;
|
padding-top: .3em;
|
||||||
@@ -1016,9 +970,6 @@ html.y #path a:hover {
|
|||||||
color: var(--g-dfg);
|
color: var(--g-dfg);
|
||||||
}
|
}
|
||||||
#ggrid>a.au:before {
|
#ggrid>a.au:before {
|
||||||
content: '💾';
|
|
||||||
}
|
|
||||||
html.np_open #ggrid>a.au:before {
|
|
||||||
content: '▶';
|
content: '▶';
|
||||||
}
|
}
|
||||||
#ggrid>a:before {
|
#ggrid>a:before {
|
||||||
@@ -1147,6 +1098,7 @@ html.y #widget.open {
|
|||||||
width: 100%;
|
width: 100%;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
}
|
}
|
||||||
|
#fshr,
|
||||||
#wtgrid,
|
#wtgrid,
|
||||||
#wtico {
|
#wtico {
|
||||||
position: relative;
|
position: relative;
|
||||||
@@ -1333,6 +1285,7 @@ html.y #widget.open {
|
|||||||
#widget.cmp #wtoggle {
|
#widget.cmp #wtoggle {
|
||||||
font-size: 1.2em;
|
font-size: 1.2em;
|
||||||
}
|
}
|
||||||
|
#widget.cmp #fshr,
|
||||||
#widget.cmp #wtgrid {
|
#widget.cmp #wtgrid {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
@@ -1346,6 +1299,7 @@ html.y #widget.open {
|
|||||||
}
|
}
|
||||||
#widget.cmp #barpos,
|
#widget.cmp #barpos,
|
||||||
#widget.cmp #barbuf {
|
#widget.cmp #barbuf {
|
||||||
|
height: 1.6em;
|
||||||
width: calc(100% - 11em);
|
width: calc(100% - 11em);
|
||||||
border-radius: 0;
|
border-radius: 0;
|
||||||
left: 5em;
|
left: 5em;
|
||||||
@@ -1433,7 +1387,11 @@ input[type="checkbox"]+label {
|
|||||||
input[type="radio"]:checked+label,
|
input[type="radio"]:checked+label,
|
||||||
input[type="checkbox"]:checked+label {
|
input[type="checkbox"]:checked+label {
|
||||||
color: #0e0;
|
color: #0e0;
|
||||||
color: var(--a);
|
color: var(--btn-1-bg);
|
||||||
|
}
|
||||||
|
input[type="checkbox"]:checked+label {
|
||||||
|
box-shadow: var(--btn-1-bs);
|
||||||
|
border-bottom: var(--btn-1-bb);
|
||||||
}
|
}
|
||||||
html.dz input {
|
html.dz input {
|
||||||
font-family: 'scp', monospace, monospace;
|
font-family: 'scp', monospace, monospace;
|
||||||
@@ -1611,6 +1569,8 @@ html {
|
|||||||
color: var(--btn-fg);
|
color: var(--btn-fg);
|
||||||
background: #eee;
|
background: #eee;
|
||||||
background: var(--btn-bg);
|
background: var(--btn-bg);
|
||||||
|
box-shadow: var(--btn-bs);
|
||||||
|
border-bottom: var(--btn-bb);
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
padding: .2em .4em;
|
padding: .2em .4em;
|
||||||
font-size: 1.2em;
|
font-size: 1.2em;
|
||||||
@@ -1624,20 +1584,14 @@ html.c .btn,
|
|||||||
html.a .btn {
|
html.a .btn {
|
||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
}
|
}
|
||||||
html.cz .btn {
|
|
||||||
box-shadow: 0 .1em .6em rgba(255,0,185,0.5);
|
|
||||||
border-bottom: .2em solid #709;
|
|
||||||
}
|
|
||||||
html.dz .btn {
|
html.dz .btn {
|
||||||
font-size: 1em;
|
font-size: 1em;
|
||||||
box-shadow: 0 0 0 .1em #080 inset;
|
|
||||||
}
|
|
||||||
html.dz .tgl.btn.on {
|
|
||||||
box-shadow: 0 0 0 .1em var(--btn-1-bg) inset;
|
|
||||||
}
|
}
|
||||||
.btn:hover {
|
.btn:hover {
|
||||||
color: var(--btn-h-fg);
|
color: var(--btn-h-fg);
|
||||||
background: var(--btn-h-bg);
|
background: var(--btn-h-bg);
|
||||||
|
box-shadow: var(--btn-h-bs);
|
||||||
|
border-bottom: var(--btn-h-bb);
|
||||||
}
|
}
|
||||||
.tgl.btn.on {
|
.tgl.btn.on {
|
||||||
background: #000;
|
background: #000;
|
||||||
@@ -1645,14 +1599,14 @@ html.dz .tgl.btn.on {
|
|||||||
color: #fff;
|
color: #fff;
|
||||||
color: var(--btn-1-fg);
|
color: var(--btn-1-fg);
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
box-shadow: var(--btn-1-bs);
|
||||||
html.cz .tgl.btn.on {
|
border-bottom: var(--btn-1-bb);
|
||||||
box-shadow: 0 .1em .8em rgba(255,205,0,0.9);
|
|
||||||
border-bottom: .2em solid #e90;
|
|
||||||
}
|
}
|
||||||
.tgl.btn.on:hover {
|
.tgl.btn.on:hover {
|
||||||
background: var(--btn-1h-bg);
|
|
||||||
color: var(--btn-1h-fg);
|
color: var(--btn-1h-fg);
|
||||||
|
background: var(--btn-1h-bg);
|
||||||
|
box-shadow: var(--btn-1h-bs);
|
||||||
|
border-bottom: var(--btn-1h-bb);
|
||||||
}
|
}
|
||||||
#detree {
|
#detree {
|
||||||
padding: .3em .5em;
|
padding: .3em .5em;
|
||||||
@@ -1693,6 +1647,18 @@ html.cz .tgl.btn.on {
|
|||||||
background: var(--btn-1-bg);
|
background: var(--btn-1-bg);
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
|
#tree ul a.ld::before {
|
||||||
|
font-weight: bold;
|
||||||
|
font-family: sans-serif;
|
||||||
|
display: inline-block;
|
||||||
|
text-align: center;
|
||||||
|
width: 1em;
|
||||||
|
margin: 0 .3em 0 -1.3em;
|
||||||
|
color: var(--fg-max);
|
||||||
|
opacity: 0;
|
||||||
|
content: '◠';
|
||||||
|
animation: .5s linear infinite forwards spin, ease .25s 1 forwards fadein;
|
||||||
|
}
|
||||||
#tree ul a.par {
|
#tree ul a.par {
|
||||||
color: var(--fg-max);
|
color: var(--fg-max);
|
||||||
}
|
}
|
||||||
@@ -1729,15 +1695,24 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
line-height: 0;
|
line-height: 0;
|
||||||
}
|
}
|
||||||
.dumb_loader_thing {
|
.dumb_loader_thing {
|
||||||
display: inline-block;
|
display: block;
|
||||||
margin: 1em .3em 1em 1em;
|
margin: 1em .3em 1em 1em;
|
||||||
padding: 0 1.2em 0 0;
|
padding: 0 1.2em 0 0;
|
||||||
font-size: 4em;
|
font-size: 4em;
|
||||||
|
min-width: 1em;
|
||||||
|
min-height: 1em;
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
animation: 1s linear .15s infinite forwards spin, .2s ease .15s 1 forwards fadein;
|
animation: 1s linear .15s infinite forwards spin, .2s ease .15s 1 forwards fadein;
|
||||||
position: absolute;
|
position: fixed;
|
||||||
|
top: .3em;
|
||||||
z-index: 9;
|
z-index: 9;
|
||||||
}
|
}
|
||||||
|
#dlt_t {
|
||||||
|
left: 0;
|
||||||
|
}
|
||||||
|
#dlt_f {
|
||||||
|
right: .5em;
|
||||||
|
}
|
||||||
#files .cfg {
|
#files .cfg {
|
||||||
display: none;
|
display: none;
|
||||||
font-size: 2em;
|
font-size: 2em;
|
||||||
@@ -1857,6 +1832,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
#unpost td:nth-child(4) {
|
#unpost td:nth-child(4) {
|
||||||
text-align: right;
|
text-align: right;
|
||||||
}
|
}
|
||||||
|
#shui,
|
||||||
#rui {
|
#rui {
|
||||||
background: #fff;
|
background: #fff;
|
||||||
background: var(--bg);
|
background: var(--bg);
|
||||||
@@ -1872,13 +1848,25 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
padding: 1em;
|
padding: 1em;
|
||||||
z-index: 765;
|
z-index: 765;
|
||||||
}
|
}
|
||||||
|
#shui div+div,
|
||||||
#rui div+div {
|
#rui div+div {
|
||||||
margin-top: 1em;
|
margin-top: 1em;
|
||||||
}
|
}
|
||||||
|
#shui table,
|
||||||
#rui table {
|
#rui table {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
}
|
}
|
||||||
|
#shui button {
|
||||||
|
margin: 0 1em 0 0;
|
||||||
|
}
|
||||||
|
#shui .btn {
|
||||||
|
font-size: 1em;
|
||||||
|
}
|
||||||
|
#shui td {
|
||||||
|
padding: .8em 0;
|
||||||
|
}
|
||||||
|
#shui td+td,
|
||||||
#rui td+td {
|
#rui td+td {
|
||||||
padding: .2em 0 .2em .5em;
|
padding: .2em 0 .2em .5em;
|
||||||
}
|
}
|
||||||
@@ -1886,21 +1874,25 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
font-family: 'scp', monospace, monospace;
|
font-family: 'scp', monospace, monospace;
|
||||||
font-family: var(--font-mono), 'scp', monospace, monospace;
|
font-family: var(--font-mono), 'scp', monospace, monospace;
|
||||||
}
|
}
|
||||||
|
#shui td+td,
|
||||||
#rui td+td,
|
#rui td+td,
|
||||||
|
#shui td input[type="text"],
|
||||||
#rui td input[type="text"] {
|
#rui td input[type="text"] {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
|
#shui td.exs input[type="text"] {
|
||||||
|
width: 3em;
|
||||||
|
}
|
||||||
#rn_f.m td:first-child {
|
#rn_f.m td:first-child {
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
#rn_f.m td+td {
|
#rn_f.m td+td {
|
||||||
width: 50%;
|
width: 50%;
|
||||||
}
|
}
|
||||||
#rn_f .err td {
|
#rn_f .err td,
|
||||||
background: var(--err-bg);
|
#rn_f .err input[readonly],
|
||||||
color: var(--fg-max);
|
#rui .ng input[readonly] {
|
||||||
}
|
color: var(--err-fg);
|
||||||
#rn_f .err input[readonly] {
|
|
||||||
background: var(--err-bg);
|
background: var(--err-bg);
|
||||||
}
|
}
|
||||||
#rui input[readonly] {
|
#rui input[readonly] {
|
||||||
@@ -2684,23 +2676,25 @@ html.b #u2conf a.b:hover {
|
|||||||
#u2conf input[type="checkbox"]:checked+label {
|
#u2conf input[type="checkbox"]:checked+label {
|
||||||
position: relative;
|
position: relative;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
background: var(--u2-o-bg);
|
background: var(--btn-bg);
|
||||||
border-bottom: .2em solid var(--u2-o-b1);
|
box-shadow: var(--btn-bs);
|
||||||
box-shadow: 0 .1em .3em var(--u2-o-sh) inset;
|
border-bottom: var(--btn-bb);
|
||||||
text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
|
text-shadow: 1px 1px 1px #000, 1px -1px 1px #000, -1px -1px 1px #000, -1px 1px 1px #000;
|
||||||
}
|
}
|
||||||
#u2conf input[type="checkbox"]:checked+label {
|
#u2conf input[type="checkbox"]:checked+label {
|
||||||
background: var(--u2-o-1-bg);
|
background: var(--btn-1-bg);
|
||||||
border-bottom: .2em solid var(--u2-o-1-b1);
|
box-shadow: var(--btn-1-bs);
|
||||||
box-shadow: 0 .1em .5em var(--u2-o-1-sh);
|
border-bottom: var(--btn-1-bb);
|
||||||
}
|
}
|
||||||
#u2conf input[type="checkbox"]+label:hover {
|
#u2conf input[type="checkbox"]+label:hover {
|
||||||
box-shadow: 0 .1em .3em var(--u2-o-h-sh);
|
background: var(--btn-h-bg);
|
||||||
border-color: var(--u2-o-h-b1);
|
box-shadow: var(--btn-h-bs);
|
||||||
background: var(--u2-o-h-bg);
|
border-bottom: var(--btn-h-bb);
|
||||||
}
|
}
|
||||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||||
background: var(--u2-o-1h-bg);
|
background: var(--btn-1h-bg);
|
||||||
|
box-shadow: var(--btn-1h-bs);
|
||||||
|
border-bottom: var(--btn-1h-bb);
|
||||||
}
|
}
|
||||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||||
@@ -2800,6 +2794,7 @@ html.b #u2conf a.b:hover {
|
|||||||
padding-left: .2em;
|
padding-left: .2em;
|
||||||
}
|
}
|
||||||
.fsearch_explain {
|
.fsearch_explain {
|
||||||
|
color: var(--a-dark);
|
||||||
padding-left: .7em;
|
padding-left: .7em;
|
||||||
font-size: 1.1em;
|
font-size: 1.1em;
|
||||||
line-height: 0;
|
line-height: 0;
|
||||||
@@ -3089,18 +3084,30 @@ html.by #u2cards a.act {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
html.cy #wrap {
|
|
||||||
color: #000;
|
|
||||||
}
|
|
||||||
html.cy .mdo a {
|
html.cy .mdo a {
|
||||||
background: #f00;
|
background: #f00;
|
||||||
}
|
}
|
||||||
|
html.cy #wrap,
|
||||||
|
html.cy #acc_info a,
|
||||||
html.cy #op_up2k,
|
html.cy #op_up2k,
|
||||||
html.cy #files,
|
html.cy #files,
|
||||||
html.cy #files a,
|
html.cy #files a,
|
||||||
html.cy #files tbody div a:last-child {
|
html.cy #files tbody div a:last-child {
|
||||||
color: #000;
|
color: #000;
|
||||||
}
|
}
|
||||||
|
html.cy #u2tab a,
|
||||||
|
html.cy #u2cards a {
|
||||||
|
color: #f00;
|
||||||
|
}
|
||||||
|
html.cy #unpost a {
|
||||||
|
color: #ff0;
|
||||||
|
}
|
||||||
|
html.cy #barbuf {
|
||||||
|
filter: hue-rotate(267deg) brightness(0.8) contrast(4);
|
||||||
|
}
|
||||||
|
html.cy #pvol {
|
||||||
|
filter: hue-rotate(4deg) contrast(2.2);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -67,14 +67,14 @@
|
|||||||
<div id="op_up2k" class="opview"></div>
|
<div id="op_up2k" class="opview"></div>
|
||||||
|
|
||||||
<div id="op_cfg" class="opview opbox opwide"></div>
|
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||||
|
|
||||||
<h1 id="path">
|
<h1 id="path">
|
||||||
<a href="#" id="entree">🌲</a>
|
<a href="#" id="entree">🌲</a>
|
||||||
{%- for n in vpnodes %}
|
{%- for n in vpnodes %}
|
||||||
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
</h1>
|
</h1>
|
||||||
|
|
||||||
<div id="tree"></div>
|
<div id="tree"></div>
|
||||||
|
|
||||||
<div id="wrap">
|
<div id="wrap">
|
||||||
@@ -108,43 +108,37 @@
|
|||||||
|
|
||||||
{%- for f in files %}
|
{%- for f in files %}
|
||||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||||
{%- if f.tags is defined %}
|
{%- if f.tags is defined %}
|
||||||
{%- for k in taglist %}
|
{%- for k in taglist %}<td>{{ f.tags[k] }}</td>{%- endfor %}
|
||||||
<td>{{ f.tags[k] }}</td>
|
{%- endif %}<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||||
{%- endfor %}
|
|
||||||
{%- endif %}
|
|
||||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||||
|
|
||||||
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||||
|
|
||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{%- if srv_info %}
|
|
||||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||||
{%- endif %}
|
|
||||||
|
|
||||||
<div id="widget"></div>
|
<div id="widget"></div>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
var SR = {{ r|tojson }},
|
var SR = "{{ r }}",
|
||||||
|
CGV1 = {{ cgv1 }},
|
||||||
CGV = {{ cgv|tojson }},
|
CGV = {{ cgv|tojson }},
|
||||||
TS = "{{ ts }}",
|
TS = "{{ ts }}",
|
||||||
dtheme = "{{ dtheme }}",
|
dtheme = "{{ dtheme }}",
|
||||||
srvinf = "{{ srv_info }}",
|
srvinf = "{{ srv_info }}",
|
||||||
s_name = "{{ s_name }}",
|
|
||||||
lang = "{{ lang }}",
|
lang = "{{ lang }}",
|
||||||
dfavico = "{{ favico }}",
|
dfavico = "{{ favico }}",
|
||||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
have_tags_idx = {{ have_tags_idx }},
|
||||||
sb_lg = "{{ sb_lg }}",
|
sb_lg = "{{ sb_lg }}",
|
||||||
txt_ext = "{{ txt_ext }}",
|
|
||||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||||
ls0 = {{ ls0|tojson }};
|
ls0 = {{ ls0|tojson }};
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -11,7 +11,6 @@
|
|||||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||||
a{display:block}
|
a{display:block}
|
||||||
</style>
|
</style>
|
||||||
{{ html_head }}
|
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
@@ -52,11 +51,11 @@
|
|||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
{%- if logues[1] %}
|
{%- if logues[1] %}
|
||||||
<div>{{ logues[1] }}</div><br />
|
<div>{{ logues[1] }}</div><br />
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||||
|
|
||||||
</body>
|
</body>
|
||||||
|
|||||||
BIN
copyparty/web/iiam.gif
Normal file
BIN
copyparty/web/iiam.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 230 B |
@@ -49,7 +49,7 @@
|
|||||||
<div id="mp" class="mdo"></div>
|
<div id="mp" class="mdo"></div>
|
||||||
</div>
|
</div>
|
||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
|
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<div id="helpbox">
|
<div id="helpbox">
|
||||||
<textarea autocomplete="off">
|
<textarea autocomplete="off">
|
||||||
@@ -125,12 +125,12 @@ write markdown (most html is 🙆 too)
|
|||||||
</textarea>
|
</textarea>
|
||||||
</div>
|
</div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
|
|
||||||
var SR = {{ r|tojson }},
|
var SR = "{{ r }}",
|
||||||
last_modified = {{ lastmod }},
|
last_modified = {{ lastmod }},
|
||||||
have_emp = {{ have_emp|tojson }},
|
have_emp = {{ "true" if have_emp else "false" }},
|
||||||
dfavico = "{{ favico }}";
|
dfavico = "{{ favico }}";
|
||||||
|
|
||||||
var md_opt = {
|
var md_opt = {
|
||||||
@@ -159,5 +159,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
|||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
{%- if js %}
|
||||||
|
<script src="{{ js }}_={{ ts }}"></script>
|
||||||
|
{%- endif %}
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|
||||||
|
|||||||
@@ -17,14 +17,13 @@ var chromedbg = function () { console.log(arguments); }
|
|||||||
var dbg = function () { };
|
var dbg = function () { };
|
||||||
|
|
||||||
// replace dbg with the real deal here or in the console:
|
// replace dbg with the real deal here or in the console:
|
||||||
// dbg = chromedbg
|
// dbg = chromedbg;
|
||||||
// dbg = console.log
|
// dbg = console.log;
|
||||||
|
|
||||||
|
|
||||||
// dodge browser issues
|
// dodge browser issues
|
||||||
(function () {
|
(function () {
|
||||||
var ua = navigator.userAgent;
|
if (UA.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(UA)) {
|
||||||
if (ua.indexOf(') Gecko/') !== -1 && /Linux| Mac /.exec(ua)) {
|
|
||||||
// necessary on ff-68.7 at least
|
// necessary on ff-68.7 at least
|
||||||
var s = mknod('style');
|
var s = mknod('style');
|
||||||
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
s.innerHTML = '@page { margin: .5in .6in .8in .6in; }';
|
||||||
|
|||||||
@@ -450,7 +450,7 @@ function savechk_cb() {
|
|||||||
|
|
||||||
// firefox bug: initial selection offset isn't cleared properly through js
|
// firefox bug: initial selection offset isn't cleared properly through js
|
||||||
var ff_clearsel = (function () {
|
var ff_clearsel = (function () {
|
||||||
if (navigator.userAgent.indexOf(') Gecko/') === -1)
|
if (UA.indexOf(') Gecko/') === -1)
|
||||||
return function () { }
|
return function () { }
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
@@ -607,10 +607,10 @@ function md_newline() {
|
|||||||
var s = linebounds(true),
|
var s = linebounds(true),
|
||||||
ln = s.md.substring(s.n1, s.n2),
|
ln = s.md.substring(s.n1, s.n2),
|
||||||
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
m1 = /^( *)([0-9]+)(\. +)/.exec(ln),
|
||||||
m2 = /^[ \t>+-]*(\* )?/.exec(ln),
|
m2 = /^[ \t]*[>+*-]{0,2}[ \t]/.exec(ln),
|
||||||
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
drop = dom_src.selectionEnd - dom_src.selectionStart;
|
||||||
|
|
||||||
var pre = m2[0];
|
var pre = m2 ? m2[0] : '';
|
||||||
if (m1 !== null)
|
if (m1 !== null)
|
||||||
pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];
|
pre = m1[1] + (parseInt(m1[2]) + 1) + m1[3];
|
||||||
|
|
||||||
@@ -1078,26 +1078,28 @@ action_stack = (function () {
|
|||||||
var p1 = from.length,
|
var p1 = from.length,
|
||||||
p2 = to.length;
|
p2 = to.length;
|
||||||
|
|
||||||
while (p1-- > 0 && p2-- > 0)
|
while (p1 --> 0 && p2 --> 0)
|
||||||
if (from[p1] != to[p2])
|
if (from[p1] != to[p2])
|
||||||
break;
|
break;
|
||||||
|
|
||||||
if (car > ++p1) {
|
if (car > ++p1)
|
||||||
car = p1;
|
car = p1;
|
||||||
}
|
|
||||||
|
|
||||||
var txt = from.substring(car, p1)
|
var txt = from.substring(car, p1)
|
||||||
return {
|
return {
|
||||||
car: car,
|
car: car,
|
||||||
cdr: ++p2,
|
cdr: p2 + (car && 1),
|
||||||
txt: txt,
|
txt: txt,
|
||||||
cpos: cpos
|
cpos: cpos
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
var undiff = function (from, change) {
|
var undiff = function (from, change) {
|
||||||
|
var t1 = from.substring(0, change.car),
|
||||||
|
t2 = from.substring(change.cdr);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
txt: from.substring(0, change.car) + change.txt + from.substring(change.cdr),
|
txt: t1 + change.txt + t2,
|
||||||
cpos: change.cpos
|
cpos: change.cpos
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,9 +26,9 @@
|
|||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
<script>
|
<script>
|
||||||
|
|
||||||
var SR = {{ r|tojson }},
|
var SR = "{{ r }}",
|
||||||
last_modified = {{ lastmod }},
|
last_modified = {{ lastmod }},
|
||||||
have_emp = {{ have_emp|tojson }},
|
have_emp = {{ "true" if have_emp else "false" }},
|
||||||
dfavico = "{{ favico }}";
|
dfavico = "{{ favico }}";
|
||||||
|
|
||||||
var md_opt = {
|
var md_opt = {
|
||||||
@@ -53,5 +53,8 @@ try { l.light = drk? 0:1; } catch (ex) { }
|
|||||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||||
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||||
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
||||||
|
{%- if js %}
|
||||||
|
<script src="{{ js }}_={{ ts }}"></script>
|
||||||
|
{%- endif %}
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|
||||||
|
|||||||
@@ -46,6 +46,9 @@
|
|||||||
}, 1000);
|
}, 1000);
|
||||||
</script>
|
</script>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
{%- if js %}
|
||||||
|
<script src="{{ js }}_={{ ts }}"></script>
|
||||||
|
{%- endif %}
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
107
copyparty/web/rups.css
Normal file
107
copyparty/web/rups.css
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
/* rups.css — stylesheet for the "recent uploads" page (rups.html);
 * table body is filled client-side by rups.js */

html {
	color: #333;
	background: #f7f7f7;
	font-family: sans-serif; /* fallback if css variables are unsupported */
	font-family: var(--font-main), sans-serif;
	touch-action: manipulation; /* disable double-tap zoom on mobile */
}
#wrap {
	margin: 2em auto;
	padding: 0 1em 3em 1em;
	line-height: 2.3em;
}
a {
	color: #047;
	background: #fff;
	text-decoration: none;
	border-bottom: 1px solid #8ab;
	border-radius: .2em;
	padding: .2em .6em;
	margin: 0 .3em;
}
/* links inside table cells: tighter layout than the button-style links above */
#wrap td a {
	margin: 0;
	line-height: 1em;
	display: inline-block;
	white-space: initial;
	font-family: var(--font-main), sans-serif;
}
/* the repl link pinned to the bottom-left corner */
#repl {
	border: none;
	background: none;
	color: inherit;
	padding: 0;
	position: fixed;
	bottom: .25em;
	left: .2em;
}
#wrap table {
	border-collapse: collapse;
	position: relative;
	margin-top: 2em;
}
/* sticky header row; -1px hides the hairline gap while scrolled */
#wrap th {
	top: -1px;
	position: sticky;
	background: #f7f7f7;
}
#wrap td {
	font-family: var(--font-mono), monospace, monospace;
	white-space: pre; /*date*/
	overflow: hidden; /*ipv6*/
}
/* first column (size) is right-aligned numbers */
#wrap th:first-child,
#wrap td:first-child {
	text-align: right;
}
#wrap td,
#wrap th {
	text-align: left;
	padding: .3em .6em;
	max-width: 30vw;
}
/* row highlight on hover */
#wrap tr:hover td {
	background: #ddd;
	box-shadow: 0 -1px 0 rgba(128, 128, 128, 0.5) inset;
}
/* rounded row ends */
#wrap th:first-child,
#wrap td:first-child {
	border-radius: .5em 0 0 .5em;
}
#wrap th:last-child,
#wrap td:last-child {
	border-radius: 0 .5em .5em 0;
}



/* dark themes: html.z = dark, html.bz = blue-tinted dark */
html.z {
	background: #222;
	color: #ccc;
}
html.bz {
	background: #11121d;
	color: #bbd;
}
html.z a {
	color: #fff;
	background: #057;
	border-color: #37a;
}
html.z input[type=text] {
	color: #ddd;
	background: #223;
	border: none;
	border-bottom: 1px solid #fc5;
	border-radius: .2em;
	padding: .2em .3em;
}
/* keep the sticky header opaque in dark themes */
html.z #wrap th {
	background: #222;
}
html.bz #wrap th {
	background: #223;
}
html.z #wrap tr:hover td {
	background: #000;
}
|
||||||
50
copyparty/web/rups.html
Normal file
50
copyparty/web/rups.html
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
{# rups.html — "recent uploads" page; the table body is empty here and is
   rendered client-side by rups.js from the json blob exposed as V below #}
<!DOCTYPE html>
<html lang="en">

<head>
	<meta charset="utf-8">
	<title>{{ s_doctitle }}</title>
	<meta http-equiv="X-UA-Compatible" content="IE=edge">
	<meta name="viewport" content="width=device-width, initial-scale=0.8">
	<meta name="robots" content="noindex, nofollow">
	<meta name="theme-color" content="#{{ tcolor }}">
	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/rups.css?_={{ ts }}">
	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
	{{ html_head }}
</head>

<body>
	<div id="wrap">
		<a href="#" id="re">refresh</a>
		<a href="{{ r }}/?h">control-panel</a>
		Filter: <input type="text" id="filter" size="20" placeholder="documents/passwords" />
		<span id="hits"></span>
		{# tbody #tb is populated by render() in rups.js #}
		<table id="tab"><thead><tr>
			<th>size</th>
			<th>who</th>
			<th>when</th>
			<th>age</th>
			<th>dir</th>
			<th>file</th>
		</tr></thead><tbody id="tb"></tbody></table>
	</div>
	<a href="#" id="repl">π</a>
	<script>

var SR="{{ r }}",
	lang="{{ lang }}",
	dfavico="{{ favico }}";

var STG = window.localStorage;
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";

	</script>
	<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
	<script>var V={{ v }};</script>
	<script src="{{ r }}/.cpr/rups.js?_={{ ts }}"></script>
	{%- if js %}
	<script src="{{ js }}_={{ ts }}"></script>
	{%- endif %}
</body>

</html>
|
||||||
|
|
||||||
66
copyparty/web/rups.js
Normal file
66
copyparty/web/rups.js
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
// repaint the uploads table from the global V = {ups, now, filter}
function render() {
	var rows = V.ups,
		now = V.now,
		out = [];

	ebi('filter').value = V.filter;
	ebi('hits').innerHTML = 'showing ' + rows.length + ' files';

	for (var i = 0; i < rows.length; i++) {
		var up = rows[i],
			parts = vsplit(up.vp.split('?')[0]),
			dirname = esc(uricom_dec(parts[0])),
			filename = esc(uricom_dec(parts[1])),
			at = up.at,
			age = now - up.at,
			when = at ? unix2iso(at) : '(?)',
			agetxt = at ? (age > 60 ? shumantime(age) : (age + 's')) : '(?)',
			size = ('' + up.sz).replace(/\B(?=(\d{3})+(?!\d))/g, " ");

		out.push('<tr><td>' + size +
			'</td><td>' + up.ip +
			'</td><td>' + when +
			'</td><td>' + agetxt +
			'</td><td><a href="' + parts[0] + '">' + dirname +
			'</a></td><td><a href="' + up.vp + '">' + filename +
			'</a></td></tr>');
	}
	if (!rows.length)
		out = ['<tr><td colspan="6">there are no uploads' + (V.filter ? ' matching the filter' : '') + '</td></tr>'];

	ebi('tb').innerHTML = out.join('');
}
|
||||||
|
render();

// debounce timer for the filter input
var timer;

// fetch a fresh (filtered) upload-list from the server, then re-render
function ask(e) {
	ev(e);
	clearTimeout(timer);
	ebi('hits').innerHTML = 'Loading...';

	var filter = unsmart(ebi('filter').value),
		xhr = new XHR();

	// keep the filter in the address bar so the view is shareable
	hist_replace(get_evpath().split('?')[0] + '?ru&filter=' + uricom_enc(filter));

	xhr.onload = xhr.onerror = function () {
		try {
			V = JSON.parse(this.responseText);
		}
		catch (ex) {
			ebi('tb').innerHTML = '<tr><td colspan="6">failed to decode server response as json: <pre>' + esc(this.responseText) + '</pre></td></tr>';
			return;
		}
		render();
	};
	xhr.open('GET', SR + '/?ru&j&filter=' + uricom_enc(filter), true);
	xhr.send();
}

ebi('re').onclick = ask;

ebi('filter').oninput = function () {
	clearTimeout(timer);
	timer = setTimeout(ask, 500);
	ebi('hits').innerHTML = '...';
};

ebi('filter').onkeydown = function (e) {
	if (('' + e.key).endsWith('Enter'))
		ask();
};
|
||||||
95
copyparty/web/shares.css
Normal file
95
copyparty/web/shares.css
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/* shares.css — stylesheet for the active-shares listing page (shares.html) */

html {
	color: #333;
	background: #f7f7f7;
	font-family: sans-serif; /* fallback if css variables are unsupported */
	font-family: var(--font-main), sans-serif;
	touch-action: manipulation; /* disable double-tap zoom on mobile */
}
#wrap {
	margin: 2em auto;
	padding: 0 1em 3em 1em;
	line-height: 2.3em;
}
/* the legend snippets above the table */
#wrap>span {
	margin: 0 0 0 1em;
	border-bottom: 1px solid #999;
}
li {
	margin: 1em 0;
}
a {
	color: #047;
	background: #fff;
	text-decoration: none;
	white-space: nowrap;
	border-bottom: 1px solid #8ab;
	border-radius: .2em;
	padding: .2em .6em;
	margin: 0 .3em;
}
#wrap td a {
	margin: 0;
}
/* warning accent */
#w {
	color: #fff;
	background: #940;
	border-color: #b70;
}
/* the repl link pinned to the bottom-left corner */
#repl {
	border: none;
	background: none;
	color: inherit;
	padding: 0;
	position: fixed;
	bottom: .25em;
	left: .2em;
}
#wrap table {
	border-collapse: collapse;
	position: relative;
	margin-top: 2em;
}
/* sticky header row; -1px hides the hairline gap while scrolled */
th {
	top: -1px;
	position: sticky;
	background: #f7f7f7;
}
#wrap td,
#wrap th {
	padding: .3em .6em;
	text-align: left;
	white-space: nowrap;
}
/* columns 8+ (created / expires / min / hrs) are monospace */
#wrap td+td+td+td+td+td+td+td {
	font-family: var(--font-mono), monospace, monospace;
}
/* rounded row ends */
#wrap th:first-child,
#wrap td:first-child {
	border-radius: .5em 0 0 .5em;
}
#wrap th:last-child,
#wrap td:last-child {
	border-radius: 0 .5em .5em 0;
}



/* dark themes: html.z = dark, html.bz = blue-tinted dark */
html.z {
	background: #222;
	color: #ccc;
}
html.z a {
	color: #fff;
	background: #057;
	border-color: #37a;
}
html.z th {
	background: #222;
}
html.bz {
	color: #bbd;
	background: #11121d;
}
html.bz th {
	background: #223;
}
|
||||||
82
copyparty/web/shares.html
Normal file
82
copyparty/web/shares.html
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
{# shares.html — lists the current user's active shares; per-row controls
   (qr, delete, lifetime-bump buttons) are wired up by shares.js #}
<!DOCTYPE html>
<html lang="en">

<head>
	<meta charset="utf-8">
	<title>{{ s_doctitle }}</title>
	<meta http-equiv="X-UA-Compatible" content="IE=edge">
	<meta name="viewport" content="width=device-width, initial-scale=0.8">
	<meta name="robots" content="noindex, nofollow">
	<meta name="theme-color" content="#{{ tcolor }}">
	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
	{{ html_head }}
</head>

<body>
	<div id="wrap">
		<a href="{{ r }}/?shares">refresh</a>
		<a href="{{ r }}/?h">control-panel</a>

		<span>axs = perms (read,write,move,delet)</span>
		<span>nf = numFiles (0=dir)</span>
		<span>min/hrs = time left</span>

		<table id="tab"><thead><tr>
			<th>sharekey</th>
			<th>delete</th>
			<th>pw</th>
			<th>source</th>
			<th>axs</th>
			<th>nf</th>
			<th>user</th>
			<th>created</th>
			<th>expires</th>
			<th>min</th>
			<th>hrs</th>
			<th>add time</th>
		</tr></thead><tbody>
		{% for k, pw, vp, pr, st, un, t0, t1 in rows %}
		<tr>
			<td>
				<a href="{{ r }}{{ shr }}{{ k }}?qr">qr</a>
				<a href="{{ r }}{{ shr }}{{ k }}">{{ k }}</a>
			</td>
			{# the "k" attribute is read by shares.js to issue the delete #}
			<td><a href="#" k="{{ k }}">delete</a></td>
			<td>{{ "yes" if pw else "--" }}</td>
			<td><a href="{{ r }}/{{ vp|e }}">/{{ vp|e }}</a></td>
			<td>{{ pr }}</td>
			<td>{{ st }}</td>
			<td>{{ un|e }}</td>
			<td>{{ t0 }}</td>
			<td>{{ t1 }}</td>
			{# remaining lifetime in minutes / hours; t1==0 means no expiry #}
			<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 60) | round(1) }}</td>
			<td>{{ "inf" if not t1 else "dead" if t1 < now else ((t1 - now) / 3600) | round(1) }}</td>
			<td></td>
		</tr>
		{% endfor %}
		</tbody></table>
		{% if not rows %}
		(you don't have any active shares btw)
		{% endif %}
	</div>
	<a href="#" id="repl">π</a>
	<script>

var SR="{{ r }}",
	shr="{{ shr }}",
	lang="{{ lang }}",
	dfavico="{{ favico }}";

var STG = window.localStorage;
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";

	</script>
	<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
	<script src="{{ r }}/.cpr/shares.js?_={{ ts }}"></script>
	{%- if js %}
	<script src="{{ js }}_={{ ts }}"></script>
	{%- endif %}
</body>

</html>
|
||||||
|
|
||||||
78
copyparty/web/shares.js
Normal file
78
copyparty/web/shares.js
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
// wire up every delete-link (anchors carrying a "k" attribute)
var t = QSA('a[k]');
for (var i = 0; i < t.length; i++)
	t[i].onclick = rm;

// POST a delete request for the share named by this link's "k" attribute
function rm() {
	var xhr = new XHR(),
		u = SR + shr + uricom_enc(this.getAttribute('k')) + '?eshare=rm';

	xhr.open('POST', u, true);
	xhr.onload = xhr.onerror = cb;
	xhr.send();
}
|
||||||
|
|
||||||
|
// extend the lifetime of this row's share; button value = minutes to add
function bump() {
	var row = this.closest('tr'),
		sharekey = row.getElementsByTagName('a')[2].getAttribute('k'),
		u = SR + shr + uricom_enc(sharekey) + '?eshare=' + this.value,
		xhr = new XHR();

	xhr.open('POST', u, true);
	xhr.onload = xhr.onerror = cb;
	xhr.send();
}
|
||||||
|
|
||||||
|
// shared xhr completion handler: reload the listing, or show the error
function cb() {
	if (this.status === 200) {
		document.location = '?shares';
		return;
	}
	return modal.alert('<h6>server error</h6>' + esc(unpre(this.responseText)));
}
|
||||||
|
|
||||||
|
// show the qr-code for this share; for password-protected shares,
// optionally embed the password into the encoded url first
function qr(e) {
	ev(e);
	var href = this.href,
		pwcell = this.closest('tr').cells[2].textContent;

	// unprotected share ("pw" column is not "yes"): show it right away
	if (pwcell.indexOf('yes') === -1)
		return showqr(href);

	modal.prompt("if you want to bypass the password protection by\nembedding the password into the qr-code, then\ntype the password now, otherwise leave this empty", "", function (v) {
		if (v)
			href += "&pw=" + v;
		showqr(href);
	});
}
|
||||||
|
|
||||||
|
// popup with the human-readable share url and the qr-code image;
// the ?qr param is stripped from the displayed url but kept in the img src
function showqr(href) {
	var shown = href.replace('?qr&', '?').replace('?qr', '');
	modal.alert(esc(shown) + '<img class="b64" width="100" height="100" src="' + href + '" />');
}
|
||||||
|
|
||||||
|
// one-time table setup: hook the qr links, convert the created/expires
// columns (7..8) from unix timestamps to iso datetimes, and add the
// lifetime-bump buttons to the "add time" column (11)
(function() {
	var tab = ebi('tab').tBodies[0],
		tr = Array.prototype.slice.call(tab.rows, 0);

	// read all timestamps first, then rewrite the cells; always pass the
	// radix to parseInt so the input is unambiguously treated as base-10
	var buf = [];
	for (var a = 0; a < tr.length; a++) {
		tr[a].cells[0].getElementsByTagName('a')[0].onclick = qr;
		for (var b = 7; b < 9; b++)
			buf.push(parseInt(tr[a].cells[b].innerHTML, 10));
	}

	var ibuf = 0;
	for (var a = 0; a < tr.length; a++)
		for (var b = 7; b < 9; b++) {
			var v = buf[ibuf++];
			// 0 (no expiry) and NaN both fall back to 'never'
			tr[a].cells[b].innerHTML =
				v ? unix2iso(v).replace(' ', ', ') : 'never';
		}

	for (var a = 0; a < tr.length; a++)
		tr[a].cells[11].innerHTML =
			'<button value="1">1min</button> ' +
			'<button value="60">1h</button>';

	var btns = QSA('td button'), aa = btns.length;
	for (var a = 0; a < aa; a++)
		btns[a].onclick = bump;
})();
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user