Compare commits
417 Commits
2  .github/pull_request_template.md  (vendored, new file)
@@ -0,0 +1,2 @@
To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/
14  .gitignore  (vendored)
@@ -21,11 +21,23 @@ copyparty.egg-info/
 # winmerge
 *.bak
 
+# apple pls
+.DS_Store
+
 # derived
 copyparty/res/COPYING.txt
 copyparty/web/deps/
 srv/
+scripts/docker/i/
+contrib/package/arch/pkg/
+contrib/package/arch/src/
 
 # state/logs
 up.*.txt
 .hist/
+scripts/docker/*.out
+scripts/docker/*.err
+/perf.*
+
+# nix build output link
+result
1  .vscode/launch.json  (vendored)
@@ -8,6 +8,7 @@
     "module": "copyparty",
     "console": "integratedTerminal",
     "cwd": "${workspaceFolder}",
+    "justMyCode": false,
     "args": [
         //"-nw",
         "-ed",
10  .vscode/launch.py  (vendored)
@@ -30,9 +30,17 @@ except:
 
 argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
 
+sfx = ""
+if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
+    sfx = sys.argv[1]
+    sys.argv = [sys.argv[0]] + sys.argv[2:]
+
 argv += sys.argv[1:]
 
-if re.search(" -j ?[0-9]", " ".join(argv)):
+if sfx:
+    argv = [sys.executable, sfx] + argv
+    sp.check_call(argv)
+elif re.search(" -j ?[0-9]", " ".join(argv)):
     argv = [sys.executable, "-m", "copyparty"] + argv
     sp.check_call(argv)
 else:
31  .vscode/settings.json  (vendored)
@@ -35,35 +35,22 @@
     "python.linting.flake8Enabled": true,
     "python.linting.banditEnabled": true,
     "python.linting.mypyEnabled": true,
-    "python.linting.mypyArgs": [
-        "--ignore-missing-imports",
-        "--follow-imports=silent",
-        "--show-column-numbers",
-        "--strict"
-    ],
     "python.linting.flake8Args": [
         "--max-line-length=120",
-        "--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
+        "--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
     ],
     "python.linting.banditArgs": [
-        "--ignore=B104"
-    ],
-    "python.linting.pylintArgs": [
-        "--disable=missing-module-docstring",
-        "--disable=missing-class-docstring",
-        "--disable=missing-function-docstring",
-        "--disable=wrong-import-position",
-        "--disable=raise-missing-from",
-        "--disable=bare-except",
-        "--disable=invalid-name",
-        "--disable=line-too-long",
-        "--disable=consider-using-f-string"
+        "--ignore=B104,B110,B112"
     ],
     // python3 -m isort --py=27 --profile=black copyparty/
-    "python.formatting.provider": "black",
+    "python.formatting.provider": "none",
+    "[python]": {
+        "editor.defaultFormatter": "ms-python.black-formatter"
+    },
     "editor.formatOnSave": true,
     "[html]": {
         "editor.formatOnSave": false,
+        "editor.autoIndent": "keep",
     },
     "[css]": {
         "editor.formatOnSave": false,
@@ -71,10 +58,6 @@
     "files.associations": {
         "*.makefile": "makefile"
     },
-    "python.formatting.blackArgs": [
-        "-t",
-        "py27"
-    ],
     "python.linting.enabled": true,
     "python.pythonPath": "/usr/bin/python3"
 }
9  SECURITY.md  (new file)
@@ -0,0 +1,9 @@
# Security Policy

if you hit something extra juicy pls let me know on either of the following
* email -- `copyparty@ocv.ze` except `ze` should be `me`
* [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space`
* [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated
* [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet)

no bug bounties sorry! all i can offer is greetz in the release notes
@@ -1,7 +1,8 @@
-# [`up2k.py`](up2k.py)
+# [`u2c.py`](u2c.py)
 * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
 * file uploads, file-search, autoresume of aborted/broken uploads
-* faster than browsers
+* sync local folder to server
+* generally faster than browsers
 * if something breaks just restart it
 
 
35  bin/handlers/README.md  (new file)
@@ -0,0 +1,35 @@
replace the standard 404 / 403 responses with plugins


# usage

load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py`


# api

each plugin must define a `main()` which takes 3 arguments;

* `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself)
* `vn` is the VFS which overlaps with the requested URL, and
* `rem` is the URL remainder below the VFS mountpoint
  * so `vn.vpath + rem` == `cli.vpath` == original request


# examples

## on404

* [sorry.py](sorry.py) replies with a custom message instead of the usual 404
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
* [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff

## on403

* [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4


# notes

* on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404)
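To make the `main()` contract above concrete, here is a minimal on404 sketch in the style of the bundled examples that follow; the `vn.canonical()` call and the `"retry"` return value are borrowed from [never404.py](never404.py), while the `.txt` filter and the placeholder text are made up for illustration:

```python
# hypothetical handler: only auto-create missing .txt files, 404 everything else
def main(cli, vn, rem):
    if not rem.endswith(".txt"):
        return  # not handled here; copyparty sends its usual 404

    abspath = vn.canonical(rem)  # absolute filesystem path below the volume root
    with open(abspath, "wb") as f:
        f.write(b"autogenerated placeholder\n")

    return "retry"  # ask copyparty to retry the request, which now succeeds
```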
36  bin/handlers/caching-proxy.py  (new executable file)
@@ -0,0 +1,36 @@
# assume each requested file exists on another webserver and
# download + mirror them as they're requested
# (basically pretend we're warnish)

import os
import requests

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from copyparty.httpcli import HttpCli


def main(cli: "HttpCli", vn, rem):
    url = "https://mirrors.edge.kernel.org/alpine/" + rem
    abspath = os.path.join(vn.realpath, rem)

    # sneaky trick to preserve a requests-session between downloads
    # so it doesn't have to spend ages reopening https connections;
    # luckily we can stash it inside the copyparty client session,
    # name just has to be definitely unused so "hacapo_req_s" it is
    req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
    setattr(cli.conn, "hacapo_req_s", req_s)

    try:
        os.makedirs(os.path.dirname(abspath), exist_ok=True)
        with req_s.get(url, stream=True, timeout=69) as r:
            r.raise_for_status()
            with open(abspath, "wb", 64 * 1024) as f:
                for buf in r.iter_content(chunk_size=64 * 1024):
                    f.write(buf)
    except:
        os.unlink(abspath)
        return "false"

    return "retry"
6  bin/handlers/ip-ok.py  (new executable file)
@@ -0,0 +1,6 @@
# disable permission checks and allow access if client-ip is 1.2.3.4


def main(cli, vn, rem):
    if cli.ip == "1.2.3.4":
        return "allow"
11  bin/handlers/never404.py  (new executable file)
@@ -0,0 +1,11 @@
# create a dummy file and let copyparty return it


def main(cli, vn, rem):
    print("hello", cli.ip)

    abspath = vn.canonical(rem)
    with open(abspath, "wb") as f:
        f.write(b"404? not on MY watch!")

    return "retry"
16  bin/handlers/nooo.py  (new executable file)
@@ -0,0 +1,16 @@
# reply with an endless "noooooooooooooooooooooooo"


def say_no():
    yield b"n"
    while True:
        yield b"o" * 4096


def main(cli, vn, rem):
    cli.send_headers(None, 404, "text/plain")

    for chunk in say_no():
        cli.s.sendall(chunk)

    return "false"
7  bin/handlers/sorry.py  (new executable file)
@@ -0,0 +1,7 @@
# sends a custom response instead of the usual 404


def main(cli, vn, rem):
    msg = f"sorry {cli.ip} but {cli.vpath} doesn't exist"

    return str(cli.reply(msg.encode("utf-8"), 404, "text/plain"))
29  bin/hooks/README.md  (new file)
@@ -0,0 +1,29 @@
standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...)

these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info

run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)

> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead


# after upload
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
* [notify2.py](notify2.py) uses the json API to show more context
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable


# upload batches
these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root


# before upload
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions


# on message
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
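Several of the hooks below take the `j` flag, which swaps the hook's input from a bare filepath to a JSON document; as a sketch, the consumer below reads the same keys that the bundled hooks rely on (`ap`, `vp`, `sz`, `ip`, `user`), though the actual payload may contain more fields, and the filename is made up:

```python
#!/usr/bin/env python3
# minimal sketch of a json-mode hook, registered for example as --xau f,j,bin/hooks/print-upload.py
# (key set taken from notify2.py / discord-announce.py in this folder; not an exhaustive schema)
import json
import sys


def main():
    inf = json.loads(sys.argv[1])  # xbu/xau/xm hooks receive the json as argv[1]
    print("upload:", inf["vp"])    # path within the volume
    print("stored at:", inf["ap"])  # absolute filesystem path
    print("{} bytes from {} ({})".format(inf["sz"], inf["user"], inf["ip"]))


if __name__ == "__main__":
    main()
```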
68  bin/hooks/discord-announce.py  (new executable file)
@@ -0,0 +1,68 @@
#!/usr/bin/env python3

import sys
import json
import requests
from copyparty.util import humansize, quotep


_ = r"""
announces a new upload on discord

example usage as global config:
    --xau f,t5,j,bin/hooks/discord-announce.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xau = execute after upload
    f   = fork; don't wait for it to finish
    t5  = timeout if it's still running after 5 sec
    j   = provide upload information as json; not just the filename

replace "xau" with "xbu" to announce Before upload starts instead of After completion

# how to discord:
first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks
then use this to design your message: https://discohook.org/
"""


def main():
    WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
    WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5"

    # read info from copyparty
    inf = json.loads(sys.argv[1])
    vpath = inf["vp"]
    filename = vpath.split("/")[-1]
    url = f"https://{inf['host']}/{quotep(vpath)}"

    # compose the message to discord
    j = {
        "title": filename,
        "url": url,
        "description": url.rsplit("/", 1)[0],
        "color": 0x449900,
        "fields": [
            {"name": "Size", "value": humansize(inf["sz"])},
            {"name": "User", "value": inf["user"]},
            {"name": "IP", "value": inf["ip"]},
        ],
    }

    for v in j["fields"]:
        v["inline"] = True

    r = requests.post(WEBHOOK, json={"embeds": [j]})
    print(f"discord: {r}\n", end="")


if __name__ == "__main__":
    main()
72  bin/hooks/image-noexif.py  (new executable file)
@@ -0,0 +1,72 @@
#!/usr/bin/env python3

import os
import sys
import subprocess as sp


_ = r"""
remove exif tags from uploaded images; the eventhook edition of
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py

dependencies:
    exiftool / perl-Image-ExifTool

being an upload hook, this will take effect after upload completion
but before copyparty has hashed/indexed the file, which means that
copyparty will never index the original file, so deduplication will
not work as expected... which is mostly OK but ehhh

note: modifies the file in-place, so don't set the `f` (fork) flag

example usages; either as global config (all volumes) or as volflag:
    --xau bin/hooks/image-noexif.py
    -v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

explained:
    share fs-path srv/inc at /inc (readable by all, read-write for user ed)
    running this xau (execute-after-upload) plugin for all uploaded files
"""


# filetypes to process; ignores everything else
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")


try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p.encode("utf-8")


def main():
    fp = sys.argv[1]
    ext = fp.lower().split(".")[-1]
    if ext not in EXTS:
        return

    cwd, fn = os.path.split(fp)
    os.chdir(cwd)
    f1 = fsenc(fn)
    cmd = [
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-overwrite_original",
        b"--",
        f1,
    ]
    sp.check_output(cmd)
    print("image-noexif: stripped")


if __name__ == "__main__":
    try:
        main()
    except:
        pass
66  bin/hooks/notify.py  (new executable file)
@@ -0,0 +1,66 @@
#!/usr/bin/env python3

import os
import sys
import subprocess as sp
from plyer import notification


_ = r"""
show os notification on upload; works on windows, linux, macos, android

dependencies:
    windows: python3 -m pip install --user -U plyer
    linux:   python3 -m pip install --user -U plyer
    macos:   python3 -m pip install --user -U plyer pyobjus
    android: just termux and termux-api

example usages; either as global config (all volumes) or as volflag:
    --xau f,bin/hooks/notify.py
    -v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xau = execute after upload
    f   = fork so it doesn't block uploads
"""


try:
    from copyparty.util import humansize
except:

    def humansize(n):
        return n


def main():
    fp = sys.argv[1]
    dp, fn = os.path.split(fp)
    try:
        sz = humansize(os.path.getsize(fp))
    except:
        sz = "?"

    msg = "{} ({})\n📁 {}".format(fn, sz, dp)
    title = "File received"

    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
72  bin/hooks/notify2.py  (new executable file)
@@ -0,0 +1,72 @@
#!/usr/bin/env python3

import json
import os
import sys
import subprocess as sp
from datetime import datetime
from plyer import notification


_ = r"""
same as notify.py but with additional info (uploader, ...)
and also supports --xm (notify on 📟 message)

example usages; either as global config (all volumes) or as volflag:
    --xm f,j,bin/hooks/notify2.py
    --xau f,j,bin/hooks/notify2.py
    -v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
    -v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads / msgs with the params listed below)

parameters explained,
    xau = execute after upload
    f   = fork so it doesn't block uploads
    j   = provide json instead of filepath list
"""


try:
    from copyparty.util import humansize
except:

    def humansize(n):
        return n


def main():
    inf = json.loads(sys.argv[1])
    fp = inf["ap"]
    sz = humansize(inf["sz"])
    dp, fn = os.path.split(fp)
    mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")

    msg = f"{fn} ({sz})\n📁 {dp}"
    title = "File received"
    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""

    if inf.get("txt"):
        msg = inf["txt"]
        title = "Message received"
        icon = "mail-unread-symbolic" if sys.platform == "linux" else ""

    msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"

    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
35  bin/hooks/reject-extension.py  (new executable file)
@@ -0,0 +1,35 @@
#!/usr/bin/env python3

import sys


_ = r"""
reject file uploads by file extension

example usage as global config:
    --xbu c,bin/hooks/reject-extension.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xbu = execute before upload
    c   = check result, reject upload if error
"""


def main():
    bad = "exe scr com pif bat ps1 jar msi"

    ext = sys.argv[1].split(".")[-1]

    sys.exit(1 if ext in bad.split() else 0)


if __name__ == "__main__":
    main()
44  bin/hooks/reject-mimetype.py  (new executable file)
@@ -0,0 +1,44 @@
#!/usr/bin/env python3

import sys
import magic


_ = r"""
reject file uploads by mimetype

dependencies (linux, macos):
    python3 -m pip install --user -U python-magic

dependencies (windows):
    python3 -m pip install --user -U python-magic-bin

example usage as global config:
    --xau c,bin/hooks/reject-mimetype.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xau = execute after upload
    c   = check result, reject upload if error
"""


def main():
    ok = ["image/jpeg", "image/png"]

    mt = magic.from_file(sys.argv[1], mime=True)

    print(mt)

    sys.exit(1 if mt not in ok else 0)


if __name__ == "__main__":
    main()
60  bin/hooks/wget.py  (new executable file)
@@ -0,0 +1,60 @@
#!/usr/bin/env python3

import os
import sys
import json
import subprocess as sp


_ = r"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

example usage as global config:
    --xm f,j,t3600,bin/hooks/wget.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all messages with the params listed below)

parameters explained,
    xm = execute on message-to-server-log
    f = fork so it doesn't block uploads
    j = provide message information as json; not just the text
    c3 = mute all output
    t3600 = timeout and kill download after 1 hour
"""


def main():
    inf = json.loads(sys.argv[1])
    url = inf["txt"]
    if "://" not in url:
        url = "https://" + url

    os.chdir(inf["ap"])

    name = url.split("?")[0].split("/")[-1]
    tfn = "-- DOWNLOADING " + name
    print(f"{tfn}\n", end="")
    open(tfn, "wb").close()

    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        sp.check_call(cmd)
    except:
        t = "-- FAILED TO DOWNLOAD " + name
        print(f"{t}\n", end="")
        open(t, "wb").close()

    os.unlink(tfn)


if __name__ == "__main__":
    main()
108  bin/hooks/xiu-sha.py  (new executable file)
@@ -0,0 +1,108 @@
#!/usr/bin/env python3

import hashlib
import json
import sys
from datetime import datetime


_ = r"""
this hook will produce a single sha512 file which
covers all recent uploads (plus metadata comments)

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i5,j,bin/hooks/xiu-sha.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on batches of uploads with the params listed below)

parameters explained,
    xiu = execute after uploads...
    i5  = ...after volume has been idle for 5sec
    j   = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


def humantime(ts):
    return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")


def find_files_root(inf):
    di = 9000
    for f1, f2 in zip(inf, inf[1:]):
        p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
        p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
        di = min(len(p1), len(p2), di)
        di = next((i for i in range(di) if p1[i] != p2[i]), di)

    return di + 1


def find_vol_root(inf):
    return len(inf[0]["ap"][: -len(inf[0]["vp"])])


def main():
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    # root directory (where to put the sha512 file);
    # di = find_files_root(inf)  # next to the file closest to volume root
    di = find_vol_root(inf)  # top of the entire volume

    ret = []
    total_sz = 0
    for md in inf:
        ap = md["ap"]
        rp = ap[di:]
        total_sz += md["sz"]
        fsize = "{:,}".format(md["sz"])
        mtime = humantime(md["mt"])
        up_ts = humantime(md["at"])

        h = hashlib.sha512()
        with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
            while True:
                buf = f.read(512 * 1024)
                if not buf:
                    break

                h.update(buf)

        cksum = h.hexdigest()
        meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
        ret.append("# {}\n{} *{}".format(meta, cksum, rp))

    ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
    ret.append("")
    ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
    fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
    with open(fsenc(fp), "wb") as f:
        f.write("\n".join(ret).encode("utf-8", "replace"))

    print("wrote checksums to {}".format(fp))


if __name__ == "__main__":
    main()
50  bin/hooks/xiu.py  (new executable file)
@@ -0,0 +1,50 @@
#!/usr/bin/env python3

import json
import sys


_ = r"""
this hook prints absolute filepaths + total size

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i1,j,bin/hooks/xiu.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on batches of uploads with the params listed below)

parameters explained,
    xiu = execute after uploads...
    i1  = ...after volume has been idle for 1sec
    j   = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


def main():
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    total_sz = 0
    for upload in inf:
        sz = upload["sz"]
        total_sz += sz
        print("{:9} {}".format(sz, upload["ap"]))

    print("{} files, {} bytes total".format(len(inf), total_sz))


if __name__ == "__main__":
    main()
@@ -1,5 +1,9 @@
 standalone programs which take an audio file as argument
 
+you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)
+
+----
+
 **NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
 
 some of these rely on libraries which are not MIT-compatible
@@ -17,6 +21,16 @@ these do not have any problematic dependencies at all:
 * [cksum.py](./cksum.py) computes various checksums
 * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
 * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
+  * also available as an [event hook](../hooks/wget.py)
+
+
+## dangerous plugins
+
+plugins in this section should only be used with appropriate precautions:
+
+* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
+  * also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
+  * anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
 
 
 # dependencies
@@ -26,7 +40,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
 *alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
 
 * from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
-* from pypy: `keyfinder vamp`
+* from pip: `keyfinder vamp`
 
 
 # usage from copyparty
@@ -16,6 +16,10 @@ dep: ffmpeg
 """
 
 
+# save beat timestamps to ".beats/filename.txt"
+SAVE = False
+
+
 def det(tf):
     # fmt: off
     sp.check_call([
@@ -23,12 +27,11 @@ def det(tf):
         b"-nostdin",
         b"-hide_banner",
         b"-v", b"fatal",
-        b"-ss", b"13",
         b"-y", b"-i", fsenc(sys.argv[1]),
         b"-map", b"0:a:0",
         b"-ac", b"1",
         b"-ar", b"22050",
-        b"-t", b"300",
+        b"-t", b"360",
         b"-f", b"f32le",
         fsenc(tf)
     ])
@@ -47,10 +50,29 @@ def det(tf):
         print(c["list"][0]["label"].split(" ")[0])
         return
 
    # throws if detection failed:
-    bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
-    bpm = round(60 * ((len(cl) - 1) / bpm), 2)
-    print(f"{bpm:.2f}")
+    beats = [float(x["timestamp"]) for x in cl]
+    bds = [b - a for a, b in zip(beats, beats[1:])]
+    bds.sort()
+    n0 = int(len(bds) * 0.2)
+    n1 = int(len(bds) * 0.75) + 1
+    bds = bds[n0:n1]
+    bpm = sum(bds)
+    bpm = round(60 * (len(bds) / bpm), 2)
+    print(f"{bpm:.2f}")
+
+    if SAVE:
+        fdir, fname = os.path.split(sys.argv[1])
+        bdir = os.path.join(fdir, ".beats")
+        try:
+            os.mkdir(fsenc(bdir))
+        except:
+            pass
+
+        fp = os.path.join(bdir, fname) + ".txt"
+        with open(fsenc(fp), "wb") as f:
+            txt = "\n".join([f"{x:.2f}" for x in beats])
+            f.write(txt.encode("utf-8"))
 
 
 def main():
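The rewritten tempo estimate above no longer divides the span between the first and last beat; it sorts the individual beat-to-beat intervals, keeps roughly the 20th through 75th percentile of them to drop outliers (missed or doubled beats), and reports 60 times the number of kept intervals divided by their summed duration. A self-contained toy run of the same arithmetic, with fabricated timestamps:

```python
# toy illustration of the trimmed-interval BPM estimate used in the hunk above;
# the beat list is made up (about 120 bpm, with one missed beat around 2.0s)
beats = [0.0, 0.5, 1.0, 1.5, 2.5, 3.0, 3.5, 4.0]

bds = [b - a for a, b in zip(beats, beats[1:])]  # beat-to-beat intervals
bds.sort()
n0 = int(len(bds) * 0.2)       # drop the shortest ~20%
n1 = int(len(bds) * 0.75) + 1  # and the longest ~25%
bds = bds[n0:n1]

bpm = round(60 * (len(bds) / sum(bds)), 2)
print(bpm)  # 120.0; the 1.0s gap caused by the missed beat was discarded
```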
@@ -61,7 +61,7 @@ def main():
 
     os.chdir(cwd)
     f1 = fsenc(fn)
-    f2 = os.path.join(b"noexif", f1)
+    f2 = fsenc(os.path.join(b"noexif", fn))
     cmd = [
         b"exiftool",
         b"-exif:all=",
@@ -57,6 +57,7 @@ hash -r
 	command -v python3 && pybin=python3 || pybin=python
 }
 
+$pybin -c 'import numpy' ||
 $pybin -m pip install --user numpy
 
 
@@ -224,7 +225,7 @@ install_vamp() {
 	$pybin -m pip install --user vamp
 
 	cd "$td"
-	echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
+	echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
 		printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
 		(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
 		sha512sum -c <(
@@ -1,6 +1,11 @@
 #!/usr/bin/env python3
 
 """
+WARNING -- DANGEROUS PLUGIN --
+if someone is able to upload files to a copyparty which is
+running this plugin, they can execute malware on your machine
+so please keep this on a LAN and protect it with a password
+
 use copyparty as a chromecast replacement:
 * post a URL and it will open in the default browser
 * upload a file and it will open in the default application
@@ -10,16 +15,17 @@ use copyparty as a chromecast replacement:
 
 the android app makes it a breeze to post pics and links:
   https://github.com/9001/party-up/releases
-(iOS devices have to rely on the web-UI)
 
-goes without saying, but this is HELLA DANGEROUS,
-GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
+iOS devices can use the web-UI or the shortcut instead:
+  https://github.com/9001/copyparty#ios-shortcuts
 
-example copyparty config to use this:
---urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
+example copyparty config to use this;
+lets the user "kevin" with password "hunter2" use this plugin:
+-a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
 
 recommended deps:
-apt install xdotool libnotify-bin
+apt install xdotool libnotify-bin mpv
+python3 -m pip install --user -U streamlink yt-dlp
 https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
 
 and you probably want `twitter-unmute.user.js` from the res folder
@@ -63,8 +69,10 @@ set -e
 EOF
 chmod 755 /usr/local/bin/chromium-browser
 
-# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
-cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
+# start the server
+# note 1: replace hunter2 with a better password to access the server
+# note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff
+cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
 
 """
 
@@ -72,11 +80,23 @@ cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mt
 import os
 import sys
 import time
+import shutil
 import subprocess as sp
 from urllib.parse import unquote_to_bytes as unquote
+from urllib.parse import quote
+
+have_mpv = shutil.which("mpv")
+have_vlc = shutil.which("vlc")
 
 
 def main():
+    if len(sys.argv) > 2 and sys.argv[1] == "x":
+        # invoked on commandline for testing;
+        # python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ
+        txt = " ".join(sys.argv[2:])
+        txt = quote(txt.replace(" ", "+"))
+        return open_post(txt.encode("utf-8"))
+
     fp = os.path.abspath(sys.argv[1])
     with open(fp, "rb") as f:
         txt = f.read(4096)
@@ -92,7 +112,7 @@ def open_post(txt):
     try:
         k, v = txt.split(" ", 1)
     except:
-        open_url(txt)
+        return open_url(txt)
 
     if k == "key":
         sp.call(["xdotool", "key"] + v.split(" "))
@@ -128,6 +148,17 @@ def open_url(txt):
     # else:
     #     sp.call(["xdotool", "getactivewindow", "windowminimize"])  # minimizes the focused windo
 
+    # mpv is probably smart enough to use streamlink automatically
+    if try_mpv(txt):
+        print("mpv got it")
+        return
+
+    # or maybe streamlink would be a good choice to open this
+    if try_streamlink(txt):
+        print("streamlink got it")
+        return
+
+    # nope,
     # close any error messages:
     sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
     # sp.call(["xdotool", "key", "ctrl+alt+d"])  # doesnt work at all
@@ -136,4 +167,39 @@ def open_url(txt):
     sp.call(["xdg-open", txt])
 
 
+def try_mpv(url):
+    t0 = time.time()
+    try:
+        print("trying mpv...")
+        sp.check_call(["mpv", "--fs", url])
+        return True
+    except:
+        # if it ran for 15 sec it probably succeeded and terminated
+        t = time.time()
+        return t - t0 > 15
+
+
+def try_streamlink(url):
+    t0 = time.time()
+    try:
+        import streamlink
+
+        print("trying streamlink...")
+        streamlink.Streamlink().resolve_url(url)
+
+        if have_mpv:
+            args = "-m streamlink -p mpv -a --fs"
+        else:
+            args = "-m streamlink"
+
+        cmd = [sys.executable] + args.split() + [url, "best"]
+        t0 = time.time()
+        sp.check_call(cmd)
+        return True
+    except:
+        # if it ran for 10 sec it probably succeeded and terminated
+        t = time.time()
+        return t - t0 > 10
+
+
 main()
@@ -1,6 +1,11 @@
 #!/usr/bin/env python3
 
 """
+DEPRECATED -- replaced by event hooks;
+https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py
+
+---
+
 use copyparty as a file downloader by POSTing URLs as
 application/x-www-form-urlencoded (for example using the
 message/pager function on the website)
@@ -997,7 +997,7 @@ def main():
     ap.add_argument(
         "-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
     )
-    ap.add_argument("-a", metavar="PASSWORD", help="password")
+    ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
     ap.add_argument("-d", action="store_true", help="enable debug")
     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
     ap.add_argument("-td", action="store_true", help="disable certificate check")
@@ -4,8 +4,9 @@ set -e
 # runs copyparty (or any other program really) in a chroot
 #
 # assumption: these directories, and everything within, are owned by root
-sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
+sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
+	[ -e $v ] && sysdirs+=($v)
+done
 
 # error-handler
 help() { cat <<'EOF'
@@ -38,7 +39,7 @@ while true; do
 	v="$1"; shift
 	[ "$v" = -- ] && break  # end of volumes
 	[ "$#" -eq 0 ] && break  # invalid usage
-	vols+=( "$(realpath "$v")" )
+	vols+=( "$(realpath "$v" || echo "$v")" )
 done
 pybin="$1"; shift
 pybin="$(command -v "$pybin")"
@@ -82,7 +83,7 @@ jail="${jail%/}"
 printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
 while IFS= read -r v; do
 	[ -e "$v" ] || {
-		# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
+		printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
 		continue
 	}
 	i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
@@ -97,9 +98,11 @@ done
 
 cln() {
 	rv=$?
-	# cleanup if not in use
-	lsof "$jail" | grep -qF "$jail" &&
-		echo "chroot is in use, will not cleanup" ||
+	wait -f -p rv $p || true
+	cd /
+	echo "stopping chroot..."
+	lsof "$jail" | grep -F "$jail" &&
+		echo "chroot is in use; will not unmount" ||
 	{
 		mount | grep -F " on $jail" |
 		awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
@@ -115,6 +118,15 @@ mkdir -p "$jail/tmp"
 chmod 777 "$jail/tmp"
 
 
+# create a dev
+(cd $jail; mkdir -p dev; cd dev
+[ -e null ]    || mknod -m 666 null    c 1 3
+[ -e zero ]    || mknod -m 666 zero    c 1 5
+[ -e random ]  || mknod -m 444 random  c 1 8
+[ -e urandom ] || mknod -m 444 urandom c 1 9
+)
+
+
 # run copyparty
 export HOME=$(getent passwd $uid | cut -d: -f6)
 export USER=$(getent passwd $uid | cut -d: -f1)
@@ -124,5 +136,6 @@ export LOGNAME="$USER"
 #echo "cpp [$cpp]"
 chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
 p=$!
+trap 'kill -USR1 $p' USR1
 trap 'kill $p' INT TERM
 wait
|||||||
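A typical invocation of this chroot wrapper, mirroring the prisonparty systemd unit further down in this changeset (jail path, uid/gid and config path are illustrative):

    ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
        /usr/bin/python3 /usr/bin/copyparty-sfx.py -c /etc/copyparty.d/init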
@@ -1,16 +1,17 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals

+S_VERSION = "1.9"
+S_BUILD_DT = "2023-05-07"
+
 """
-up2k.py: upload to copyparty
-2022-11-29, v0.22, ed <irc.rizon.net>, MIT-Licensed
-https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
+u2c.py: upload to copyparty
+2021, ed <irc.rizon.net>, MIT-Licensed
+https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py

 - dependencies: requests
-- supports python 2.6, 2.7, and 3.3 through 3.11
-- almost zero error-handling
-- but if something breaks just try again and it'll autoresume
+- supports python 2.6, 2.7, and 3.3 through 3.12
+- if something breaks just try again and it'll autoresume
 """

 import os
@@ -20,12 +21,15 @@ import math
 import time
 import atexit
 import signal
+import socket
 import base64
 import hashlib
 import platform
 import threading
 import datetime

+EXE = sys.executable.endswith("exe")
+
 try:
     import argparse
 except:
@@ -36,12 +40,15 @@ except:
 try:
     import requests
 except ImportError:
-    if sys.version_info > (2, 7):
+    if EXE:
+        raise
+    elif sys.version_info > (2, 7):
         m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
     else:
         m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
         m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
         m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
+        m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"

     print(m.format(sys.executable))
     sys.exit(1)
@@ -52,6 +59,7 @@ PY2 = sys.version_info < (3,)
 if PY2:
     from Queue import Queue
     from urllib import quote, unquote
+    from urlparse import urlsplit, urlunsplit

     sys.dont_write_bytecode = True
     bytes = str

@@ -59,6 +67,7 @@ else:
     from queue import Queue
     from urllib.parse import unquote_to_bytes as unquote
     from urllib.parse import quote_from_bytes as quote
+    from urllib.parse import urlsplit, urlunsplit

     unicode = str
@@ -246,7 +255,13 @@ def eprint(*a, **ka):
 def flushing_print(*a, **ka):
-    _print(*a, **ka)
+    try:
+        _print(*a, **ka)
+    except:
+        v = " ".join(str(x) for x in a)
+        v = v.encode("ascii", "replace").decode("ascii")
+        _print(v, **ka)
+
     if "flush" not in ka:
         sys.stdout.flush()
@@ -262,10 +277,10 @@ def termsize():
         try:
             import fcntl, termios, struct

-            cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
+            r = struct.unpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
+            return r[::-1]
         except:
-            return
-        return cr
+            return None

     cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
     if not cr:

@@ -275,12 +290,11 @@ def termsize():
             os.close(fd)
         except:
             pass
-    if not cr:
     try:
-        cr = (env["LINES"], env["COLUMNS"])
+        return cr or (int(env["COLUMNS"]), int(env["LINES"]))
     except:
-        cr = (25, 80)
-    return int(cr[1]), int(cr[0])
+        return 80, 25


 class CTermsize(object):
@@ -326,6 +340,32 @@ class CTermsize(object):
 ss = CTermsize()


+def undns(url):
+    usp = urlsplit(url)
+    hn = usp.hostname
+    gai = None
+    eprint("resolving host [{0}] ...".format(hn), end="")
+    try:
+        gai = socket.getaddrinfo(hn, None)
+        hn = gai[0][4][0]
+    except KeyboardInterrupt:
+        raise
+    except:
+        t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
+        eprint(t.format(repr(gai)))
+        raise
+
+    if usp.port:
+        hn = "{0}:{1}".format(hn, usp.port)
+    if usp.username or usp.password:
+        hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)
+
+    usp = usp._replace(netloc=hn)
+    url = urlunsplit(usp)
+    eprint(" {0}".format(url))
+    return url
+
+
 def _scd(err, top):
     """non-recursive listing of directory contents, along with stat() info"""
     with os.scandir(top) as dh:
@@ -362,26 +402,46 @@ def walkdir(err, top, seen):
     seen = seen[:] + [atop]
     for ap, inf in sorted(statdir(err, top)):
+        yield ap, inf
         if stat.S_ISDIR(inf.st_mode):
             try:
                 for x in walkdir(err, ap, seen):
                     yield x
             except Exception as ex:
                 err.append((ap, str(ex)))
-        else:
-            yield ap, inf


 def walkdirs(err, tops):
     """recursive statdir for a list of tops, yields [top, relpath, stat]"""
     sep = "{0}".format(os.sep).encode("ascii")
+    if not VT100:
+        za = []
+        for td in tops:
+            try:
+                ap = os.path.abspath(os.path.realpath(td))
+                if td[-1:] in (b"\\", b"/"):
+                    ap += sep
+            except:
+                # maybe cpython #88013 (ok)
+                ap = td
+
+            za.append(ap)
+
+        za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
+        za = [x.replace(b"/", b"\\") for x in za]
+        tops = za
+
     for top in tops:
+        isdir = os.path.isdir(top)
         if top[-1:] == sep:
             stop = top.rstrip(sep)
+            yield stop, b"", os.stat(stop)
         else:
-            stop = os.path.dirname(top)
+            stop, dn = os.path.split(top)
+            if isdir:
+                yield stop, dn, os.stat(stop)

-        if os.path.isdir(top):
+        if isdir:
             for ap, inf in walkdir(err, top, []):
                 yield stop, ap[len(stop) :].lstrip(sep), inf
         else:
@@ -472,14 +532,17 @@ def get_hashlist(file, pcb, mth):
         file.kchunks[k] = [v1, v2]


-def handshake(url, file, pw, search):
-    # type: (str, File, Any, bool) -> tuple[list[str], bool]
+def handshake(ar, file, search):
+    # type: (argparse.Namespace, File, bool) -> tuple[list[str], bool]
     """
     performs a handshake with the server; reply is:
     if search, a list of search results
     otherwise, a list of chunks to upload
     """

+    url = ar.url
+    pw = ar.a
+
     req = {
         "hash": [x[0] for x in file.cids],
         "name": file.name,
@@ -488,36 +551,49 @@ def handshake(url, file, pw, search):
     }
     if search:
         req["srch"] = 1
+    elif ar.dr:
+        req["replace"] = True

-    headers = {"Content-Type": "text/plain"} # wtf ed
+    headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
     if pw:
         headers["Cookie"] = "=".join(["cppwd", pw])

+    file.recheck = False
     if file.url:
         url = file.url
     elif b"/" in file.rel:
         url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")

     while True:
+        sc = 600
+        txt = ""
         try:
             r = req_ses.post(url, headers=headers, json=req)
-            break
+            sc = r.status_code
+            txt = r.text
+            if sc < 400:
+                break
+
+            raise Exception("http {0}: {1}".format(sc, txt))
+
         except Exception as ex:
-            em = str(ex).split("SSLError(")[-1]
+            em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
+
+            if (
+                sc == 422
+                or "<pre>partial upload exists at a different" in txt
+                or "<pre>source file busy; please try again" in txt
+            ):
+                file.recheck = True
+                return [], False
+            elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
+                return [], False
+            elif "<pre>you don't have " in txt:
+                raise
+
             eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
             time.sleep(1)

-    sc = r.status_code
-    if sc >= 400:
-        txt = r.text
-        if sc == 422 or "<pre>partial upload exists at a different" in txt:
-            file.recheck = True
-            return [], False
-        elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
-            return [], False
-
-        raise Exception("http {0}: {1}".format(sc, txt))
-
     try:
         r = r.json()
     except:
@@ -539,8 +615,8 @@ def handshake(url, file, pw, search):
     return r["hash"], r["sprs"]


-def upload(file, cid, pw):
-    # type: (File, str, Any) -> None
+def upload(file, cid, pw, stats):
+    # type: (File, str, str, str) -> None
     """upload one specific chunk, `cid` (a chunk-hash)"""

     headers = {

@@ -548,6 +624,10 @@ def upload(file, cid, pw):
         "X-Up2k-Wark": file.wark,
         "Content-Type": "application/octet-stream",
     }
+
+    if stats:
+        headers["X-Up2k-Stat"] = stats
+
     if pw:
         headers["Cookie"] = "=".join(["cppwd", pw])
@@ -564,27 +644,20 @@ def upload(file, cid, pw):
 class Ctl(object):
     """
-    this will be the coordinator which runs everything in parallel
-    (hashing, handshakes, uploads) but right now it's p dumb
+    the coordinator which runs everything in parallel
+    (hashing, handshakes, uploads)
     """

-    def __init__(self, ar):
-        self.ar = ar
-        ar.files = [
-            os.path.abspath(os.path.realpath(x.encode("utf-8")))
-            + (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
-            for x in ar.files
-        ]
-        ar.url = ar.url.rstrip("/") + "/"
-        if "://" not in ar.url:
-            ar.url = "http://" + ar.url
+    def _scan(self):
+        ar = self.ar

         eprint("\nscanning {0} locations\n".format(len(ar.files)))

         nfiles = 0
         nbytes = 0
         err = []
         for _, _, inf in walkdirs(err, ar.files):
+            if stat.S_ISDIR(inf.st_mode):
+                continue
+
             nfiles += 1
             nbytes += inf.st_size
@@ -606,8 +679,16 @@ class Ctl(object):
             return

         eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
-        self.nfiles = nfiles
-        self.nbytes = nbytes
+        return nfiles, nbytes
+
+    def __init__(self, ar, stats=None):
+        self.ok = False
+        self.ar = ar
+        self.stats = stats or self._scan()
+        if not self.stats:
+            return
+
+        self.nfiles, self.nbytes = self.stats

         if ar.td:
             requests.packages.urllib3.disable_warnings()
@@ -616,6 +697,8 @@ class Ctl(object):
             req_ses.verify = ar.te

         self.filegen = walkdirs([], ar.files)
+        self.recheck = [] # type: list[File]
+
         if ar.safe:
             self._safe()
         else:
@@ -634,11 +717,11 @@ class Ctl(object):
         self.t0 = time.time()
         self.t0_up = None
         self.spd = None
+        self.eta = "99:99:99"

         self.mutex = threading.Lock()
         self.q_handshake = Queue() # type: Queue[File]
         self.q_upload = Queue() # type: Queue[tuple[File, str]]
-        self.recheck = [] # type: list[File]

         self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
         self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
@@ -647,10 +730,15 @@ class Ctl(object):
             self._fancy()

+        self.ok = True
+
     def _safe(self):
         """minimal basic slow boring fallback codepath"""
         search = self.ar.s
         for nf, (top, rel, inf) in enumerate(self.filegen):
+            if stat.S_ISDIR(inf.st_mode) or not rel:
+                continue
+
             file = File(top, rel, inf.st_size, inf.st_mtime)
             upath = file.abs.decode("utf-8", "replace")
@@ -660,7 +748,7 @@ class Ctl(object):
             burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
             while True:
                 print(" hs...")
-                hs, _ = handshake(self.ar.url, file, self.ar.a, search)
+                hs, _ = handshake(self.ar, file, search)
                 if search:
                     if hs:
                         for hit in hs:

@@ -677,7 +765,8 @@ class Ctl(object):
                 ncs = len(hs)
                 for nc, cid in enumerate(hs):
                     print(" {0} up {1}".format(ncs - nc, cid))
-                    upload(file, cid, self.ar.a)
+                    stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
+                    upload(file, cid, self.ar.a, stats)

             print(" ok!")
             if file.recheck:
@@ -688,10 +777,10 @@ class Ctl(object):
         eprint("finalizing {0} duplicate files".format(len(self.recheck)))
         for file in self.recheck:
-            handshake(self.ar.url, file, self.ar.a, search)
+            handshake(self.ar, file, search)

     def _fancy(self):
-        if VT100:
+        if VT100 and not self.ar.ns:
             atexit.register(self.cleanup_vt100)
             ss.scroll_region(3)
@@ -715,7 +804,7 @@ class Ctl(object):
             else:
                 idles = 0

-            if VT100:
+            if VT100 and not self.ar.ns:
                 maxlen = ss.w - len(str(self.nfiles)) - 14
                 txt = "\033[s\033[{0}H".format(ss.g)
                 for y, k, st, f in [
@@ -752,12 +841,12 @@ class Ctl(object):
                 eta = (self.nbytes - self.up_b) / (spd + 1)

             spd = humansize(spd)
-            eta = str(datetime.timedelta(seconds=int(eta)))
+            self.eta = str(datetime.timedelta(seconds=int(eta)))
             sleft = humansize(self.nbytes - self.up_b)
             nleft = self.nfiles - self.up_f
-            tail = "\033[K\033[u" if VT100 else "\r"
+            tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"

-            t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
+            t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
             eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))

             if not self.recheck:
@@ -765,7 +854,7 @@ class Ctl(object):
         eprint("finalizing {0} duplicate files".format(len(self.recheck)))
         for file in self.recheck:
-            handshake(self.ar.url, file, self.ar.a, False)
+            handshake(self.ar, file, False)

     def cleanup_vt100(self):
         ss.scroll_region(None)
@@ -778,8 +867,10 @@ class Ctl(object):
         prd = None
         ls = {}
         for top, rel, inf in self.filegen:
-            if self.ar.z:
-                rd = os.path.dirname(rel)
+            isdir = stat.S_ISDIR(inf.st_mode)
+            if self.ar.z or self.ar.drd:
+                rd = rel if isdir else os.path.dirname(rel)
+                srd = rd.decode("utf-8", "replace").replace("\\", "/")
                 if prd != rd:
                     prd = rd
                     headers = {}

@@ -788,19 +879,37 @@ class Ctl(object):
                     ls = {}
                     try:
-                        print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
-                        r = req_ses.get(
-                            self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
-                            headers=headers,
-                        )
-                        for f in r.json()["files"]:
-                            rfn = f["href"].split("?")[0].encode("utf-8", "replace")
-                            ls[unquote(rfn)] = f
-                    except:
-                        print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
+                        print(" ls ~{0}".format(srd))
+                        zb = self.ar.url.encode("utf-8")
+                        zb += quotep(rd.replace(b"\\", b"/"))
+                        r = req_ses.get(zb + b"?ls<&dots", headers=headers)
+                        if not r:
+                            raise Exception("HTTP {0}".format(r.status_code))
+
+                        j = r.json()
+                        for f in j["dirs"] + j["files"]:
+                            rfn = f["href"].split("?")[0].rstrip("/")
+                            ls[unquote(rfn.encode("utf-8", "replace"))] = f
+                    except Exception as ex:
+                        print(" mkdir ~{0} ({1})".format(srd, ex))
+
+                    if self.ar.drd:
+                        dp = os.path.join(top, rd)
+                        lnodes = set(os.listdir(dp))
+                        bnames = [x for x in ls if x not in lnodes]
+                        if bnames:
+                            vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
+                            names = [x.decode("utf-8", "replace") for x in bnames]
+                            locs = [vpath + srd + "/" + x for x in names]
+                            print("DELETING ~{0}/#{1}".format(srd, len(names)))
+                            req_ses.post(self.ar.url + "?delete", json=locs)
+
+            if isdir:
+                continue
+
+            if self.ar.z:
                 rf = ls.get(os.path.basename(rel), None)
-                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
+                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 2:
                     self.nfiles -= 1
                     self.nbytes -= inf.st_size
                     continue
@@ -850,7 +959,10 @@ class Ctl(object):
                 self.handshaker_busy += 1

             upath = file.abs.decode("utf-8", "replace")
-            hs, sprs = handshake(self.ar.url, file, self.ar.a, search)
+            if not VT100:
+                upath = upath.lstrip("\\?")
+
+            hs, sprs = handshake(self.ar, file, search)
             if search:
                 if hs:
                     for hit in hs:
@@ -883,6 +995,9 @@ class Ctl(object):
                 self.up_c += len(file.cids) - file.up_c
                 self.up_b += file.size - file.up_b

+            if not file.recheck:
+                self.up_done(file)
+
             if hs and file.up_c:
                 # some chunks failed
                 self.up_c -= len(hs)
@@ -912,12 +1027,24 @@ class Ctl(object):
             self.uploader_busy += 1
             self.t0_up = self.t0_up or time.time()

+            zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
+            stats = zs.format(
+                self.up_f,
+                len(self.recheck),
+                self.uploader_busy,
+                self.nfiles - self.up_f,
+                int(self.nbytes / (1024 * 1024)),
+                int((self.nbytes - self.up_b) / (1024 * 1024)),
+                self.eta,
+            )
+
             file, cid = task
             try:
-                upload(file, cid, self.ar.a)
-            except:
-                eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
-                pass # handshake will fix it
+                upload(file, cid, self.ar.a, stats)
+            except Exception as ex:
+                t = "upload failed, retrying: {0} #{1} ({2})\n"
+                eprint(t.format(file.name, cid[:8], ex))
+                # handshake will fix it

             with self.mutex:
                 sz = file.kchunks[cid][1]
@@ -933,6 +1060,10 @@ class Ctl(object):
             self.up_c += 1
             self.uploader_busy -= 1

+    def up_done(self, file):
+        if self.ar.dl:
+            os.unlink(file.abs)
+

 class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
     pass
@@ -946,8 +1077,15 @@ def main():
     cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
     hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U

+    ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
+    if "--version" in sys.argv:
+        print(ver)
+        return
+
+    sys.argv = [x for x in sys.argv if x != "--ws"]
+
     # fmt: off
-    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
+    ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
 NOTE:
 source file/folder selection uses rsync syntax, meaning that:
   "foo" uploads the entire folder to URL/foo/
@@ -957,21 +1095,92 @@ source file/folder selection uses rsync syntax, meaning that:
     ap.add_argument("url", type=unicode, help="server url, including destination folder")
     ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
     ap.add_argument("-v", action="store_true", help="verbose")
-    ap.add_argument("-a", metavar="PASSWORD", help="password")
+    ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
     ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
     ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
+    ap.add_argument("--version", action="store_true", help="show version and exit")
+
+    ap = app.add_argument_group("compatibility")
+    ap.add_argument("--cls", action="store_true", help="clear screen before start")
+    ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
+
+    ap = app.add_argument_group("folder sync")
+    ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
+    ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally")
+    ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")
+
     ap = app.add_argument_group("performance tweaks")
     ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
     ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
     ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
+    ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
     ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
     ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

     ap = app.add_argument_group("tls")
     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
     ap.add_argument("-td", action="store_true", help="disable certificate check")
     # fmt: on

-    Ctl(app.parse_args())
+    try:
+        ar = app.parse_args()
+    finally:
+        if EXE and not sys.argv[1:]:
+            eprint("*** hit enter to exit ***")
+            try:
+                input()
+            except:
+                pass
+
+    if ar.drd:
+        ar.dr = True
+
+    for k in "dl dr drd".split():
+        errs = []
+        if ar.safe and getattr(ar, k):
+            errs.append(k)
+
+        if errs:
+            raise Exception("--safe is incompatible with " + str(errs))
+
+    ar.files = [
+        os.path.abspath(os.path.realpath(x.encode("utf-8")))
+        + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
+        for x in ar.files
+    ]
+
+    ar.url = ar.url.rstrip("/") + "/"
+    if "://" not in ar.url:
+        ar.url = "http://" + ar.url
+
+    if ar.a and ar.a.startswith("$"):
+        fn = ar.a[1:]
+        print("reading password from file [{0}]".format(fn))
+        with open(fn, "rb") as f:
+            ar.a = f.read().decode("utf-8").strip()
+
+    for n in range(ar.rh):
+        try:
+            ar.url = undns(ar.url)
+            break
+        except KeyboardInterrupt:
+            raise
+        except:
+            if n > ar.rh - 2:
+                raise
+
+    if ar.cls:
+        eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
+
+    ctl = Ctl(ar)
+
+    if ar.dr and not ar.drd and ctl.ok:
+        print("\npass 2/2: delete")
+        ar.drd = True
+        ar.z = True
+        ctl = Ctl(ar, ctl.stats)
+
+    sys.exit(0 if ctl.ok else 1)


 if __name__ == "__main__":
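A possible invocation of the updated uploader, illustrating the new "$filepath" password syntax and the folder-sync flags added above (server URL, folder names and the password-file path are made up for illustration):

    python3 u2c.py -a '$/run/keys/cpp-pw' --dr http://127.0.0.1:3923/inc/ ./photos

With --dr, the upload pass runs first and, if it succeeds, main() re-runs Ctl with drd+z enabled as a second "delete" pass that removes remote files which no longer exist locally.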
@@ -29,11 +29,11 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
 * disables thumbnails and folder-type detection in windows explorer
 * makes it way faster (especially for slow/networked locations (such as partyfuse))

-### [`webdav-basicauth.reg`](webdav-basicauth.reg)
-* enables webdav basic-auth over plaintext http; takes effect after a reboot OR after running `webdav-unlimit.bat`
-
-### [`webdav-unlimit.bat`](webdav-unlimit.bat)
-* removes the 47.6 MiB filesize limit when downloading from webdav
+### [`webdav-cfg.reg`](webdav-cfg.bat)
+* improves the native webdav support in windows;
+* removes the 47.6 MiB filesize limit when downloading from webdav
+* optionally enables webdav basic-auth over plaintext http
+* optionally helps disable wpad, removing the 10sec latency

 ### [`cfssl.sh`](cfssl.sh)
 * creates CA and server certificates using cfssl
contrib/apache/copyparty.conf (new file, 14 lines)
@@ -0,0 +1,14 @@
+# when running copyparty behind a reverse proxy,
+# the following arguments are recommended:
+#
+# -i 127.0.0.1 only accept connections from nginx
+#
+# if you are doing location-based proxying (such as `/stuff` below)
+# you must run copyparty with --rp-loc=stuff
+#
+# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
+
+LoadModule proxy_module modules/mod_proxy.so
+ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
+# do not specify ProxyPassReverse
+RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
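A matching copyparty invocation for this apache config could look roughly like the following (a sketch based on the comments above; only the flags named there are used, everything else is defaults):

    python3 copyparty-sfx.py -i 127.0.0.1 --rp-loc=stuff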
@@ -1,14 +1,44 @@
 #!/bin/bash
 set -e

+cat >/dev/null <<'EOF'
+
+NOTE: copyparty is now able to do this automatically;
+however you may wish to use this script instead if
+you have specific needs (or if copyparty breaks)
+
+this script generates a new self-signed TLS certificate and
+replaces the default insecure one that comes with copyparty
+
+as it is trivial to impersonate a copyparty server using the
+default certificate, it is highly recommended to do this
+
+this will create a self-signed CA, and a Server certificate
+which gets signed by that CA -- you can run it multiple times
+with different server-FQDNs / IPs to create additional certs
+for all your different servers / (non-)copyparty services
+
+EOF
+
+
 # ca-name and server-fqdn
 ca_name="$1"
 srv_fqdn="$2"

-[ -z "$srv_fqdn" ] && {
-    echo "need arg 1: ca name"
-    echo "need arg 2: server fqdn and/or IPs, comma-separated"
-    echo "optional arg 3: if set, write cert into copyparty cfg"
+[ -z "$srv_fqdn" ] && { cat <<'EOF'
+need arg 1: ca name
+need arg 2: server fqdn and/or IPs, comma-separated
+optional arg 3: if set, write cert into copyparty cfg
+
+example:
+  ./cfssl.sh PartyCo partybox.local y
+EOF
+    exit 1
+}
+
+command -v cfssljson 2>/dev/null || {
+    echo please install cfssl and try again
     exit 1
 }
||||||
|
|
||||||
@@ -59,12 +89,14 @@ show() {
 }
 show ca.pem
 show "$srv_fqdn.pem"
+echo
+echo "successfully generated new certificates"

 # write cert into copyparty config
 [ -z "$3" ] || {
     mkdir -p ~/.config/copyparty
     cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
+    echo "successfully replaced copyparty certificate"
 }
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
|
|
||||||
 <head>
 <meta charset="utf-8">
-<title>⇆🎉 redirect</title>
+<title>💾🎉 redirect</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <style>
contrib/ios/upload-to-copyparty.shortcut (new binary file, not shown)
@@ -1,7 +1,6 @@
 # when running copyparty behind a reverse proxy,
 # the following arguments are recommended:
 #
-# --http-only lower latency on initial connection
 # -i 127.0.0.1 only accept connections from nginx
 #
 # -nc must match or exceed the webserver's max number of concurrent clients;

@@ -9,7 +8,9 @@
 # nginx default is 512 (worker_processes 1, worker_connections 512)
 #
 # you may also consider adding -j0 for CPU-intensive configurations
-# (not that i can really think of any good examples)
+# (5'000 requests per second, or 20gbps upload/download in parallel)
+#
+# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1

 upstream cpp {
     server 127.0.0.1:3923;

@@ -37,3 +38,9 @@ server {
         proxy_set_header Connection "Keep-Alive";
     }
 }
+
+# default client_max_body_size (1M) blocks uploads larger than 256 MiB
+client_max_body_size 1024M;
+client_header_timeout 610m;
+client_body_timeout 610m;
+send_timeout 610m;
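A copyparty invocation matching the comments in this nginx config might look like the following sketch (only flags named above; -nc 512 mirrors nginx's default connection limit):

    python3 copyparty-sfx.py -i 127.0.0.1 -nc 512 -j0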
contrib/nixos/modules/copyparty.nix (new file, 282 lines)
@@ -0,0 +1,282 @@
|
|||||||
|
{ config, pkgs, lib, ... }:
|
||||||
|
|
||||||
|
with lib;
|
||||||
|
|
||||||
|
let
|
||||||
|
mkKeyValue = key: value:
|
||||||
|
if value == true then
|
||||||
|
# sets with a true boolean value are coerced to just the key name
|
||||||
|
key
|
||||||
|
else if value == false then
|
||||||
|
# or omitted completely when false
|
||||||
|
""
|
||||||
|
else
|
||||||
|
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
|
||||||
|
|
||||||
|
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
|
||||||
|
|
||||||
|
mkValueString = value:
|
||||||
|
if isList value then
|
||||||
|
(concatStringsSep ", " (map mkValueString value))
|
||||||
|
else if isAttrs value then
|
||||||
|
"\n" + (mkAttrsString value)
|
||||||
|
else
|
||||||
|
(generators.mkValueStringDefault { } value);
|
||||||
|
|
||||||
|
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
|
||||||
|
|
||||||
|
mkSection = name: attrs: ''
|
||||||
|
${mkSectionName name}
|
||||||
|
${mkAttrsString attrs}
|
||||||
|
'';
|
||||||
|
|
||||||
|
mkVolume = name: attrs: ''
|
||||||
|
${mkSectionName name}
|
||||||
|
${attrs.path}
|
||||||
|
${mkAttrsString {
|
||||||
|
accs = attrs.access;
|
||||||
|
flags = attrs.flags;
|
||||||
|
}}
|
||||||
|
'';
|
||||||
|
|
||||||
|
passwordPlaceholder = name: "{{password-${name}}}";
|
||||||
|
|
||||||
|
accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name);
|
||||||
|
|
||||||
|
configStr = ''
|
||||||
|
${mkSection "global" cfg.settings}
|
||||||
|
${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)}
|
||||||
|
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
||||||
|
'';
|
||||||
|
|
||||||
|
name = "copyparty";
|
||||||
|
cfg = config.services.copyparty;
|
||||||
|
configFile = pkgs.writeText "${name}.conf" configStr;
|
||||||
|
runtimeConfigPath = "/run/${name}/${name}.conf";
|
||||||
|
home = "/var/lib/${name}";
|
||||||
|
defaultShareDir = "${home}/data";
|
||||||
|
in {
|
||||||
|
options.services.copyparty = {
|
||||||
|
enable = mkEnableOption "web-based file manager";
|
||||||
|
|
||||||
|
package = mkOption {
|
||||||
|
type = types.package;
|
||||||
|
default = pkgs.copyparty;
|
||||||
|
defaultText = "pkgs.copyparty";
|
||||||
|
description = ''
|
||||||
|
Package of the application to run, exposed for overriding purposes.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
openFilesLimit = mkOption {
|
||||||
|
default = 4096;
|
||||||
|
type = types.either types.int types.str;
|
||||||
|
description = "Number of files to allow copyparty to open.";
|
||||||
|
};
|
||||||
|
|
||||||
|
settings = mkOption {
|
||||||
|
type = types.attrs;
|
||||||
|
description = ''
|
||||||
|
Global settings to apply.
|
||||||
|
Directly maps to values in the [global] section of the copyparty config.
|
||||||
|
See `${getExe cfg.package} --help` for more details.
|
||||||
|
'';
|
||||||
|
default = {
|
||||||
|
i = "127.0.0.1";
|
||||||
|
no-reload = true;
|
||||||
|
};
|
||||||
|
example = literalExpression ''
|
||||||
|
{
|
||||||
|
i = "0.0.0.0";
|
||||||
|
no-reload = true;
|
||||||
|
}
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
accounts = mkOption {
|
||||||
|
type = types.attrsOf (types.submodule ({ ... }: {
|
||||||
|
options = {
|
||||||
|
passwordFile = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
description = ''
|
||||||
|
Runtime file path to a file containing the user password.
|
||||||
|
Must be readable by the copyparty user.
|
||||||
|
'';
|
||||||
|
example = "/run/keys/copyparty/ed";
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}));
|
||||||
|
description = ''
|
||||||
|
A set of copyparty accounts to create.
|
||||||
|
'';
|
||||||
|
default = { };
|
||||||
|
example = literalExpression ''
|
||||||
|
{
|
||||||
|
ed.passwordFile = "/run/keys/copyparty/ed";
|
||||||
|
};
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
volumes = mkOption {
|
||||||
|
type = types.attrsOf (types.submodule ({ ... }: {
|
||||||
|
options = {
|
||||||
|
path = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
description = ''
|
||||||
|
Path of a directory to share.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
access = mkOption {
|
||||||
|
type = types.attrs;
|
||||||
|
description = ''
|
||||||
|
Attribute list of permissions and the users to apply them to.
|
||||||
|
|
||||||
|
The key must be a string containing any combination of allowed permission:
|
||||||
|
"r" (read): list folder contents, download files
|
||||||
|
"w" (write): upload files; need "r" to see the uploads
|
||||||
|
"m" (move): move files and folders; need "w" at destination
|
||||||
|
"d" (delete): permanently delete files and folders
|
||||||
|
"g" (get): download files, but cannot see folder contents
|
||||||
|
"G" (upget): "get", but can see filekeys of their own uploads
|
||||||
|
"a" (upget): can see uploader IPs, config-reload
|
||||||
|
|
||||||
|
For example: "rwmd"
|
||||||
|
|
||||||
|
The value must be one of:
|
||||||
|
an account name, defined in `accounts`
|
||||||
|
a list of account names
|
||||||
|
"*", which means "any account"
|
||||||
|
'';
|
||||||
|
example = literalExpression ''
|
||||||
|
{
|
||||||
|
# wG = write-upget = see your own uploads only
|
||||||
|
wG = "*";
|
||||||
|
# read-write-modify-delete for users "ed" and "k"
|
||||||
|
rwmd = ["ed" "k"];
|
||||||
|
};
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
flags = mkOption {
|
||||||
|
type = types.attrs;
|
||||||
|
description = ''
|
||||||
|
Attribute list of volume flags to apply.
|
||||||
|
See `${getExe cfg.package} --help-flags` for more details.
|
||||||
|
'';
|
||||||
|
example = literalExpression ''
|
||||||
|
{
|
||||||
|
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
|
||||||
|
fk = 4;
|
||||||
|
# scan for new files every 60sec
|
||||||
|
scan = 60;
|
||||||
|
# volflag "e2d" enables the uploads database
|
||||||
|
e2d = true;
|
||||||
|
# "d2t" disables multimedia parsers (in case the uploads are malicious)
|
||||||
|
d2t = true;
|
||||||
|
# skips hashing file contents if path matches *.iso
|
||||||
|
nohash = "\.iso$";
|
||||||
|
};
|
||||||
|
'';
|
||||||
|
default = { };
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}));
|
||||||
|
description = "A set of copyparty volumes to create";
|
||||||
|
default = {
|
||||||
|
"/" = {
|
||||||
|
path = defaultShareDir;
|
||||||
|
access = { r = "*"; };
|
||||||
|
};
|
||||||
|
};
|
||||||
|
example = literalExpression ''
|
||||||
|
{
|
||||||
|
"/" = {
|
||||||
|
path = ${defaultShareDir};
|
||||||
|
access = {
|
||||||
|
# wG = write-upget = see your own uploads only
|
||||||
|
wG = "*";
|
||||||
|
# read-write-modify-delete for users "ed" and "k"
|
||||||
|
rwmd = ["ed" "k"];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
config = mkIf cfg.enable {
|
||||||
|
systemd.services.copyparty = {
|
||||||
|
description = "http file sharing hub";
|
||||||
|
wantedBy = [ "multi-user.target" ];
|
||||||
|
|
||||||
|
environment = {
|
||||||
|
PYTHONUNBUFFERED = "true";
|
||||||
|
XDG_CONFIG_HOME = "${home}/.config";
|
||||||
|
};
|
||||||
|
|
||||||
|
preStart = let
|
||||||
|
replaceSecretCommand = name: attrs:
|
||||||
|
"${getExe pkgs.replace-secret} '${
|
||||||
|
passwordPlaceholder name
|
||||||
|
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
||||||
|
in ''
|
||||||
|
set -euo pipefail
|
||||||
|
install -m 600 ${configFile} ${runtimeConfigPath}
|
||||||
|
${concatStringsSep "\n"
|
||||||
|
(mapAttrsToList replaceSecretCommand cfg.accounts)}
|
||||||
|
'';
|
||||||
|
|
||||||
|
serviceConfig = {
|
||||||
|
Type = "simple";
|
||||||
|
ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
||||||
|
|
||||||
|
# Hardening options
|
||||||
|
User = "copyparty";
|
||||||
|
Group = "copyparty";
|
||||||
|
RuntimeDirectory = name;
|
||||||
|
RuntimeDirectoryMode = "0700";
|
||||||
|
StateDirectory = [ name "${name}/data" "${name}/.config" ];
|
||||||
|
StateDirectoryMode = "0700";
|
||||||
|
WorkingDirectory = home;
|
||||||
|
TemporaryFileSystem = "/:ro";
|
||||||
|
BindReadOnlyPaths = [
|
||||||
|
"/nix/store"
|
||||||
|
"-/etc/resolv.conf"
|
||||||
|
"-/etc/nsswitch.conf"
|
||||||
|
"-/etc/hosts"
|
||||||
|
"-/etc/localtime"
|
||||||
|
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
||||||
|
BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
||||||
|
# Would re-mount paths ignored by temporary root
|
||||||
|
#ProtectSystem = "strict";
|
||||||
|
ProtectHome = true;
|
||||||
|
PrivateTmp = true;
|
||||||
|
PrivateDevices = true;
|
||||||
|
ProtectKernelTunables = true;
|
||||||
|
ProtectControlGroups = true;
|
||||||
|
RestrictSUIDSGID = true;
|
||||||
|
PrivateMounts = true;
|
||||||
|
ProtectKernelModules = true;
|
||||||
|
ProtectKernelLogs = true;
|
||||||
|
ProtectHostname = true;
|
||||||
|
ProtectClock = true;
|
||||||
|
ProtectProc = "invisible";
|
||||||
|
ProcSubset = "pid";
|
||||||
|
RestrictNamespaces = true;
|
||||||
|
RemoveIPC = true;
|
||||||
|
UMask = "0077";
|
||||||
|
LimitNOFILE = cfg.openFilesLimit;
|
||||||
|
NoNewPrivileges = true;
|
||||||
|
LockPersonality = true;
|
||||||
|
RestrictRealtime = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
users.groups.copyparty = { };
|
||||||
|
users.users.copyparty = {
|
||||||
|
description = "Service user for copyparty";
|
||||||
|
group = "copyparty";
|
||||||
|
home = home;
|
||||||
|
isSystemUser = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -14,5 +14,5 @@ name="$SVCNAME"
 command_background=true
 pidfile="/var/run/$SVCNAME.pid"

-command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
+command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py"
 command_args="-q -v /mnt::rw"
contrib/package/arch/PKGBUILD (new file, 55 lines)
@@ -0,0 +1,55 @@
|
|||||||
|
# Maintainer: icxes <dev.null@need.moe>
|
||||||
|
pkgname=copyparty
|
||||||
|
pkgver="1.8.6"
|
||||||
|
pkgrel=1
|
||||||
|
pkgdesc="Portable file sharing hub"
|
||||||
|
arch=("any")
|
||||||
|
url="https://github.com/9001/${pkgname}"
|
||||||
|
license=('MIT')
|
||||||
|
depends=("python" "lsof" "python-jinja")
|
||||||
|
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||||
|
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||||
|
"python-mutagen: music tags (alternative)"
|
||||||
|
"python-pillow: thumbnails for images"
|
||||||
|
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||||
|
"libkeyfinder-git: detection of musical keys"
|
||||||
|
"qm-vamp-plugins: BPM detection"
|
||||||
|
"python-pyopenssl: ftps functionality"
|
||||||
|
"python-argon2_cffi: hashed passwords in config"
|
||||||
|
"python-impacket-git: smb support (bad idea)"
|
||||||
|
)
|
||||||
|
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||||
|
backup=("etc/${pkgname}.d/init" )
|
||||||
|
sha256sums=("a37aacc30b9bec375ff6e7815fd763ec555b9bfbd70415aefdd18552c6491faa")
|
||||||
|
|
||||||
|
build() {
|
||||||
|
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||||
|
|
||||||
|
pushd copyparty/web
|
||||||
|
make -j$(nproc)
|
||||||
|
rm Makefile
|
||||||
|
popd
|
||||||
|
|
||||||
|
python3 -m build -wn
|
||||||
|
}
|
||||||
|
|
||||||
|
package() {
|
||||||
|
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||||
|
python3 -m installer -d "$pkgdir" dist/*.whl
|
||||||
|
|
||||||
|
install -dm755 "${pkgdir}/etc/${pkgname}.d"
|
||||||
|
install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||||
|
install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
|
||||||
|
install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
|
||||||
|
install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
|
||||||
|
install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||||
|
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||||
|
|
||||||
|
find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
|
||||||
|
echo "┏━━━━━━━━━━━━━━━──-"
|
||||||
|
echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/"
|
||||||
|
echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:"
|
||||||
|
echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service (standard)"
|
||||||
|
echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)"
|
||||||
|
echo "┗━━━━━━━━━━━━━━━──-"
|
||||||
|
}
|
||||||
contrib/package/arch/copyparty.conf (new file, 7 lines)
@@ -0,0 +1,7 @@
+## import all *.conf files from the current folder (/etc/copyparty.d)
+% ./
+
+# add additional .conf files to this folder;
+# see example config files for reference:
+# https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf
+# https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d
contrib/package/arch/copyparty.service (new file, 32 lines)
@@ -0,0 +1,32 @@
|
|||||||
|
# this will start `/usr/bin/copyparty-sfx.py`
|
||||||
|
# and read config from `/etc/copyparty.d/*.conf`
|
||||||
|
#
|
||||||
|
# you probably want to:
|
||||||
|
# change "User=cpp" and "/home/cpp/" to another user
|
||||||
|
#
|
||||||
|
# unless you add -q to disable logging, you may want to remove the
|
||||||
|
# following line to allow buffering (slightly better performance):
|
||||||
|
# Environment=PYTHONUNBUFFERED=x
|
||||||
|
|
||||||
|
[Unit]
|
||||||
|
Description=copyparty file server
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=notify
|
||||||
|
SyslogIdentifier=copyparty
|
||||||
|
Environment=PYTHONUNBUFFERED=x
|
||||||
|
WorkingDirectory=/var/lib/copyparty-jail
|
||||||
|
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||||
|
|
||||||
|
# user to run as + where the TLS certificate is (if any)
|
||||||
|
User=cpp
|
||||||
|
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||||
|
|
||||||
|
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||||
|
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||||
|
|
||||||
|
# run copyparty
|
||||||
|
ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
contrib/package/arch/index.md (new file, 3 lines)
@@ -0,0 +1,3 @@
this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured

please add some `*.conf` files to `/etc/copyparty.d/`
contrib/package/arch/prisonparty.service (new file, 31 lines)
@@ -0,0 +1,31 @@
# this will start `/usr/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere
# and read config from `/etc/copyparty.d/*.conf`
#
# expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--`
#
# `1000 1000` = what user to run copyparty as
#
# unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x

[Unit]
Description=copyparty file server

[Service]
SyslogIdentifier=prisonparty
Environment=PYTHONUNBUFFERED=x
WorkingDirectory=/var/lib/copyparty-jail
ExecReload=/bin/kill -s USR1 $MAINPID

# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'

# run copyparty
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
    /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init

[Install]
WantedBy=multi-user.target
contrib/package/nix/copyparty/default.nix (new file, 59 lines)
@@ -0,0 +1,59 @@
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,

# use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true,

# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
withThumbnails ? true,

# create thumbnails with PyVIPS; even faster, uses more memory
# -- can be combined with Pillow to support more filetypes
withFastThumbnails ? false,

# enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus
# -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface
# -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both
withMediaProcessing ? true,

# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
withBasicAudioMetadata ? false,

# enable FTPS support in the FTP server
withFTPS ? false,

# samba/cifs server; dangerous and buggy, enable if you really need it
withSMB ? false,

}:

let
  pinData = lib.importJSON ./pin.json;
  pyEnv = python.withPackages (ps:
    with ps; [
      jinja2
    ]
    ++ lib.optional withSMB impacket
    ++ lib.optional withFTPS pyopenssl
    ++ lib.optional withThumbnails pillow
    ++ lib.optional withFastThumbnails pyvips
    ++ lib.optional withMediaProcessing ffmpeg
    ++ lib.optional withBasicAudioMetadata mutagen
    ++ lib.optional withHashedPasswords argon2-cffi
  );
in stdenv.mkDerivation {
  pname = "copyparty";
  version = pinData.version;
  src = fetchurl {
    url = pinData.url;
    hash = pinData.hash;
  };
  buildInputs = [ makeWrapper ];
  dontUnpack = true;
  dontBuild = true;
  installPhase = ''
    install -Dm755 $src $out/share/copyparty-sfx.py
    makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
      --set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
      --add-flags "$out/share/copyparty-sfx.py"
  '';
}
contrib/package/nix/copyparty/pin.json (new file, 5 lines)
@@ -0,0 +1,5 @@
{
  "url": "https://github.com/9001/copyparty/releases/download/v1.8.6/copyparty-sfx.py",
  "version": "1.8.6",
  "hash": "sha256-yTcMW4QVf1QH8jfYpn5BdG5LXilcrmakdbTk9NsVTGE="
}
contrib/package/nix/copyparty/update.py (new executable file, 77 lines)
@@ -0,0 +1,77 @@
#!/usr/bin/env python3

# Update the Nix package pin
#
# Usage: ./update.py [PATH]
# When [PATH] is not set, it will fetch the latest release from the repo.
# With [PATH] set, it will hash the given file and generate the URL,
# based on the version contained within the file

import base64
import json
import hashlib
import sys
import re
from pathlib import Path

OUTPUT_FILE = Path("pin.json")
TARGET_ASSET = "copyparty-sfx.py"
HASH_TYPE = "sha256"
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"
DOWNLOAD_URL = lambda version: f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}"


def get_formatted_hash(binary):
    hasher = hashlib.new("sha256")
    hasher.update(binary)
    asset_hash = hasher.digest()
    encoded_hash = base64.b64encode(asset_hash).decode("ascii")
    return f"{HASH_TYPE}-{encoded_hash}"


def version_from_sfx(binary):
    result = re.search(b'^VER = "(.*)"$', binary, re.MULTILINE)
    if result:
        return result.groups(1)[0].decode("ascii")

    raise ValueError("version not found in provided file")


def remote_release_pin():
    import requests

    response = requests.get(LATEST_RELEASE_URL).json()
    version = response["tag_name"].lstrip("v")
    asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET][0]
    download_url = asset_info["browser_download_url"]
    asset = requests.get(download_url)
    formatted_hash = get_formatted_hash(asset.content)

    result = {"url": download_url, "version": version, "hash": formatted_hash}
    return result


def local_release_pin(path):
    asset = path.read_bytes()
    version = version_from_sfx(asset)
    download_url = DOWNLOAD_URL(version)
    formatted_hash = get_formatted_hash(asset)

    result = {"url": download_url, "version": version, "hash": formatted_hash}
    return result


def main():
    if len(sys.argv) > 1:
        asset_path = Path(sys.argv[1])
        result = local_release_pin(asset_path)
    else:
        result = remote_release_pin()

    print(result)
    json_result = json.dumps(result, indent=4)
    OUTPUT_FILE.write_text(json_result)


if __name__ == "__main__":
    main()
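The `hash` field written to pin.json is an SRI-style value (`sha256-` plus the base64 digest), which is exactly what get_formatted_hash() above produces and what fetchurl in default.nix consumes. As a quick cross-check, a minimal sketch (the file names are assumptions, not part of the repo) that recomputes the hash of a locally downloaded copyparty-sfx.py and compares it against the pin:

# sketch: verify a local copyparty-sfx.py against the SRI hash in pin.json
import base64, hashlib, json
from pathlib import Path

pin = json.loads(Path("pin.json").read_text())
algo, want = pin["hash"].split("-", 1)
digest = hashlib.new(algo, Path("copyparty-sfx.py").read_bytes()).digest()
got = base64.b64encode(digest).decode("ascii")
print("ok" if got == want else "mismatch; re-run update.py")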
@@ -1,13 +1,22 @@
 <!--
+NOTE: DEPRECATED; please use the javascript version instead:
+https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js
+
+----
+
 save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
 https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
 
+only works if you disable the prologue/epilogue sandbox with --no-sb-lg
+which should probably be combined with --no-dot-ren to prevent damage
+(`no_sb_lg` can also be set per-volume with volflags)
+
 -->
 
 <style>
 
 /* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
 
-#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
+#ops, #tree, #path, #wfp, /* main tabs and navigators (tree/breadcrumbs) */
 
 #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */

@@ -17,7 +17,7 @@ almost the same as minimal-up2k.html except this one...:
 var u2min = `
 <style>
 
-#ops, #path, #tree, #files, #epi+div+h2,
+#ops, #path, #tree, #files, #wfp,
 #u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
 display: none !important;
 }

@@ -55,5 +55,5 @@ var u2min = `
 if (!has(perms, 'read')) {
     var e2 = mknod('div');
     e2.innerHTML = u2min;
-    ebi('wrap').insertBefore(e2, QS('#epi+h2'));
+    ebi('wrap').insertBefore(e2, QS('#wfp'));
 }
208
contrib/plugins/rave.js
Normal file
208
contrib/plugins/rave.js
Normal file
@@ -0,0 +1,208 @@
|
|||||||
|
/* untz untz untz untz */
|
||||||
|
|
||||||
|
(function () {
|
||||||
|
|
||||||
|
var can, ctx, W, H, fft, buf, bars, barw, pv,
|
||||||
|
hue = 0,
|
||||||
|
ibeat = 0,
|
||||||
|
beats = [9001],
|
||||||
|
beats_url = '',
|
||||||
|
uofs = 0,
|
||||||
|
ops = ebi('ops'),
|
||||||
|
raving = false,
|
||||||
|
recalc = 0,
|
||||||
|
cdown = 0,
|
||||||
|
FC = 0.9,
|
||||||
|
css = `<style>
|
||||||
|
|
||||||
|
#fft {
|
||||||
|
position: fixed;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
box-shadow: inset 0 0 0 white;
|
||||||
|
}
|
||||||
|
#ops>a,
|
||||||
|
#path>a {
|
||||||
|
display: inline-block;
|
||||||
|
}
|
||||||
|
/*
|
||||||
|
body.untz {
|
||||||
|
animation: untz-body 200ms ease-out;
|
||||||
|
}
|
||||||
|
@keyframes untz-body {
|
||||||
|
0% {inset 0 0 20em white}
|
||||||
|
100% {inset 0 0 0 white}
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
:root, html.a, html.b, html.c, html.d, html.e {
|
||||||
|
--row-alt: rgba(48,52,78,0.2);
|
||||||
|
}
|
||||||
|
#files td {
|
||||||
|
background: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
</style>`;
|
||||||
|
|
||||||
|
QS('body').appendChild(mknod('div', null, css));
|
||||||
|
|
||||||
|
function rave_load() {
|
||||||
|
console.log('rave_load');
|
||||||
|
can = mknod('canvas', 'fft');
|
||||||
|
QS('body').appendChild(can);
|
||||||
|
ctx = can.getContext('2d');
|
||||||
|
|
||||||
|
fft = new AnalyserNode(actx, {
|
||||||
|
"fftSize": 2048,
|
||||||
|
"maxDecibels": 0,
|
||||||
|
"smoothingTimeConstant": 0.7,
|
||||||
|
});
|
||||||
|
ibeat = 0;
|
||||||
|
beats = [9001];
|
||||||
|
buf = new Uint8Array(fft.frequencyBinCount);
|
||||||
|
bars = buf.length * FC;
|
||||||
|
afilt.filters.push(fft);
|
||||||
|
if (!raving) {
|
||||||
|
raving = true;
|
||||||
|
raver();
|
||||||
|
}
|
||||||
|
beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
|
||||||
|
console.log("reading beats from", beats_url);
|
||||||
|
var xhr = new XHR();
|
||||||
|
xhr.open('GET', beats_url, true);
|
||||||
|
xhr.onload = readbeats;
|
||||||
|
xhr.url = beats_url;
|
||||||
|
xhr.send();
|
||||||
|
}
|
||||||
|
|
||||||
|
function rave_unload() {
|
||||||
|
qsr('#fft');
|
||||||
|
can = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function readbeats() {
|
||||||
|
if (this.url != beats_url)
|
||||||
|
return console.log('old beats??', this.url, beats_url);
|
||||||
|
|
||||||
|
var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g);
|
||||||
|
if (sbeats.length < 3)
|
||||||
|
return;
|
||||||
|
|
||||||
|
beats = [];
|
||||||
|
for (var a = 0; a < sbeats.length; a++)
|
||||||
|
beats.push(parseFloat(sbeats[a]));
|
||||||
|
|
||||||
|
var end = beats.slice(-2),
|
||||||
|
t = end[1],
|
||||||
|
d = t - end[0];
|
||||||
|
|
||||||
|
while (d > 0.1 && t < 1200)
|
||||||
|
beats.push(t += d);
|
||||||
|
}
|
||||||
|
|
||||||
|
function hrand() {
|
||||||
|
return Math.random() - 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
function raver() {
|
||||||
|
if (!can) {
|
||||||
|
raving = false;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
requestAnimationFrame(raver);
|
||||||
|
if (!mp || !mp.au || mp.au.paused)
|
||||||
|
return;
|
||||||
|
|
||||||
|
if (--uofs >= 0) {
|
||||||
|
document.body.style.marginLeft = hrand() * uofs + 'px';
|
||||||
|
ebi('tree').style.marginLeft = hrand() * uofs + 'px';
|
||||||
|
for (var a of QSA('#ops>a, #path>a, #pctl>a'))
|
||||||
|
a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)'
|
||||||
|
}
|
||||||
|
|
||||||
|
if (--recalc < 0) {
|
||||||
|
recalc = 60;
|
||||||
|
var tree = ebi('tree'),
|
||||||
|
x = tree.style.display == 'none' ? 0 : tree.offsetWidth;
|
||||||
|
|
||||||
|
//W = can.width = window.innerWidth - x;
|
||||||
|
//H = can.height = window.innerHeight;
|
||||||
|
//H = ebi('widget').offsetTop;
|
||||||
|
W = can.width = bars;
|
||||||
|
H = can.height = 512;
|
||||||
|
barw = 1; //parseInt(0.8 + W / bars);
|
||||||
|
can.style.left = x + 'px';
|
||||||
|
can.style.width = (window.innerWidth - x) + 'px';
|
||||||
|
can.style.height = ebi('widget').offsetTop + 'px';
|
||||||
|
}
|
||||||
|
|
||||||
|
//if (--cdown == 1)
|
||||||
|
// clmod(ops, 'untz');
|
||||||
|
|
||||||
|
fft.getByteFrequencyData(buf);
|
||||||
|
|
||||||
|
var imax = 0, vmax = 0;
|
||||||
|
for (var a = 10; a < 50; a++)
|
||||||
|
if (vmax < buf[a]) {
|
||||||
|
vmax = buf[a];
|
||||||
|
imax = a;
|
||||||
|
}
|
||||||
|
|
||||||
|
hue = hue * 0.93 + imax * 0.07;
|
||||||
|
|
||||||
|
ctx.fillStyle = 'rgba(0,0,0,0)';
|
||||||
|
ctx.fillRect(0, 0, W, H);
|
||||||
|
ctx.clearRect(0, 0, W, H);
|
||||||
|
ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)';
|
||||||
|
|
||||||
|
var x = 0, mul = (H / 256) * 0.5;
|
||||||
|
for (var a = 0; a < buf.length * FC; a++) {
|
||||||
|
var v = buf[a] * mul * (1 + 0.69 * a / buf.length);
|
||||||
|
ctx.fillRect(x, H - v, barw, v);
|
||||||
|
x += barw;
|
||||||
|
}
|
||||||
|
|
||||||
|
var t = mp.au.currentTime + 0.05;
|
||||||
|
|
||||||
|
if (ibeat >= beats.length || beats[ibeat] > t)
|
||||||
|
return;
|
||||||
|
|
||||||
|
while (ibeat < beats.length && beats[ibeat++] < t)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
return untz();
|
||||||
|
|
||||||
|
var cv = 0;
|
||||||
|
for (var a = 0; a < 128; a++)
|
||||||
|
cv += buf[a];
|
||||||
|
|
||||||
|
if (cv - pv > 1000) {
|
||||||
|
console.log(pv, cv, cv - pv);
|
||||||
|
if (cdown < 0) {
|
||||||
|
clmod(ops, 'untz', 1);
|
||||||
|
cdown = 20;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pv = cv;
|
||||||
|
}
|
||||||
|
|
||||||
|
function untz() {
|
||||||
|
console.log('untz');
|
||||||
|
uofs = 14;
|
||||||
|
document.body.animate([
|
||||||
|
{ boxShadow: 'inset 0 0 1em #f0c' },
|
||||||
|
{ boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 },
|
||||||
|
{ boxShadow: 'inset 0 0 0 #f0c' },
|
||||||
|
], { duration: 200, iterations: 1 });
|
||||||
|
}
|
||||||
|
|
||||||
|
afilt.plugs.push({
|
||||||
|
"en": true,
|
||||||
|
"load": rave_load,
|
||||||
|
"unload": rave_unload
|
||||||
|
});
|
||||||
|
|
||||||
|
})();
|
||||||
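For reference, the beat handling in rave.js above (readbeats() plus the tail of raver()) amounts to: read one timestamp per line from the `.beats/` sidecar file, then keep appending the last inter-beat interval past the final marker, up to 1200 seconds, so the effect continues to the end of the track. A rough Python restatement of that parsing, for illustration only:

# illustration only: same idea as readbeats() in rave.js above
def load_beats(text, max_t=1200.0):
    vals = [x for x in text.replace("\r", "").split("\n") if x.strip()]
    if len(vals) < 3:
        return []
    beats = [float(x) for x in vals]
    t, d = beats[-1], beats[-1] - beats[-2]
    while d > 0.1 and t < max_t:
        t += d
        beats.append(t)
    return beats

print(load_beats("0.5\n1.0\n1.5\n", max_t=3.0))  # [0.5, 1.0, 1.5, 2.0, 2.5, 3.0]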
@@ -1,3 +1,6 @@
+# NOTE: this is now a built-in feature in copyparty
+# but you may still want this if you have specific needs
+#
 # systemd service which generates a new TLS certificate on each boot,
 # that way the one-year expiry time won't cause any issues --
 # just have everyone trust the ca.pem once every 10 years
@@ -2,12 +2,16 @@
 # and share '/mnt' with anonymous read+write
 #
 # installation:
-# cp -pv copyparty.service /etc/systemd/system
-# restorecon -vr /etc/systemd/system/copyparty.service
+# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
+# cp -pv copyparty.service /etc/systemd/system/
+# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
 # firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
 # firewall-cmd --reload
 # systemctl daemon-reload && systemctl enable --now copyparty
 #
+# if it fails to start, first check this: systemctl status copyparty
+# then try starting it while viewing logs: journalctl -fan 100
+#
 # you may want to:
 # change "User=cpp" and "/home/cpp/" to another user
 # remove the nft lines to only listen on port 3923

@@ -18,6 +22,7 @@
 # add '-i 127.0.0.1' to only allow local connections
 # add '-e2dsa' to enable filesystem scanning + indexing
 # add '-e2ts' to enable metadata indexing
+# remove '--ansi' to disable colored logs
 #
 # with `Type=notify`, copyparty will signal systemd when it is ready to
 # accept connections; correctly delaying units depending on copyparty.

@@ -44,7 +49,7 @@ ExecReload=/bin/kill -s USR1 $MAINPID
 User=cpp
 Environment=XDG_CONFIG_HOME=/home/cpp/.config
 
-# setup forwarding from ports 80 and 443 to port 3923
+# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
 ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
 ExecStartPre=+nft add table ip nat
 ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }

@@ -55,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
 ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
 
 # copyparty settings
-ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
+ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
 
 [Install]
 WantedBy=multi-user.target
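The `Type=notify` comment above says copyparty signals systemd when it is ready to accept connections; the mechanism behind that is the sd_notify protocol, where the service writes READY=1 to the datagram socket named in NOTIFY_SOCKET. A minimal sketch of that mechanism (not copyparty's actual implementation) for anyone wiring up a similar unit:

# minimal sd_notify sketch; illustrates the protocol Type=notify relies on
import os, socket

def sd_notify(state=b"READY=1"):
    addr = os.environ.get("NOTIFY_SOCKET")
    if not addr:
        return  # not launched by systemd with Type=notify
    if addr.startswith("@"):
        addr = "\0" + addr[1:]  # abstract socket namespace
    with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as s:
        s.sendto(state, addr)

sd_notify()  # call once the listeners are up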
@@ -6,12 +6,17 @@
 # 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
 # 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
 #
+# expose additional filesystem locations to copyparty
+# by listing them between the last `1000` and `--`
+#
+# `1000 1000` = what user to run copyparty as
+#
 # you may want to:
 # change '/mnt::rw' to another location or permission-set
 # (remember to change the '/mnt' chroot arg too)
 #
-# enable line-buffering for realtime logging (slight performance cost):
-# inside the [Service] block, add the following line:
+# unless you add -q to disable logging, you may want to remove the
+# following line to allow buffering (slightly better performance):
 # Environment=PYTHONUNBUFFERED=x
 
 [Unit]

@@ -19,7 +24,14 @@ Description=copyparty file server
 
 [Service]
 SyslogIdentifier=prisonparty
-WorkingDirectory=/usr/local/bin
+Environment=PYTHONUNBUFFERED=x
+WorkingDirectory=/var/lib/copyparty-jail
+ExecReload=/bin/kill -s USR1 $MAINPID
+
+# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
+ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
+
+# run copyparty
 ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
     /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
 
@@ -1,9 +1,7 @@
|
|||||||
@echo off
|
@echo off
|
||||||
rem removes the 47.6 MiB filesize limit when downloading from webdav
|
rem removes the 47.6 MiB filesize limit when downloading from webdav
|
||||||
rem + optionally allows/enables password-auth over plaintext http
|
rem + optionally allows/enables password-auth over plaintext http
|
||||||
rem + optionally helps disable wpad
|
rem + optionally helps disable wpad, removing the 10sec latency
|
||||||
|
|
||||||
setlocal enabledelayedexpansion
|
|
||||||
|
|
||||||
net session >nul 2>&1
|
net session >nul 2>&1
|
||||||
if %errorlevel% neq 0 (
|
if %errorlevel% neq 0 (
|
||||||
@@ -20,30 +18,26 @@ echo OK;
|
|||||||
echo allow webdav basic-auth over plaintext http?
|
echo allow webdav basic-auth over plaintext http?
|
||||||
echo Y: login works, but the password will be visible in wireshark etc
|
echo Y: login works, but the password will be visible in wireshark etc
|
||||||
echo N: login will NOT work unless you use https and valid certificates
|
echo N: login will NOT work unless you use https and valid certificates
|
||||||
set c=.
|
choice
|
||||||
set /p "c=(Y/N): "
|
if %errorlevel% equ 1 (
|
||||||
echo(
|
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
|
||||||
if /i not "!c!"=="y" goto :g1
|
rem default is 1 (require tls)
|
||||||
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
|
)
|
||||||
rem default is 1 (require tls)
|
|
||||||
|
|
||||||
:g1
|
|
||||||
echo(
|
echo(
|
||||||
echo OK;
|
echo OK;
|
||||||
echo do you want to disable wpad?
|
echo do you want to disable wpad?
|
||||||
echo can give a HUGE speed boost depending on network settings
|
echo can give a HUGE speed boost depending on network settings
|
||||||
set c=.
|
choice
|
||||||
set /p "c=(Y/N): "
|
if %errorlevel% equ 1 (
|
||||||
echo(
|
echo(
|
||||||
if /i not "!c!"=="y" goto :g2
|
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
|
||||||
echo(
|
echo please click [LAN settings] and disable [Automatically detect settings]
|
||||||
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
|
echo(
|
||||||
echo please click [LAN settings] and disable [Automatically detect settings]
|
pause
|
||||||
echo(
|
control inetcpl.cpl,,4
|
||||||
pause
|
)
|
||||||
control inetcpl.cpl,,4
|
|
||||||
|
|
||||||
:g2
|
|
||||||
net stop webclient
|
net stop webclient
|
||||||
net start webclient
|
net start webclient
|
||||||
echo(
|
echo(
|
||||||
|
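The batch-file changes above still revolve around the same registry value: BasicAuthLevel under the WebClient service, where the default of 1 requires TLS and 2 additionally allows basic-auth over plaintext http. As an aside, a small Windows-only sketch for checking the current setting from Python:

# Windows-only sketch: read the BasicAuthLevel value that the batch file sets
import winreg

path = r"SYSTEM\CurrentControlSet\Services\WebClient\Parameters"
with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, path) as key:
    level, _ = winreg.QueryValueEx(key, "BasicAuthLevel")

# 1 = require https (the default), 2 = also allow basic-auth over plain http
print("plaintext basic-auth is", "allowed" if level == 2 else "blocked")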
@@ -6,6 +6,10 @@ import platform
 import sys
 import time
 
+# fmt: off
+_:tuple[int,int]=(0,0) # _____________________________________________________________________ hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-) ************************************************************************************************************************************************
+# fmt: on
+
 try:
     from typing import TYPE_CHECKING
 except:

@@ -27,13 +31,20 @@ WINDOWS: Any = (
     else False
 )
 
-VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
+VT100 = "--ansi" in sys.argv or (
+    os.environ.get("NO_COLOR", "").lower() in ("", "0", "false")
+    and sys.stdout.isatty()
+    and "--no-ansi" not in sys.argv
+    and (not WINDOWS or WINDOWS >= [10, 0, 14393])
+)
 # introduced in anniversary update
 
 ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
 
 MACOS = platform.system() == "Darwin"
 
+EXE = bool(getattr(sys, "frozen", False))
+
 try:
     CORES = len(os.sched_getaffinity(0))
 except:
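The new VT100 expression above is a pure function of argv, the NO_COLOR environment variable, whether stdout is a tty, and the Windows build number; restated on its own (illustrative only, mirroring the rules exactly):

# restatement of the VT100 decision above; `windows` is either False or a
# [major, minor, build] list, matching the WINDOWS convention in __init__.py
import os, sys

def want_ansi(argv, environ, is_tty, windows=False):
    if "--ansi" in argv:
        return True
    return (
        environ.get("NO_COLOR", "").lower() in ("", "0", "false")
        and is_tty
        and "--no-ansi" not in argv
        and (not windows or windows >= [10, 0, 14393])
    )

print(want_ansi(sys.argv, os.environ, sys.stdout.isatty()))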
File diff suppressed because it is too large
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 5, 1)
-CODENAME = "babel"
-BUILD_DT = (2022, 12, 3)
+VERSION = (1, 8, 7)
+CODENAME = "argon"
+BUILD_DT = (2023, 7, 23)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
File diff suppressed because it is too large
@@ -24,13 +24,15 @@ def listdir(p: str = ".") -> list[str]:
     return [fsdec(x) for x in os.listdir(fsenc(p))]
 
 
-def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
+def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool:
     bname = fsenc(name)
     try:
         os.makedirs(bname, mode)
+        return True
     except:
         if not exist_ok or not os.path.isdir(bname):
             raise
+        return False
 
 
 def mkdir(p: str, mode: int = 0o755) -> None:
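With that change makedirs doubles as a creation probe: it returns True when it actually created the directory tree and False when the path already existed, still raising for genuine errors. A hedged usage sketch, assuming the same import the FTP code further below uses (from .bos import bos):

# sketch of the new contract; the path is just an example
from copyparty.bos import bos

if bos.makedirs("/tmp/copyparty-demo"):
    print("created the folder")
else:
    print("folder already existed; nothing raised")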
@@ -3,12 +3,13 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
|
import traceback
|
||||||
|
|
||||||
import queue
|
import queue
|
||||||
|
|
||||||
from .__init__ import CORES, TYPE_CHECKING
|
from .__init__ import CORES, TYPE_CHECKING
|
||||||
from .broker_mpw import MpWorker
|
from .broker_mpw import MpWorker
|
||||||
from .broker_util import try_exec
|
from .broker_util import ExceptionalQueue, try_exec
|
||||||
from .util import Daemon, mp
|
from .util import Daemon, mp
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -93,16 +94,32 @@ class BrokerMp(object):
|
|||||||
|
|
||||||
else:
|
else:
|
||||||
# new ipc invoking managed service in hub
|
# new ipc invoking managed service in hub
|
||||||
obj = self.hub
|
try:
|
||||||
for node in dest.split("."):
|
obj = self.hub
|
||||||
obj = getattr(obj, node)
|
for node in dest.split("."):
|
||||||
|
obj = getattr(obj, node)
|
||||||
|
|
||||||
# TODO will deadlock if dest performs another ipc
|
# TODO will deadlock if dest performs another ipc
|
||||||
rv = try_exec(retq_id, obj, *args)
|
rv = try_exec(retq_id, obj, *args)
|
||||||
|
except:
|
||||||
|
rv = ["exception", "stack", traceback.format_exc()]
|
||||||
|
|
||||||
if retq_id:
|
if retq_id:
|
||||||
proc.q_pend.put((retq_id, "retq", rv))
|
proc.q_pend.put((retq_id, "retq", rv))
|
||||||
|
|
||||||
|
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||||
|
|
||||||
|
# new non-ipc invoking managed service in hub
|
||||||
|
obj = self.hub
|
||||||
|
for node in dest.split("."):
|
||||||
|
obj = getattr(obj, node)
|
||||||
|
|
||||||
|
rv = try_exec(True, obj, *args)
|
||||||
|
|
||||||
|
retq = ExceptionalQueue(1)
|
||||||
|
retq.put(rv)
|
||||||
|
return retq
|
||||||
|
|
||||||
def say(self, dest: str, *args: Any) -> None:
|
def say(self, dest: str, *args: Any) -> None:
|
||||||
"""
|
"""
|
||||||
send message to non-hub component in other process,
|
send message to non-hub component in other process,
|
||||||
|
|||||||
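The new ask() above gives BrokerMp a synchronous-looking call path: it resolves the dotted destination on the hub, runs it through try_exec, and returns an ExceptionalQueue that already holds the result (or the ["exception", "stack", ...] payload for the caller to surface). A hypothetical caller, with a made-up destination and argument:

# hypothetical caller; "up2k.find_job" and its argument are illustrative only
retq = broker.ask("up2k.find_job", "some-file.bin")
result = retq.get()  # the result, assuming ExceptionalQueue.get() re-raises hub-side errors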
222
copyparty/cert.py
Normal file
222
copyparty/cert.py
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
import calendar
|
||||||
|
import errno
|
||||||
|
import filecmp
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .util import Netdev, runcmd
|
||||||
|
|
||||||
|
HAVE_CFSSL = True
|
||||||
|
|
||||||
|
if True: # pylint: disable=using-constant-test
|
||||||
|
from .util import RootLogger
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_cert(log: "RootLogger", args) -> None:
|
||||||
|
"""
|
||||||
|
the default cert (and the entire TLS support) is only here to enable the
|
||||||
|
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||||
|
being massive memers (https://www.chromium.org/blink/webcrypto)
|
||||||
|
|
||||||
|
i feel awful about this and so should they
|
||||||
|
"""
|
||||||
|
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||||
|
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
||||||
|
if not os.path.isfile(args.cert):
|
||||||
|
if cert_appdata != args.cert:
|
||||||
|
raise Exception("certificate file does not exist: " + args.cert)
|
||||||
|
|
||||||
|
shutil.copy(cert_insec, args.cert)
|
||||||
|
|
||||||
|
with open(args.cert, "rb") as f:
|
||||||
|
buf = f.read()
|
||||||
|
o1 = buf.find(b" PRIVATE KEY-")
|
||||||
|
o2 = buf.find(b" CERTIFICATE-")
|
||||||
|
m = "unsupported certificate format: "
|
||||||
|
if o1 < 0:
|
||||||
|
raise Exception(m + "no private key inside pem")
|
||||||
|
if o2 < 0:
|
||||||
|
raise Exception(m + "no server certificate inside pem")
|
||||||
|
if o1 > o2:
|
||||||
|
raise Exception(m + "private key must appear before server certificate")
|
||||||
|
|
||||||
|
try:
|
||||||
|
if filecmp.cmp(args.cert, cert_insec):
|
||||||
|
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
||||||
|
log("cert", t.format(args.cert), 3)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# speaking of the default cert,
|
||||||
|
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||||
|
|
||||||
|
|
||||||
|
def _read_crt(args, fn):
|
||||||
|
try:
|
||||||
|
if not os.path.exists(os.path.join(args.crt_dir, fn)):
|
||||||
|
return 0, {}
|
||||||
|
|
||||||
|
acmd = ["cfssl-certinfo", "-cert", fn]
|
||||||
|
rc, so, se = runcmd(acmd, cwd=args.crt_dir)
|
||||||
|
if rc:
|
||||||
|
return 0, {}
|
||||||
|
|
||||||
|
inf = json.loads(so)
|
||||||
|
zs = inf["not_after"]
|
||||||
|
expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
|
||||||
|
return expiry, inf
|
||||||
|
except OSError as ex:
|
||||||
|
if ex.errno == errno.ENOENT:
|
||||||
|
raise
|
||||||
|
return 0, {}
|
||||||
|
except:
|
||||||
|
return 0, {}
|
||||||
|
|
||||||
|
|
||||||
|
def _gen_ca(log: "RootLogger", args):
|
||||||
|
expiry = _read_crt(args, "ca.pem")[0]
|
||||||
|
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
|
||||||
|
return
|
||||||
|
|
||||||
|
backdate = "{}m".format(int(args.crt_back * 60))
|
||||||
|
expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
|
||||||
|
cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
|
||||||
|
algo, ksz = args.crt_alg.split("-")
|
||||||
|
req = {
|
||||||
|
"CN": cn,
|
||||||
|
"CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
|
||||||
|
"key": {"algo": algo, "size": int(ksz)},
|
||||||
|
"names": [{"O": cn}],
|
||||||
|
}
|
||||||
|
sin = json.dumps(req).encode("utf-8")
|
||||||
|
log("cert", "creating new ca ...", 6)
|
||||||
|
|
||||||
|
cmd = "cfssl gencert -initca -"
|
||||||
|
rc, so, se = runcmd(cmd.split(), 30, sin=sin)
|
||||||
|
if rc:
|
||||||
|
raise Exception("failed to create ca-cert: {}, {}".format(rc, se), 3)
|
||||||
|
|
||||||
|
cmd = "cfssljson -bare ca"
|
||||||
|
sin = so.encode("utf-8")
|
||||||
|
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||||
|
if rc:
|
||||||
|
raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)
|
||||||
|
|
||||||
|
bname = os.path.join(args.crt_dir, "ca")
|
||||||
|
os.rename(bname + "-key.pem", bname + ".key")
|
||||||
|
os.unlink(bname + ".csr")
|
||||||
|
|
||||||
|
log("cert", "new ca OK", 2)
|
||||||
|
|
||||||
|
|
||||||
|
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||||
|
names = args.crt_ns.split(",") if args.crt_ns else []
|
||||||
|
if not args.crt_exact:
|
||||||
|
for n in names[:]:
|
||||||
|
names.append("*.{}".format(n))
|
||||||
|
if not args.crt_noip:
|
||||||
|
for ip in netdevs.keys():
|
||||||
|
names.append(ip.split("/")[0])
|
||||||
|
if args.crt_nolo:
|
||||||
|
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
|
||||||
|
if not args.crt_nohn:
|
||||||
|
names.append(args.name)
|
||||||
|
names.append(args.name + ".local")
|
||||||
|
if not names:
|
||||||
|
names = ["127.0.0.1"]
|
||||||
|
if "127.0.0.1" in names or "::1" in names:
|
||||||
|
names.append("localhost")
|
||||||
|
names = list({x: 1 for x in names}.keys())
|
||||||
|
|
||||||
|
try:
|
||||||
|
expiry, inf = _read_crt(args, "srv.pem")
|
||||||
|
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
||||||
|
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||||
|
for n in names:
|
||||||
|
if n not in inf["sans"]:
|
||||||
|
raise Exception("does not have {}".format(n))
|
||||||
|
if expired:
|
||||||
|
raise Exception("old server-cert has expired")
|
||||||
|
if not filecmp.cmp(args.cert, cert_insec):
|
||||||
|
return
|
||||||
|
except Exception as ex:
|
||||||
|
log("cert", "will create new server-cert; {}".format(ex))
|
||||||
|
|
||||||
|
log("cert", "creating server-cert ...", 6)
|
||||||
|
|
||||||
|
backdate = "{}m".format(int(args.crt_back * 60))
|
||||||
|
expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
|
||||||
|
cfg = {
|
||||||
|
"signing": {
|
||||||
|
"default": {
|
||||||
|
"backdate": backdate,
|
||||||
|
"expiry": expiry,
|
||||||
|
"usages": ["signing", "key encipherment", "server auth"],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
|
||||||
|
f.write(json.dumps(cfg).encode("utf-8"))
|
||||||
|
|
||||||
|
cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
|
||||||
|
algo, ksz = args.crt_alg.split("-")
|
||||||
|
req = {
|
||||||
|
"key": {"algo": algo, "size": int(ksz)},
|
||||||
|
"names": [{"O": cn}],
|
||||||
|
}
|
||||||
|
sin = json.dumps(req).encode("utf-8")
|
||||||
|
|
||||||
|
cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
|
||||||
|
acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
|
||||||
|
rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
|
||||||
|
if rc:
|
||||||
|
raise Exception("failed to create cert: {}, {}".format(rc, se))
|
||||||
|
|
||||||
|
cmd = "cfssljson -bare srv"
|
||||||
|
sin = so.encode("utf-8")
|
||||||
|
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||||
|
if rc:
|
||||||
|
raise Exception("failed to translate cert: {}, {}".format(rc, se))
|
||||||
|
|
||||||
|
bname = os.path.join(args.crt_dir, "srv")
|
||||||
|
os.rename(bname + "-key.pem", bname + ".key")
|
||||||
|
os.unlink(bname + ".csr")
|
||||||
|
|
||||||
|
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
|
||||||
|
ca = f.read()
|
||||||
|
|
||||||
|
with open(bname + ".key", "rb") as f:
|
||||||
|
skey = f.read()
|
||||||
|
|
||||||
|
with open(bname + ".pem", "rb") as f:
|
||||||
|
scrt = f.read()
|
||||||
|
|
||||||
|
with open(args.cert, "wb") as f:
|
||||||
|
f.write(skey + scrt + ca)
|
||||||
|
|
||||||
|
log("cert", "new server-cert OK", 2)
|
||||||
|
|
||||||
|
|
||||||
|
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||||
|
global HAVE_CFSSL
|
||||||
|
|
||||||
|
if args.http_only:
|
||||||
|
return
|
||||||
|
|
||||||
|
if args.no_crt or not HAVE_CFSSL:
|
||||||
|
ensure_cert(log, args)
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
_gen_ca(log, args)
|
||||||
|
_gen_srv(log, args, netdevs)
|
||||||
|
except Exception as ex:
|
||||||
|
HAVE_CFSSL = False
|
||||||
|
log("cert", "could not create TLS certificates: {}".format(ex), 3)
|
||||||
|
if getattr(ex, "errno", 0) == errno.ENOENT:
|
||||||
|
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
|
||||||
|
log("cert", t, 6)
|
||||||
|
|
||||||
|
ensure_cert(log, args)
|
||||||
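The renewal rule in cert.py above regenerates a certificate once less than roughly 10% of its configured lifetime remains (the `time.time() + days * 60 * 60 * 24 * 0.1 < expiry` check). The same arithmetic in isolation, with an invented not_after timestamp and an example lifetime:

# standalone version of the expiry check used by _gen_ca/_gen_srv above;
# the not_after value and the 3650-day lifetime are examples, not repo defaults
import calendar, time

crt_days = 3650
not_after = "2026-07-23T00:00:00Z"  # format as reported by cfssl-certinfo
expiry = calendar.timegm(time.strptime(not_after, "%Y-%m-%dT%H:%M:%SZ"))

if time.time() + crt_days * 60 * 60 * 24 * 0.1 < expiry:
    print("more than ~10% of the lifetime left; keep the cert")
else:
    print("time to regenerate")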
170
copyparty/cfg.py
Normal file
170
copyparty/cfg.py
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
|
||||||
|
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
|
||||||
|
onedash = set(zs.split())
|
||||||
|
|
||||||
|
|
||||||
|
def vf_bmap() -> dict[str, str]:
|
||||||
|
"""argv-to-volflag: simple bools"""
|
||||||
|
ret = {
|
||||||
|
"never_symlink": "neversymlink",
|
||||||
|
"no_dedup": "copydupes",
|
||||||
|
"no_dupe": "nodupe",
|
||||||
|
"no_forget": "noforget",
|
||||||
|
"th_no_crop": "nocrop",
|
||||||
|
"dav_auth": "davauth",
|
||||||
|
"dav_rt": "davrt",
|
||||||
|
}
|
||||||
|
for k in (
|
||||||
|
"dotsrch",
|
||||||
|
"e2t",
|
||||||
|
"e2ts",
|
||||||
|
"e2tsr",
|
||||||
|
"e2v",
|
||||||
|
"e2vu",
|
||||||
|
"e2vp",
|
||||||
|
"grid",
|
||||||
|
"hardlink",
|
||||||
|
"magic",
|
||||||
|
"no_sb_md",
|
||||||
|
"no_sb_lg",
|
||||||
|
"rand",
|
||||||
|
"xdev",
|
||||||
|
"xlink",
|
||||||
|
"xvol",
|
||||||
|
):
|
||||||
|
ret[k] = k
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def vf_vmap() -> dict[str, str]:
|
||||||
|
"""argv-to-volflag: simple values"""
|
||||||
|
ret = {"th_convt": "convt", "th_size": "thsize"}
|
||||||
|
for k in ("dbd", "lg_sbf", "md_sbf", "nrand", "unlist"):
|
||||||
|
ret[k] = k
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def vf_cmap() -> dict[str, str]:
|
||||||
|
"""argv-to-volflag: complex/lists"""
|
||||||
|
ret = {}
|
||||||
|
for k in ("html_head", "mte", "mth"):
|
||||||
|
ret[k] = k
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
permdescs = {
|
||||||
|
"r": "read; list folder contents, download files",
|
||||||
|
"w": 'write; upload files; need "r" to see the uploads',
|
||||||
|
"m": 'move; move files and folders; need "w" at destination',
|
||||||
|
"d": "delete; permanently delete files and folders",
|
||||||
|
"g": "get; download files, but cannot see folder contents",
|
||||||
|
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
flagcats = {
|
||||||
|
"uploads, general": {
|
||||||
|
"nodupe": "rejects existing files (instead of symlinking them)",
|
||||||
|
"hardlink": "does dedup with hardlinks instead of symlinks",
|
||||||
|
"neversymlink": "disables symlink fallback; full copy instead",
|
||||||
|
"copydupes": "disables dedup, always saves full copies of dupes",
|
||||||
|
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
||||||
|
"nosub": "forces all uploads into the top folder of the vfs",
|
||||||
|
"magic": "enables filetype detection for nameless uploads",
|
||||||
|
"gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
|
||||||
|
"pk": "forces server-side compression, optional arg: xz,9",
|
||||||
|
},
|
||||||
|
"upload rules": {
|
||||||
|
"maxn=250,600": "max 250 uploads over 15min",
|
||||||
|
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
|
||||||
|
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
|
||||||
|
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||||
|
"rand": "force randomized filenames, 9 chars long by default",
|
||||||
|
"nrand=N": "randomized filenames are N chars long",
|
||||||
|
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||||
|
"df=1g": "ensure 1 GiB free disk space",
|
||||||
|
},
|
||||||
|
"upload rotation\n(moves all uploads into the specified folder structure)": {
|
||||||
|
"rotn=100,3": "3 levels of subfolders with 100 entries in each",
|
||||||
|
"rotf=%Y-%m/%d-%H": "date-formatted organizing",
|
||||||
|
"lifetime=3600": "uploads are deleted after 1 hour",
|
||||||
|
},
|
||||||
|
"database, general": {
|
||||||
|
"e2d": "enable database; makes files searchable + enables upload dedup",
|
||||||
|
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
|
||||||
|
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
|
||||||
|
"e2t": "enable multimedia indexing; makes it possible to search for tags",
|
||||||
|
"e2ts": "scan existing files for tags on startup; also sets -e2t",
|
||||||
|
"e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
|
||||||
|
"d2ts": "disables metadata collection for existing files",
|
||||||
|
"d2ds": "disables onboot indexing, overrides -e2ds*",
|
||||||
|
"d2t": "disables metadata collection, overrides -e2t*",
|
||||||
|
"d2v": "disables file verification, overrides -e2v*",
|
||||||
|
"d2d": "disables all database stuff, overrides -e2*",
|
||||||
|
"hist=/tmp/cdb": "puts thumbnails and indexes at that location",
|
||||||
|
"scan=60": "scan for new files every 60sec, same as --re-maxage",
|
||||||
|
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
||||||
|
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
||||||
|
"noforget": "don't forget files when deleted from disk",
|
||||||
|
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||||
|
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||||
|
"xlink": "cross-volume dupe detection / linking",
|
||||||
|
"xdev": "do not descend into other filesystems",
|
||||||
|
"xvol": "do not follow symlinks leaving the volume root",
|
||||||
|
"dotsrch": "show dotfiles in search results",
|
||||||
|
"nodotsrch": "hide dotfiles in search results (default)",
|
||||||
|
},
|
||||||
|
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
|
||||||
|
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
|
||||||
|
"mtp=ahash,vhash=media-hash.py": "collects two tags at once",
|
||||||
|
},
|
||||||
|
"thumbnails": {
|
||||||
|
"dthumb": "disables all thumbnails",
|
||||||
|
"dvthumb": "disables video thumbnails",
|
||||||
|
"dathumb": "disables audio thumbnails (spectrograms)",
|
||||||
|
"dithumb": "disables image thumbnails",
|
||||||
|
"thsize": "thumbnail res; WxH",
|
||||||
|
"nocrop": "disable center-cropping",
|
||||||
|
"convt": "conversion timeout in seconds",
|
||||||
|
},
|
||||||
|
"handlers\n(better explained in --help-handlers)": {
|
||||||
|
"on404=PY": "handle 404s by executing PY file",
|
||||||
|
"on403=PY": "handle 403s by executing PY file",
|
||||||
|
},
|
||||||
|
"event hooks\n(better explained in --help-hooks)": {
|
||||||
|
"xbu=CMD": "execute CMD before a file upload starts",
|
||||||
|
"xau=CMD": "execute CMD after a file upload finishes",
|
||||||
|
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
|
||||||
|
"xbr=CMD": "execute CMD before a file rename/move",
|
||||||
|
"xar=CMD": "execute CMD after a file rename/move",
|
||||||
|
"xbd=CMD": "execute CMD before a file delete",
|
||||||
|
"xad=CMD": "execute CMD after a file delete",
|
||||||
|
"xm=CMD": "execute CMD on message",
|
||||||
|
"xban=CMD": "execute CMD if someone gets banned",
|
||||||
|
},
|
||||||
|
"client and ux": {
|
||||||
|
"grid": "show grid/thumbnails by default",
|
||||||
|
"unlist": "dont list files matching REGEX",
|
||||||
|
"html_head=TXT": "includes TXT in the <head>",
|
||||||
|
"robots": "allows indexing by search engines (default)",
|
||||||
|
"norobots": "kindly asks search engines to leave",
|
||||||
|
"no_sb_md": "disable js sandbox for markdown files",
|
||||||
|
"no_sb_lg": "disable js sandbox for prologue/epilogue",
|
||||||
|
"sb_md": "enable js sandbox for markdown files (default)",
|
||||||
|
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||||
|
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||||
|
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||||
|
"nohtml": "return html and markdown as text/html",
|
||||||
|
},
|
||||||
|
"others": {
|
||||||
|
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
|
||||||
|
"davauth": "ask webdav clients to login for all folders",
|
||||||
|
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
|
||||||
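Since flagcats and flagdescs above are plain nested dicts, they can double as a quick volflag reference outside the server; a small sketch that prints one line per flag, assuming the module is importable as copyparty.cfg:

# sketch: dump the volflag reference defined above as "flag  description"
from copyparty.cfg import flagcats

for cat, flags in flagcats.items():
    print(cat.split("\n")[0])  # first line of the category title
    for flag, desc in flags.items():
        print("  {:<22} {}".format(flag, desc.split("\n")[0]))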
@@ -2,6 +2,7 @@
|
|||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import errno
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import stat
|
import stat
|
||||||
@@ -13,9 +14,21 @@ from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
|||||||
from pyftpdlib.handlers import FTPHandler
|
from pyftpdlib.handlers import FTPHandler
|
||||||
from pyftpdlib.servers import FTPServer
|
from pyftpdlib.servers import FTPServer
|
||||||
|
|
||||||
from .__init__ import PY2, TYPE_CHECKING, E
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||||
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import Daemon, Pebkac, exclude_dotfiles, fsenc, ipnorm
|
from .util import (
|
||||||
|
Daemon,
|
||||||
|
Pebkac,
|
||||||
|
exclude_dotfiles,
|
||||||
|
fsenc,
|
||||||
|
ipnorm,
|
||||||
|
pybin,
|
||||||
|
relchk,
|
||||||
|
runhook,
|
||||||
|
sanitize_fn,
|
||||||
|
vjoin,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from pyftpdlib.ioloop import IOLoop
|
from pyftpdlib.ioloop import IOLoop
|
||||||
@@ -34,6 +47,12 @@ if True: # pylint: disable=using-constant-test
|
|||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class FSE(FilesystemError):
|
||||||
|
def __init__(self, msg: str, severity: int = 0) -> None:
|
||||||
|
super(FilesystemError, self).__init__(msg)
|
||||||
|
self.severity = severity
|
||||||
|
|
||||||
|
|
||||||
class FtpAuth(DummyAuthorizer):
|
class FtpAuth(DummyAuthorizer):
|
||||||
def __init__(self, hub: "SvcHub") -> None:
|
def __init__(self, hub: "SvcHub") -> None:
|
||||||
super(FtpAuth, self).__init__()
|
super(FtpAuth, self).__init__()
|
||||||
@@ -43,6 +62,7 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
self, username: str, password: str, handler: Any
|
self, username: str, password: str, handler: Any
|
||||||
) -> None:
|
) -> None:
|
||||||
handler.username = "{}:{}".format(username, password)
|
handler.username = "{}:{}".format(username, password)
|
||||||
|
handler.uname = "*"
|
||||||
|
|
||||||
ip = handler.addr[0]
|
ip = handler.addr[0]
|
||||||
if ip.startswith("::ffff:"):
|
if ip.startswith("::ffff:"):
|
||||||
@@ -59,10 +79,13 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
raise AuthenticationFailed("banned")
|
raise AuthenticationFailed("banned")
|
||||||
|
|
||||||
asrv = self.hub.asrv
|
asrv = self.hub.asrv
|
||||||
if username == "anonymous":
|
uname = "*"
|
||||||
uname = "*"
|
if username != "anonymous":
|
||||||
else:
|
for zs in (password, username):
|
||||||
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*"
|
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||||
|
if zs:
|
||||||
|
uname = zs
|
||||||
|
break
|
||||||
|
|
||||||
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
||||||
g = self.hub.gpwd
|
g = self.hub.gpwd
|
||||||
@@ -74,14 +97,14 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
|
|
||||||
raise AuthenticationFailed("Authentication failed.")
|
raise AuthenticationFailed("Authentication failed.")
|
||||||
|
|
||||||
handler.username = uname
|
handler.uname = handler.username = uname
|
||||||
|
|
||||||
def get_home_dir(self, username: str) -> str:
|
def get_home_dir(self, username: str) -> str:
|
||||||
return "/"
|
return "/"
|
||||||
|
|
||||||
def has_user(self, username: str) -> bool:
|
def has_user(self, username: str) -> bool:
|
||||||
asrv = self.hub.asrv
|
asrv = self.hub.asrv
|
||||||
return username in asrv.acct
|
return username in asrv.acct or username in asrv.iacct
|
||||||
|
|
||||||
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
|
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
|
||||||
return True # handled at filesystem layer
|
return True # handled at filesystem layer
|
||||||
@@ -100,17 +123,18 @@ class FtpFs(AbstractedFS):
|
|||||||
def __init__(
|
def __init__(
|
||||||
self, root: str, cmd_channel: Any
|
self, root: str, cmd_channel: Any
|
||||||
) -> None: # pylint: disable=super-init-not-called
|
) -> None: # pylint: disable=super-init-not-called
|
||||||
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
|
self.h = cmd_channel # type: FTPHandler
|
||||||
|
self.cmd_channel = cmd_channel # type: FTPHandler
|
||||||
self.hub: "SvcHub" = cmd_channel.hub
|
self.hub: "SvcHub" = cmd_channel.hub
|
||||||
self.args = cmd_channel.args
|
self.args = cmd_channel.args
|
||||||
|
self.uname = cmd_channel.uname
|
||||||
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
|
|
||||||
|
|
||||||
self.cwd = "/" # pyftpdlib convention of leading slash
|
self.cwd = "/" # pyftpdlib convention of leading slash
|
||||||
self.root = "/var/lib/empty"
|
self.root = "/var/lib/empty"
|
||||||
|
|
||||||
self.can_read = self.can_write = self.can_move = False
|
self.can_read = self.can_write = self.can_move = False
|
||||||
self.can_delete = self.can_get = self.can_upget = False
|
self.can_delete = self.can_get = self.can_upget = False
|
||||||
|
self.can_admin = False
|
||||||
|
|
||||||
self.listdirinfo = self.listdir
|
self.listdirinfo = self.listdir
|
||||||
self.chdir(".")
|
self.chdir(".")
|
||||||
@@ -122,16 +146,36 @@ class FtpFs(AbstractedFS):
|
|||||||
w: bool = False,
|
w: bool = False,
|
||||||
m: bool = False,
|
m: bool = False,
|
||||||
d: bool = False,
|
d: bool = False,
|
||||||
) -> str:
|
) -> tuple[str, VFS, str]:
|
||||||
try:
|
try:
|
||||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
vpath = vpath.replace("\\", "/").strip("/")
|
||||||
|
rd, fn = os.path.split(vpath)
|
||||||
|
if ANYWIN and relchk(rd):
|
||||||
|
logging.warning("malicious vpath: %s", vpath)
|
||||||
|
t = "Unsupported characters in [{}]"
|
||||||
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
|
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
||||||
|
vpath = vjoin(rd, fn)
|
||||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||||
if not vfs.realpath:
|
if not vfs.realpath:
|
||||||
raise FilesystemError("no filesystem mounted at this path")
|
t = "No filesystem mounted at [{}]"
|
||||||
|
raise FSE(t.format(vpath))
|
||||||
|
|
||||||
return os.path.join(vfs.realpath, rem)
|
if "xdev" in vfs.flags or "xvol" in vfs.flags:
|
||||||
|
ap = vfs.canonical(rem)
|
||||||
|
avfs = vfs.chk_ap(ap)
|
||||||
|
t = "Permission denied in [{}]"
|
||||||
|
if not avfs:
|
||||||
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
|
cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
|
||||||
|
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||||
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
|
return os.path.join(vfs.realpath, rem), vfs, rem
|
||||||
except Pebkac as ex:
|
except Pebkac as ex:
|
||||||
raise FilesystemError(str(ex))
|
raise FSE(str(ex))
|
||||||
|
|
||||||
def rv2a(
|
def rv2a(
|
||||||
self,
|
self,
|
||||||
@@ -140,7 +184,7 @@ class FtpFs(AbstractedFS):
|
|||||||
w: bool = False,
|
w: bool = False,
|
||||||
m: bool = False,
|
m: bool = False,
|
||||||
d: bool = False,
|
d: bool = False,
|
||||||
) -> str:
|
) -> tuple[str, VFS, str]:
|
||||||
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
||||||
|
|
||||||
def ftp2fs(self, ftppath: str) -> str:
|
def ftp2fs(self, ftppath: str) -> str:
|
||||||
@@ -154,7 +198,7 @@ class FtpFs(AbstractedFS):
|
|||||||
def validpath(self, path: str) -> bool:
|
def validpath(self, path: str) -> bool:
|
||||||
if "/.hist/" in path:
|
if "/.hist/" in path:
|
||||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||||
raise FilesystemError("access to this file is forbidden")
|
raise FSE("Access to this file is forbidden", 1)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -162,7 +206,7 @@ class FtpFs(AbstractedFS):
|
|||||||
r = "r" in mode
|
r = "r" in mode
|
||||||
w = "w" in mode or "a" in mode or "+" in mode
|
w = "w" in mode or "a" in mode or "+" in mode
|
||||||
|
|
||||||
ap = self.rv2a(filename, r, w)
|
ap = self.rv2a(filename, r, w)[0]
|
||||||
if w:
|
if w:
|
||||||
try:
|
try:
|
||||||
st = bos.stat(ap)
|
st = bos.stat(ap)
|
||||||
@@ -171,7 +215,7 @@ class FtpFs(AbstractedFS):
|
|||||||
td = 0
|
td = 0
|
||||||
|
|
||||||
if td < -1 or td > self.args.ftp_wt:
|
if td < -1 or td > self.args.ftp_wt:
|
||||||
raise FilesystemError("cannot open existing file for writing")
|
raise FSE("Cannot open existing file for writing")
|
||||||
|
|
||||||
self.validpath(ap)
|
self.validpath(ap)
|
||||||
return open(fsenc(ap), mode)
|
return open(fsenc(ap), mode)
|
||||||
@@ -180,9 +224,17 @@ class FtpFs(AbstractedFS):
|
|||||||
nwd = join(self.cwd, path)
|
nwd = join(self.cwd, path)
|
||||||
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
|
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
|
||||||
ap = vfs.canonical(rem)
|
ap = vfs.canonical(rem)
|
||||||
if not bos.path.isdir(ap):
|
try:
|
||||||
|
st = bos.stat(ap)
|
||||||
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
|
raise Exception()
|
||||||
|
except:
|
||||||
# returning 550 is library-default and suitable
|
# returning 550 is library-default and suitable
|
||||||
raise FilesystemError("Failed to change directory")
|
raise FSE("No such file or directory")
|
||||||
|
|
||||||
|
avfs = vfs.chk_ap(ap, st)
|
||||||
|
if not avfs:
|
||||||
|
raise FSE("Permission denied", 1)
|
||||||
|
|
||||||
self.cwd = nwd
|
self.cwd = nwd
|
||||||
(
|
(
|
||||||
@@ -192,16 +244,19 @@ class FtpFs(AbstractedFS):
|
|||||||
self.can_delete,
|
self.can_delete,
|
||||||
self.can_get,
|
self.can_get,
|
||||||
self.can_upget,
|
self.can_upget,
|
||||||
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
self.can_admin,
|
||||||
|
) = avfs.can_access("", self.h.uname)
|
||||||
|
|
||||||
def mkdir(self, path: str) -> None:
|
def mkdir(self, path: str) -> None:
|
||||||
ap = self.rv2a(path, w=True)
|
ap = self.rv2a(path, w=True)[0]
|
||||||
bos.mkdir(ap)
|
bos.makedirs(ap) # filezilla expects this
|
||||||
|
|
||||||
def listdir(self, path: str) -> list[str]:
|
def listdir(self, path: str) -> list[str]:
|
||||||
vpath = join(self.cwd, path).lstrip("/")
|
vpath = join(self.cwd, path)
|
||||||
try:
|
try:
|
||||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
|
ap, vfs, rem = self.v2a(vpath, True, False)
|
||||||
|
if not bos.path.isdir(ap):
|
||||||
|
raise FSE("No such file or directory", 1)
|
||||||
|
|
||||||
fsroot, vfs_ls1, vfs_virt = vfs.ls(
|
fsroot, vfs_ls1, vfs_virt = vfs.ls(
|
||||||
rem,
|
rem,
|
||||||
@@ -217,8 +272,12 @@ class FtpFs(AbstractedFS):
|
|||||||
|
|
||||||
vfs_ls.sort()
|
vfs_ls.sort()
|
||||||
return vfs_ls
|
return vfs_ls
|
||||||
except:
|
except Exception as ex:
|
||||||
if vpath:
|
# panic on malicious names
|
||||||
|
if getattr(ex, "severity", 0):
|
||||||
|
raise
|
||||||
|
|
||||||
|
if vpath.strip("/"):
|
||||||
# display write-only folders as empty
|
# display write-only folders as empty
|
||||||
return []
|
return []
|
||||||
|
|
||||||
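The new error handling hinges on FSE carrying an optional severity flag: severity 1 marks a security-relevant rejection that must propagate to the client, while anything else may be swallowed (for example, write-only folders are rendered as empty). A rough sketch of that pattern, assuming FSE is a FilesystemError subclass as the rest of this diff suggests; do_listdir is a made-up placeholder:

    # assumption: FSE subclasses pyftpdlib's FilesystemError and stores a severity int
    class FSE(FilesystemError):
        def __init__(self, msg: str, severity: int = 0) -> None:
            super(FSE, self).__init__(msg)
            self.severity = severity

    try:
        vfs_ls = do_listdir(vpath)            # hypothetical helper
    except Exception as ex:
        if getattr(ex, "severity", 0):
            raise                             # forbidden/malicious path: let the client see it
        vfs_ls = []                           # e.g. write-only folder: just show it as empty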
@@ -227,43 +286,49 @@ class FtpFs(AbstractedFS):
|
|||||||
return list(sorted(list(r.keys())))
|
return list(sorted(list(r.keys())))
|
||||||
|
|
||||||
def rmdir(self, path: str) -> None:
|
def rmdir(self, path: str) -> None:
|
||||||
ap = self.rv2a(path, d=True)
|
ap = self.rv2a(path, d=True)[0]
|
||||||
bos.rmdir(ap)
|
try:
|
||||||
|
bos.rmdir(ap)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno != errno.ENOENT:
|
||||||
|
raise
|
||||||
|
|
||||||
def remove(self, path: str) -> None:
|
def remove(self, path: str) -> None:
|
||||||
if self.args.no_del:
|
if self.args.no_del:
|
||||||
raise FilesystemError("the delete feature is disabled in server config")
|
raise FSE("The delete feature is disabled in server config")
|
||||||
|
|
||||||
vp = join(self.cwd, path).lstrip("/")
|
vp = join(self.cwd, path).lstrip("/")
|
||||||
try:
|
try:
|
||||||
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp], [])
|
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise FilesystemError(str(ex))
|
raise FSE(str(ex))
|
||||||
|
|
||||||
def rename(self, src: str, dst: str) -> None:
|
def rename(self, src: str, dst: str) -> None:
|
||||||
if not self.can_move:
|
if not self.can_move:
|
||||||
raise FilesystemError("not allowed for user " + self.h.username)
|
raise FSE("Not allowed for user " + self.h.uname)
|
||||||
|
|
||||||
if self.args.no_mv:
|
if self.args.no_mv:
|
||||||
t = "the rename/move feature is disabled in server config"
|
raise FSE("The rename/move feature is disabled in server config")
|
||||||
raise FilesystemError(t)
|
|
||||||
|
|
||||||
svp = join(self.cwd, src).lstrip("/")
|
svp = join(self.cwd, src).lstrip("/")
|
||||||
dvp = join(self.cwd, dst).lstrip("/")
|
dvp = join(self.cwd, dst).lstrip("/")
|
||||||
try:
|
try:
|
||||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise FilesystemError(str(ex))
|
raise FSE(str(ex))
|
||||||
|
|
||||||
def chmod(self, path: str, mode: str) -> None:
|
def chmod(self, path: str, mode: str) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def stat(self, path: str) -> os.stat_result:
|
def stat(self, path: str) -> os.stat_result:
|
||||||
try:
|
try:
|
||||||
ap = self.rv2a(path, r=True)
|
ap = self.rv2a(path, r=True)[0]
|
||||||
return bos.stat(ap)
|
return bos.stat(ap)
|
||||||
except:
|
except FSE as ex:
|
||||||
ap = self.rv2a(path)
|
if ex.severity:
|
||||||
|
raise
|
||||||
|
|
||||||
|
ap = self.rv2a(path)[0]
|
||||||
st = bos.stat(ap)
|
st = bos.stat(ap)
|
||||||
if not stat.S_ISDIR(st.st_mode):
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
raise
|
raise
|
||||||
@@ -271,44 +336,50 @@ class FtpFs(AbstractedFS):
|
|||||||
return st
|
return st
|
||||||
|
|
||||||
def utime(self, path: str, timeval: float) -> None:
|
def utime(self, path: str, timeval: float) -> None:
|
||||||
ap = self.rv2a(path, w=True)
|
ap = self.rv2a(path, w=True)[0]
|
||||||
return bos.utime(ap, (timeval, timeval))
|
return bos.utime(ap, (timeval, timeval))
|
||||||
|
|
||||||
def lstat(self, path: str) -> os.stat_result:
|
def lstat(self, path: str) -> os.stat_result:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.stat(ap)
|
return bos.stat(ap)
|
||||||
|
|
||||||
def isfile(self, path: str) -> bool:
|
def isfile(self, path: str) -> bool:
|
||||||
try:
|
try:
|
||||||
st = self.stat(path)
|
st = self.stat(path)
|
||||||
return stat.S_ISREG(st.st_mode)
|
return stat.S_ISREG(st.st_mode)
|
||||||
except:
|
except Exception as ex:
|
||||||
|
if getattr(ex, "severity", 0):
|
||||||
|
raise
|
||||||
|
|
||||||
return False # expected for mojibake in ftp_SIZE()
|
return False # expected for mojibake in ftp_SIZE()
|
||||||
|
|
||||||
def islink(self, path: str) -> bool:
|
def islink(self, path: str) -> bool:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.islink(ap)
|
return bos.path.islink(ap)
|
||||||
|
|
||||||
def isdir(self, path: str) -> bool:
|
def isdir(self, path: str) -> bool:
|
||||||
try:
|
try:
|
||||||
st = self.stat(path)
|
st = self.stat(path)
|
||||||
return stat.S_ISDIR(st.st_mode)
|
return stat.S_ISDIR(st.st_mode)
|
||||||
except:
|
except Exception as ex:
|
||||||
|
if getattr(ex, "severity", 0):
|
||||||
|
raise
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def getsize(self, path: str) -> int:
|
def getsize(self, path: str) -> int:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.getsize(ap)
|
return bos.path.getsize(ap)
|
||||||
|
|
||||||
def getmtime(self, path: str) -> float:
|
def getmtime(self, path: str) -> float:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.getmtime(ap)
|
return bos.path.getmtime(ap)
|
||||||
|
|
||||||
def realpath(self, path: str) -> str:
|
def realpath(self, path: str) -> str:
|
||||||
return path
|
return path
|
||||||
|
|
||||||
def lexists(self, path: str) -> bool:
|
def lexists(self, path: str) -> bool:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.lexists(ap)
|
return bos.path.lexists(ap)
|
||||||
|
|
||||||
def get_user_by_uid(self, uid: int) -> str:
|
def get_user_by_uid(self, uid: int) -> str:
|
||||||
@@ -322,16 +393,21 @@ class FtpHandler(FTPHandler):
|
|||||||
abstracted_fs = FtpFs
|
abstracted_fs = FtpFs
|
||||||
hub: "SvcHub"
|
hub: "SvcHub"
|
||||||
args: argparse.Namespace
|
args: argparse.Namespace
|
||||||
|
uname: str
|
||||||
|
|
||||||
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
|
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
|
||||||
self.hub: "SvcHub" = FtpHandler.hub
|
self.hub: "SvcHub" = FtpHandler.hub
|
||||||
self.args: argparse.Namespace = FtpHandler.args
|
self.args: argparse.Namespace = FtpHandler.args
|
||||||
|
self.uname = "*"
|
||||||
|
|
||||||
if PY2:
|
if PY2:
|
||||||
FTPHandler.__init__(self, conn, server, ioloop)
|
FTPHandler.__init__(self, conn, server, ioloop)
|
||||||
else:
|
else:
|
||||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||||
|
|
||||||
|
cip = self.remote_ip
|
||||||
|
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
||||||
|
|
||||||
# abspath->vpath mapping to resolve log_transfer paths
|
# abspath->vpath mapping to resolve log_transfer paths
|
||||||
self.vfs_map: dict[str, str] = {}
|
self.vfs_map: dict[str, str] = {}
|
||||||
|
|
||||||
@@ -341,8 +417,24 @@ class FtpHandler(FTPHandler):
|
|||||||
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
||||||
# Optional[str]
|
# Optional[str]
|
||||||
vp = join(self.fs.cwd, file).lstrip("/")
|
vp = join(self.fs.cwd, file).lstrip("/")
|
||||||
ap = self.fs.v2a(vp)
|
ap, vfs, rem = self.fs.v2a(vp, w=True)
|
||||||
self.vfs_map[ap] = vp
|
self.vfs_map[ap] = vp
|
||||||
|
xbu = vfs.flags.get("xbu")
|
||||||
|
if xbu and not runhook(
|
||||||
|
None,
|
||||||
|
xbu,
|
||||||
|
ap,
|
||||||
|
vfs.canonical(rem),
|
||||||
|
"",
|
||||||
|
self.uname,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
self.cli_ip,
|
||||||
|
0,
|
||||||
|
"",
|
||||||
|
):
|
||||||
|
raise FSE("Upload blocked by xbu server config")
|
||||||
|
|
||||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||||
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
||||||
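ftp_STOR now consults the volume's xbu (execute-before-upload) hook through runhook() before handing the transfer to pyftpdlib. The runhook argument list is internal to copyparty, so the following is only a schematic of the gate with check_upload_hooks as a made-up name; the actual call is the one shown in the hunk above:

    # schematic only; mirrors the runhook() invocation from the diff
    def check_upload_hooks(vfs, rem, ap, uname, cli_ip):
        xbu = vfs.flags.get("xbu")            # hook commands, if configured for this volume
        if xbu and not runhook(None, xbu, ap, vfs.canonical(rem), "", uname, 0, 0, cli_ip, 0, ""):
            raise FSE("Upload blocked by xbu server config")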
@@ -363,15 +455,17 @@ class FtpHandler(FTPHandler):
|
|||||||
# print("xfer_end: {} => {}".format(ap, vp))
|
# print("xfer_end: {} => {}".format(ap, vp))
|
||||||
if vp:
|
if vp:
|
||||||
vp, fn = os.path.split(vp)
|
vp, fn = os.path.split(vp)
|
||||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
|
vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True)
|
||||||
vfs, rem = vfs.get_dbv(rem)
|
vfs, rem = vfs.get_dbv(rem)
|
||||||
self.hub.up2k.hash_file(
|
self.hub.up2k.hash_file(
|
||||||
vfs.realpath,
|
vfs.realpath,
|
||||||
|
vfs.vpath,
|
||||||
vfs.flags,
|
vfs.flags,
|
||||||
rem,
|
rem,
|
||||||
fn,
|
fn,
|
||||||
self.remote_ip,
|
self.cli_ip,
|
||||||
time.time(),
|
time.time(),
|
||||||
|
self.uname,
|
||||||
)
|
)
|
||||||
|
|
||||||
return FTPHandler.log_transfer(
|
return FTPHandler.log_transfer(
|
||||||
@@ -402,10 +496,10 @@ class Ftpd(object):
|
|||||||
h1 = SftpHandler
|
h1 = SftpHandler
|
||||||
except:
|
except:
|
||||||
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
||||||
print(t.format(sys.executable))
|
print(t.format(pybin))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
|
h1.certfile = self.args.cert
|
||||||
h1.tls_control_required = True
|
h1.tls_control_required = True
|
||||||
h1.tls_data_required = True
|
h1.tls_data_required = True
|
||||||
|
|
||||||
@@ -413,9 +507,9 @@ class Ftpd(object):
|
|||||||
|
|
||||||
for h_lp in hs:
|
for h_lp in hs:
|
||||||
h2, lp = h_lp
|
h2, lp = h_lp
|
||||||
h2.hub = hub
|
FtpHandler.hub = h2.hub = hub
|
||||||
h2.args = hub.args
|
FtpHandler.args = h2.args = hub.args
|
||||||
h2.authorizer = FtpAuth(hub)
|
FtpHandler.authorizer = h2.authorizer = FtpAuth(hub)
|
||||||
|
|
||||||
if self.args.ftp_pr:
|
if self.args.ftp_pr:
|
||||||
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
||||||
@@ -435,10 +529,21 @@ class Ftpd(object):
|
|||||||
lgr = logging.getLogger("pyftpdlib")
|
lgr = logging.getLogger("pyftpdlib")
|
||||||
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
|
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
|
||||||
|
|
||||||
|
ips = self.args.i
|
||||||
|
if "::" in ips:
|
||||||
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
|
if self.args.ftp4:
|
||||||
|
ips = [x for x in ips if ":" not in x]
|
||||||
|
|
||||||
ioloop = IOLoop()
|
ioloop = IOLoop()
|
||||||
for ip in self.args.i:
|
for ip in ips:
|
||||||
for h, lp in hs:
|
for h, lp in hs:
|
||||||
FTPServer((ip, int(lp)), h, ioloop)
|
try:
|
||||||
|
FTPServer((ip, int(lp)), h, ioloop)
|
||||||
|
except:
|
||||||
|
if ip != "0.0.0.0" or "::" not in ips:
|
||||||
|
raise
|
||||||
|
|
||||||
Daemon(ioloop.loop, "ftp")
|
Daemon(ioloop.loop, "ftp")
|
||||||
|
|
||||||
|
|||||||
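The FTP listener setup now binds on every configured address, adds an extra 0.0.0.0 bind when "::" is requested (for platforms whose IPv6 wildcard does not accept IPv4 clients), and tolerates the resulting duplicate-bind failure. A condensed sketch of that loop under the same pyftpdlib FTPServer/IOLoop usage as above; args and handlers are illustrative names:

    ips = list(args.i)
    if "::" in ips:
        ips.append("0.0.0.0")                 # v4 fallback next to the v6 wildcard
    if args.ftp4:
        ips = [x for x in ips if ":" not in x]

    ioloop = IOLoop()
    for ip in ips:
        for h, lp in handlers:                # e.g. [(FtpHandler, port), (SftpHandler, tls_port)]
            try:
                FTPServer((ip, int(lp)), h, ioloop)
            except Exception:
                # the extra 0.0.0.0 bind may collide with "::" on dual-stack kernels; ignore that one
                if ip != "0.0.0.0" or "::" not in ips:
                    raise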
copyparty/httpcli.py (1186 lines changed)
File diff suppressed because it is too large
@@ -54,7 +54,6 @@ class HttpConn(object):
|
|||||||
self.args: argparse.Namespace = hsrv.args # mypy404
|
self.args: argparse.Namespace = hsrv.args # mypy404
|
||||||
self.E: EnvParams = self.args.E
|
self.E: EnvParams = self.args.E
|
||||||
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
self.asrv: AuthSrv = hsrv.asrv # mypy404
|
||||||
self.cert_path = hsrv.cert_path
|
|
||||||
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
self.u2fh: Util.FHC = hsrv.u2fh # mypy404
|
||||||
self.iphash: HMaccas = hsrv.broker.iphash
|
self.iphash: HMaccas = hsrv.broker.iphash
|
||||||
self.bans: dict[str, int] = hsrv.bans
|
self.bans: dict[str, int] = hsrv.bans
|
||||||
@@ -65,6 +64,7 @@ class HttpConn(object):
|
|||||||
self.ico: Ico = Ico(self.args) # mypy404
|
self.ico: Ico = Ico(self.args) # mypy404
|
||||||
|
|
||||||
self.t0: float = time.time() # mypy404
|
self.t0: float = time.time() # mypy404
|
||||||
|
self.freshen_pwd: float = 0.0
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.nreq: int = -1 # mypy404
|
self.nreq: int = -1 # mypy404
|
||||||
self.nbyte: int = 0 # mypy404
|
self.nbyte: int = 0 # mypy404
|
||||||
@@ -102,17 +102,18 @@ class HttpConn(object):
|
|||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
self.log_func(self.log_src, msg, c)
|
self.log_func(self.log_src, msg, c)
|
||||||
|
|
||||||
def get_u2idx(self) -> U2idx:
|
def get_u2idx(self) -> Optional[U2idx]:
|
||||||
# one u2idx per tcp connection;
|
# grab from a pool of u2idx instances;
|
||||||
# sqlite3 fully parallelizes under python threads
|
# sqlite3 fully parallelizes under python threads
|
||||||
|
# but avoid running out of FDs by creating too many
|
||||||
if not self.u2idx:
|
if not self.u2idx:
|
||||||
self.u2idx = U2idx(self)
|
self.u2idx = self.hsrv.get_u2idx(str(self.addr))
|
||||||
|
|
||||||
return self.u2idx
|
return self.u2idx
|
||||||
|
|
||||||
def _detect_https(self) -> bool:
|
def _detect_https(self) -> bool:
|
||||||
method = None
|
method = None
|
||||||
if self.cert_path:
|
if True:
|
||||||
try:
|
try:
|
||||||
method = self.s.recv(4, socket.MSG_PEEK)
|
method = self.s.recv(4, socket.MSG_PEEK)
|
||||||
except socket.timeout:
|
except socket.timeout:
|
||||||
@@ -146,7 +147,7 @@ class HttpConn(object):
|
|||||||
self.sr = None
|
self.sr = None
|
||||||
if self.args.https_only:
|
if self.args.https_only:
|
||||||
is_https = True
|
is_https = True
|
||||||
elif self.args.http_only or not HAVE_SSL:
|
elif self.args.http_only:
|
||||||
is_https = False
|
is_https = False
|
||||||
else:
|
else:
|
||||||
# raise Exception("asdf")
|
# raise Exception("asdf")
|
||||||
@@ -160,7 +161,7 @@ class HttpConn(object):
|
|||||||
self.log_src = self.log_src.replace("[36m", "[35m")
|
self.log_src = self.log_src.replace("[36m", "[35m")
|
||||||
try:
|
try:
|
||||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||||
ctx.load_cert_chain(self.cert_path)
|
ctx.load_cert_chain(self.args.cert)
|
||||||
if self.args.ssl_ver:
|
if self.args.ssl_ver:
|
||||||
ctx.options &= ~self.args.ssl_flags_en
|
ctx.options &= ~self.args.ssl_flags_en
|
||||||
ctx.options |= self.args.ssl_flags_de
|
ctx.options |= self.args.ssl_flags_de
|
||||||
@@ -214,3 +215,7 @@ class HttpConn(object):
|
|||||||
self.cli = HttpCli(self)
|
self.cli = HttpCli(self)
|
||||||
if not self.cli.run():
|
if not self.cli.run():
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if self.u2idx:
|
||||||
|
self.hsrv.put_u2idx(str(self.addr), self.u2idx)
|
||||||
|
self.u2idx = None
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
|||||||
import base64
|
import base64
|
||||||
import math
|
import math
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import socket
|
import socket
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
@@ -11,9 +12,19 @@ import time
|
|||||||
|
|
||||||
import queue
|
import queue
|
||||||
|
|
||||||
|
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
|
||||||
|
|
||||||
|
try:
|
||||||
|
MNFE = ModuleNotFoundError
|
||||||
|
except:
|
||||||
|
MNFE = ImportError
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import jinja2
|
import jinja2
|
||||||
except ImportError:
|
except MNFE:
|
||||||
|
if EXE:
|
||||||
|
raise
|
||||||
|
|
||||||
print(
|
print(
|
||||||
"""\033[1;31m
|
"""\033[1;31m
|
||||||
you do not have jinja2 installed,\033[33m
|
you do not have jinja2 installed,\033[33m
|
||||||
@@ -23,14 +34,29 @@ except ImportError:
|
|||||||
* (try another python version, if you have one)
|
* (try another python version, if you have one)
|
||||||
* (try copyparty.sfx instead)
|
* (try copyparty.sfx instead)
|
||||||
""".format(
|
""".format(
|
||||||
os.path.basename(sys.executable)
|
sys.executable
|
||||||
|
)
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
except SyntaxError:
|
||||||
|
if EXE:
|
||||||
|
raise
|
||||||
|
|
||||||
|
print(
|
||||||
|
"""\033[1;31m
|
||||||
|
your jinja2 version is incompatible with your python version;\033[33m
|
||||||
|
please try to replace it with an older version:\033[0m
|
||||||
|
* {} -m pip install --user jinja2==2.11.3
|
||||||
|
* (try another python version, if you have one)
|
||||||
|
* (try copyparty.sfx instead)
|
||||||
|
""".format(
|
||||||
|
sys.executable
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from .__init__ import MACOS, TYPE_CHECKING, EnvParams
|
|
||||||
from .bos import bos
|
|
||||||
from .httpconn import HttpConn
|
from .httpconn import HttpConn
|
||||||
|
from .u2idx import U2idx
|
||||||
from .util import (
|
from .util import (
|
||||||
E_SCK,
|
E_SCK,
|
||||||
FHC,
|
FHC,
|
||||||
@@ -39,6 +65,7 @@ from .util import (
|
|||||||
Magician,
|
Magician,
|
||||||
Netdev,
|
Netdev,
|
||||||
NetMap,
|
NetMap,
|
||||||
|
absreal,
|
||||||
ipnorm,
|
ipnorm,
|
||||||
min_ex,
|
min_ex,
|
||||||
shut_socket,
|
shut_socket,
|
||||||
@@ -81,8 +108,7 @@ class HttpSrv(object):
|
|||||||
self.bans: dict[str, int] = {}
|
self.bans: dict[str, int] = {}
|
||||||
self.aclose: dict[str, int] = {}
|
self.aclose: dict[str, int] = {}
|
||||||
|
|
||||||
self.ip = ""
|
self.bound: set[tuple[str, int]] = set()
|
||||||
self.port = 0
|
|
||||||
self.name = "hsrv" + nsuf
|
self.name = "hsrv" + nsuf
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
@@ -103,6 +129,9 @@ class HttpSrv(object):
|
|||||||
self.cb_ts = 0.0
|
self.cb_ts = 0.0
|
||||||
self.cb_v = ""
|
self.cb_v = ""
|
||||||
|
|
||||||
|
self.u2idx_free: dict[str, U2idx] = {}
|
||||||
|
self.u2idx_n = 0
|
||||||
|
|
||||||
env = jinja2.Environment()
|
env = jinja2.Environment()
|
||||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||||
@@ -110,17 +139,21 @@ class HttpSrv(object):
|
|||||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||||
self.prism = os.path.exists(zs)
|
self.prism = os.path.exists(zs)
|
||||||
|
|
||||||
|
self.statics: set[str] = set()
|
||||||
|
self._build_statics()
|
||||||
|
|
||||||
|
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||||
|
|
||||||
|
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||||
|
if not self.args.no_dav:
|
||||||
|
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||||
|
self.mallow += zs.split()
|
||||||
|
|
||||||
if self.args.zs:
|
if self.args.zs:
|
||||||
from .ssdp import SSDPr
|
from .ssdp import SSDPr
|
||||||
|
|
||||||
self.ssdp = SSDPr(broker)
|
self.ssdp = SSDPr(broker)
|
||||||
|
|
||||||
cert_path = os.path.join(self.E.cfg, "cert.pem")
|
|
||||||
if bos.path.exists(cert_path):
|
|
||||||
self.cert_path = cert_path
|
|
||||||
else:
|
|
||||||
self.cert_path = ""
|
|
||||||
|
|
||||||
if self.tp_q:
|
if self.tp_q:
|
||||||
self.start_threads(4)
|
self.start_threads(4)
|
||||||
|
|
||||||
@@ -141,8 +174,20 @@ class HttpSrv(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def _build_statics(self) -> None:
|
||||||
|
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
||||||
|
for fn in df:
|
||||||
|
ap = absreal(os.path.join(dp, fn))
|
||||||
|
self.statics.add(ap)
|
||||||
|
if ap.endswith(".gz") or ap.endswith(".br"):
|
||||||
|
self.statics.add(ap[:-3])
|
||||||
|
|
||||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||||
self.nm = NetMap([self.ip], netdevs)
|
ips = set()
|
||||||
|
for ip, _ in self.bound:
|
||||||
|
ips.add(ip)
|
||||||
|
|
||||||
|
self.nm = NetMap(list(ips), netdevs)
|
||||||
|
|
||||||
def start_threads(self, n: int) -> None:
|
def start_threads(self, n: int) -> None:
|
||||||
self.tp_nthr += n
|
self.tp_nthr += n
|
||||||
@@ -178,20 +223,19 @@ class HttpSrv(object):
|
|||||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||||
if self.args.j != 1:
|
if self.args.j != 1:
|
||||||
# lost in the pickle; redefine
|
# lost in the pickle; redefine
|
||||||
try:
|
if not ANYWIN or self.args.reuseaddr:
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
except:
|
|
||||||
pass
|
|
||||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
||||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
|
||||||
sck.settimeout(None) # < does not inherit, ^ does
|
|
||||||
|
|
||||||
self.ip, self.port = sck.getsockname()[:2]
|
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
sck.settimeout(None) # < does not inherit, ^ opts above do
|
||||||
|
|
||||||
|
ip, port = sck.getsockname()[:2]
|
||||||
self.srvs.append(sck)
|
self.srvs.append(sck)
|
||||||
|
self.bound.add((ip, port))
|
||||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||||
Daemon(
|
Daemon(
|
||||||
self.thr_listen,
|
self.thr_listen,
|
||||||
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", self.ip, self.port),
|
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
|
||||||
(sck,),
|
(sck,),
|
||||||
)
|
)
|
||||||
|
|
||||||
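With -j worker processes, the listening socket crosses a pickle boundary and loses its options, so listen() re-applies them; SO_REUSEADDR is now applied on non-Windows or only when explicitly requested. A small stdlib-only sketch of the re-arm step, with the function name being illustrative:

    import socket

    def rearm_listener(sck: socket.socket, anywin: bool, reuseaddr: bool) -> tuple[str, int]:
        # socket options are lost in the pickle when handing the socket to a worker
        if not anywin or reuseaddr:
            sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        sck.settimeout(None)    # the timeout does not inherit to accepted sockets; the opts above do
        ip, port = sck.getsockname()[:2]
        return ip, port         # recorded in self.bound so NetMap can be built from all bindings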
@@ -429,6 +473,9 @@ class HttpSrv(object):
|
|||||||
self.clients.remove(cli)
|
self.clients.remove(cli)
|
||||||
self.ncli -= 1
|
self.ncli -= 1
|
||||||
|
|
||||||
|
if cli.u2idx:
|
||||||
|
self.put_u2idx(str(addr), cli.u2idx)
|
||||||
|
|
||||||
def cachebuster(self) -> str:
|
def cachebuster(self) -> str:
|
||||||
if time.time() - self.cb_ts < 1:
|
if time.time() - self.cb_ts < 1:
|
||||||
return self.cb_v
|
return self.cb_v
|
||||||
@@ -450,3 +497,31 @@ class HttpSrv(object):
|
|||||||
self.cb_v = v.decode("ascii")[-4:]
|
self.cb_v = v.decode("ascii")[-4:]
|
||||||
self.cb_ts = time.time()
|
self.cb_ts = time.time()
|
||||||
return self.cb_v
|
return self.cb_v
|
||||||
|
|
||||||
|
def get_u2idx(self, ident: str) -> Optional[U2idx]:
|
||||||
|
utab = self.u2idx_free
|
||||||
|
for _ in range(100): # 5/0.05 = 5sec
|
||||||
|
with self.mutex:
|
||||||
|
if utab:
|
||||||
|
if ident in utab:
|
||||||
|
return utab.pop(ident)
|
||||||
|
|
||||||
|
return utab.pop(list(utab.keys())[0])
|
||||||
|
|
||||||
|
if self.u2idx_n < CORES:
|
||||||
|
self.u2idx_n += 1
|
||||||
|
return U2idx(self)
|
||||||
|
|
||||||
|
time.sleep(0.05)
|
||||||
|
# not using conditional waits, on a hunch that
|
||||||
|
# average performance will be faster like this
|
||||||
|
# since most servers won't be fully saturated
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def put_u2idx(self, ident: str, u2idx: U2idx) -> None:
|
||||||
|
with self.mutex:
|
||||||
|
while ident in self.u2idx_free:
|
||||||
|
ident += "a"
|
||||||
|
|
||||||
|
self.u2idx_free[ident] = u2idx
|
||||||
|
|||||||
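Instead of one U2idx (search/index handle) per TCP connection, HttpSrv now keeps a pool capped at CORES instances, hands them out per client ident, and waits up to roughly five seconds before giving up. A self-contained sketch of the same pooling idea with a generic resource; the names are illustrative, not the copyparty API:

    import threading, time

    class Pool:
        def __init__(self, limit: int) -> None:
            self.free: dict[str, object] = {}        # ident -> parked resource
            self.n = 0
            self.limit = limit
            self.mutex = threading.Lock()

        def get(self, ident: str):
            for _ in range(100):                      # 100 * 0.05s = 5s budget
                with self.mutex:
                    if self.free:
                        # prefer the resource this client used last, otherwise any parked one
                        key = ident if ident in self.free else next(iter(self.free))
                        return self.free.pop(key)
                    if self.n < self.limit:
                        self.n += 1
                        return object()               # stand-in for U2idx(self)
                time.sleep(0.05)
            return None                               # saturated; the caller degrades gracefully

        def put(self, ident: str, res) -> None:
            with self.mutex:
                while ident in self.free:             # avoid clobbering a parked entry
                    ident += "a"
                self.free[ident] = res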
@@ -17,7 +17,9 @@ class Ico(object):
|
|||||||
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
|
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
|
||||||
"""placeholder to make thumbnails not break"""
|
"""placeholder to make thumbnails not break"""
|
||||||
|
|
||||||
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
|
bext = ext.encode("ascii", "replace")
|
||||||
|
ext = bext.decode("utf-8")
|
||||||
|
zb = hashlib.sha1(bext).digest()[2:4]
|
||||||
if PY2:
|
if PY2:
|
||||||
zb = [ord(x) for x in zb]
|
zb = [ord(x) for x in zb]
|
||||||
|
|
||||||
@@ -33,7 +35,7 @@ class Ico(object):
|
|||||||
h = int(100 / (float(sw) / float(sh)))
|
h = int(100 / (float(sw) / float(sh)))
|
||||||
w = 100
|
w = 100
|
||||||
|
|
||||||
if chrome and as_thumb:
|
if chrome:
|
||||||
# cannot handle more than ~2000 unique SVGs
|
# cannot handle more than ~2000 unique SVGs
|
||||||
if HAVE_PIL:
|
if HAVE_PIL:
|
||||||
# svg: 3s, cache: 6s, this: 8s
|
# svg: 3s, cache: 6s, this: 8s
|
||||||
@@ -43,8 +45,19 @@ class Ico(object):
|
|||||||
w = 64
|
w = 64
|
||||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||||
pb = ImageDraw.Draw(img)
|
pb = ImageDraw.Draw(img)
|
||||||
tw, th = pb.textsize(ext)
|
try:
|
||||||
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
|
_, _, tw, th = pb.textbbox((0, 0), ext)
|
||||||
|
except:
|
||||||
|
tw, th = pb.textsize(ext)
|
||||||
|
|
||||||
|
tw += len(ext)
|
||||||
|
cw = tw // len(ext)
|
||||||
|
x = ((w - tw) // 2) - (cw * 2) // 3
|
||||||
|
fill = "#" + c[6:]
|
||||||
|
for ch in ext:
|
||||||
|
pb.text((x, (h - th) // 2), " %s " % (ch,), fill=fill)
|
||||||
|
x += cw
|
||||||
|
|
||||||
img = img.resize((w * 3, h * 3), Image.NEAREST)
|
img = img.resize((w * 3, h * 3), Image.NEAREST)
|
||||||
|
|
||||||
buf = BytesIO()
|
buf = BytesIO()
|
||||||
|
|||||||
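ImageDraw.textsize() was deprecated and later removed in Pillow 10, so the icon generator now tries textbbox() first and falls back to textsize() on older Pillow builds, then spaces the glyphs out manually. A trimmed sketch of just the measurement fallback, assuming Pillow is installed:

    from PIL import Image, ImageDraw

    img = Image.new("RGB", (100, 30), "#444")
    pb = ImageDraw.Draw(img)
    ext = "flac"
    try:
        # Pillow >= 8: textbbox returns (left, top, right, bottom)
        _, _, tw, th = pb.textbbox((0, 0), ext)
    except AttributeError:
        # older Pillow still provides textsize
        tw, th = pb.textsize(ext)
    pb.text(((100 - tw) // 2, (30 - th) // 2), ext, fill="#fff")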
@@ -1,6 +1,7 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
import errno
|
||||||
import random
|
import random
|
||||||
import select
|
import select
|
||||||
import socket
|
import socket
|
||||||
@@ -11,6 +12,7 @@ from ipaddress import IPv4Network, IPv6Network
|
|||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .__init__ import unicode as U
|
from .__init__ import unicode as U
|
||||||
from .multicast import MC_Sck, MCast
|
from .multicast import MC_Sck, MCast
|
||||||
|
from .stolen.dnslib import AAAA
|
||||||
from .stolen.dnslib import CLASS as DC
|
from .stolen.dnslib import CLASS as DC
|
||||||
from .stolen.dnslib import (
|
from .stolen.dnslib import (
|
||||||
NSEC,
|
NSEC,
|
||||||
@@ -20,12 +22,11 @@ from .stolen.dnslib import (
|
|||||||
SRV,
|
SRV,
|
||||||
TXT,
|
TXT,
|
||||||
A,
|
A,
|
||||||
AAAA,
|
|
||||||
DNSHeader,
|
DNSHeader,
|
||||||
DNSQuestion,
|
DNSQuestion,
|
||||||
DNSRecord,
|
DNSRecord,
|
||||||
)
|
)
|
||||||
from .util import CachedSet, Daemon, Netdev, min_ex
|
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
@@ -55,10 +56,11 @@ class MDNS_Sck(MC_Sck):
|
|||||||
self.bp_bye = b""
|
self.bp_bye = b""
|
||||||
|
|
||||||
self.last_tx = 0.0
|
self.last_tx = 0.0
|
||||||
|
self.tx_ex = False
|
||||||
|
|
||||||
|
|
||||||
class MDNS(MCast):
|
class MDNS(MCast):
|
||||||
def __init__(self, hub: "SvcHub") -> None:
|
def __init__(self, hub: "SvcHub", ngen: int) -> None:
|
||||||
al = hub.args
|
al = hub.args
|
||||||
grp4 = "" if al.zm6 else MDNS4
|
grp4 = "" if al.zm6 else MDNS4
|
||||||
grp6 = "" if al.zm4 else MDNS6
|
grp6 = "" if al.zm4 else MDNS6
|
||||||
@@ -66,7 +68,8 @@ class MDNS(MCast):
|
|||||||
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
|
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
|
||||||
)
|
)
|
||||||
self.srv: dict[socket.socket, MDNS_Sck] = {}
|
self.srv: dict[socket.socket, MDNS_Sck] = {}
|
||||||
|
self.logsrc = "mDNS-{}".format(ngen)
|
||||||
|
self.ngen = ngen
|
||||||
self.ttl = 300
|
self.ttl = 300
|
||||||
|
|
||||||
zs = self.args.name + ".local."
|
zs = self.args.name + ".local."
|
||||||
@@ -89,7 +92,7 @@ class MDNS(MCast):
|
|||||||
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
|
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
|
||||||
|
|
||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
self.log_func("mDNS", msg, c)
|
self.log_func(self.logsrc, msg, c)
|
||||||
|
|
||||||
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
|
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
|
||||||
zms = self.args.zms
|
zms = self.args.zms
|
||||||
@@ -275,6 +278,18 @@ class MDNS(MCast):
|
|||||||
zf = time.time() + 2
|
zf = time.time() + 2
|
||||||
self.probing = zf # cant unicast so give everyone an extra sec
|
self.probing = zf # cant unicast so give everyone an extra sec
|
||||||
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3
|
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7] # rfc-8.3
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.run2()
|
||||||
|
except OSError as ex:
|
||||||
|
if ex.errno != errno.EBADF:
|
||||||
|
raise
|
||||||
|
|
||||||
|
self.log("stopping due to {}".format(ex), "90")
|
||||||
|
|
||||||
|
self.log("stopped", 2)
|
||||||
|
|
||||||
|
def run2(self) -> None:
|
||||||
last_hop = time.time()
|
last_hop = time.time()
|
||||||
ihop = self.args.mc_hop
|
ihop = self.args.mc_hop
|
||||||
while self.running:
|
while self.running:
|
||||||
@@ -287,12 +302,15 @@ class MDNS(MCast):
|
|||||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||||
self.rx4.cln()
|
self.rx4.cln()
|
||||||
self.rx6.cln()
|
self.rx6.cln()
|
||||||
|
buf = b""
|
||||||
|
addr = ("0", 0)
|
||||||
for sck in rx:
|
for sck in rx:
|
||||||
buf, addr = sck.recvfrom(4096)
|
|
||||||
try:
|
try:
|
||||||
|
buf, addr = sck.recvfrom(4096)
|
||||||
self.eat(buf, addr, sck)
|
self.eat(buf, addr, sck)
|
||||||
except:
|
except:
|
||||||
if not self.running:
|
if not self.running:
|
||||||
|
self.log("stopped", 2)
|
||||||
return
|
return
|
||||||
|
|
||||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||||
@@ -311,27 +329,34 @@ class MDNS(MCast):
|
|||||||
|
|
||||||
def stop(self, panic=False) -> None:
|
def stop(self, panic=False) -> None:
|
||||||
self.running = False
|
self.running = False
|
||||||
if not panic:
|
for srv in self.srv.values():
|
||||||
for srv in self.srv.values():
|
try:
|
||||||
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
|
if panic:
|
||||||
|
srv.sck.close()
|
||||||
|
else:
|
||||||
|
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
self.srv = {}
|
self.srv = {}
|
||||||
|
|
||||||
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
||||||
cip = addr[0]
|
cip = addr[0]
|
||||||
v6 = ":" in cip
|
v6 = ":" in cip
|
||||||
if cip.startswith("169.254") or v6 and not cip.startswith("fe80"):
|
if (cip.startswith("169.254") and not self.ll_ok) or (
|
||||||
|
v6 and not cip.startswith("fe80")
|
||||||
|
):
|
||||||
return
|
return
|
||||||
|
|
||||||
cache = self.rx6 if v6 else self.rx4
|
cache = self.rx6 if v6 else self.rx4
|
||||||
if buf in cache.c:
|
if buf in cache.c:
|
||||||
return
|
return
|
||||||
|
|
||||||
cache.add(buf)
|
|
||||||
srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip) # type: ignore
|
srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip) # type: ignore
|
||||||
if not srv:
|
if not srv:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
cache.add(buf)
|
||||||
now = time.time()
|
now = time.time()
|
||||||
|
|
||||||
if self.args.zmv and cip != srv.ip and cip not in srv.ips:
|
if self.args.zmv and cip != srv.ip and cip not in srv.ips:
|
||||||
@@ -369,6 +394,14 @@ class MDNS(MCast):
|
|||||||
# avahi broadcasting 127.0.0.1-only packets
|
# avahi broadcasting 127.0.0.1-only packets
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# check if we've been given additional IPs
|
||||||
|
for ip in list_ips():
|
||||||
|
if ip in cips:
|
||||||
|
self.sips.add(ip)
|
||||||
|
|
||||||
|
if not self.sips.isdisjoint(cips):
|
||||||
|
return
|
||||||
|
|
||||||
t = "mdns zeroconf: "
|
t = "mdns zeroconf: "
|
||||||
if self.probing:
|
if self.probing:
|
||||||
t += "Cannot start; hostname '{}' is occupied"
|
t += "Cannot start; hostname '{}' is occupied"
|
||||||
@@ -502,6 +535,15 @@ class MDNS(MCast):
|
|||||||
if now < srv.last_tx + cooldown:
|
if now < srv.last_tx + cooldown:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
srv.sck.sendto(msg, (srv.grp, 5353))
|
try:
|
||||||
srv.last_tx = now
|
srv.sck.sendto(msg, (srv.grp, 5353))
|
||||||
|
srv.last_tx = now
|
||||||
|
except Exception as ex:
|
||||||
|
if srv.tx_ex:
|
||||||
|
return True
|
||||||
|
|
||||||
|
srv.tx_ex = True
|
||||||
|
t = "tx({},|{}|,{}): {}"
|
||||||
|
self.log(t.format(srv.ip, len(msg), cooldown, ex), 3)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|||||||
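Multicast announces can fail on flaky interfaces (for example a VPN link going down), so sends are now wrapped and only the first failure per socket is logged, using a tx_ex latch on the socket wrapper. A minimal sketch of the latch pattern; srv is assumed to have the MDNS_Sck-style attributes seen in this diff:

    def announce(srv, msg: bytes, cooldown: float, now: float, log) -> bool:
        if now < srv.last_tx + cooldown:
            return False                       # rate-limited; nothing sent this round
        try:
            srv.sck.sendto(msg, (srv.grp, 5353))
            srv.last_tx = now
        except Exception as ex:
            if not srv.tx_ex:                  # log only the first failure per socket
                srv.tx_ex = True
                log("tx({},|{}|,{}): {}".format(srv.ip, len(msg), cooldown, ex))
        return True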
@@ -8,9 +8,19 @@ import shutil
|
|||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from .__init__ import PY2, WINDOWS, E, unicode
|
from .__init__ import EXE, PY2, WINDOWS, E, unicode
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
|
from .util import (
|
||||||
|
FFMPEG_URL,
|
||||||
|
REKOBO_LKEY,
|
||||||
|
fsenc,
|
||||||
|
min_ex,
|
||||||
|
pybin,
|
||||||
|
retchk,
|
||||||
|
runcmd,
|
||||||
|
sfsenc,
|
||||||
|
uncyg,
|
||||||
|
)
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
@@ -259,7 +269,9 @@ class MTag(object):
|
|||||||
self.args = args
|
self.args = args
|
||||||
self.usable = True
|
self.usable = True
|
||||||
self.prefer_mt = not args.no_mtag_ff
|
self.prefer_mt = not args.no_mtag_ff
|
||||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
self.backend = (
|
||||||
|
"ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen"
|
||||||
|
)
|
||||||
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
||||||
mappings = args.mtm
|
mappings = args.mtm
|
||||||
or_ffprobe = " or FFprobe"
|
or_ffprobe = " or FFprobe"
|
||||||
@@ -285,9 +297,14 @@ class MTag(object):
|
|||||||
self.log(msg, c=3)
|
self.log(msg, c=3)
|
||||||
|
|
||||||
if not self.usable:
|
if not self.usable:
|
||||||
|
if EXE:
|
||||||
|
t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: "
|
||||||
|
self.log(t + FFMPEG_URL)
|
||||||
|
return
|
||||||
|
|
||||||
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||||
pybin = os.path.basename(sys.executable)
|
pyname = os.path.basename(pybin)
|
||||||
self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
|
self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1)
|
||||||
return
|
return
|
||||||
|
|
||||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||||
@@ -456,7 +473,10 @@ class MTag(object):
|
|||||||
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
|
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
|
||||||
if not md.info.length and not md.info.codec:
|
if not md.info.length and not md.info.codec:
|
||||||
raise Exception()
|
raise Exception()
|
||||||
except:
|
except Exception as ex:
|
||||||
|
if self.args.mtag_v:
|
||||||
|
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
|
||||||
|
|
||||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||||
|
|
||||||
sz = bos.path.getsize(abspath)
|
sz = bos.path.getsize(abspath)
|
||||||
@@ -519,12 +539,15 @@ class MTag(object):
|
|||||||
|
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
try:
|
try:
|
||||||
|
if EXE:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||||
pypath = str(os.pathsep.join(zsl))
|
pypath = str(os.pathsep.join(zsl))
|
||||||
env["PYTHONPATH"] = pypath
|
env["PYTHONPATH"] = pypath
|
||||||
except:
|
except:
|
||||||
if not E.ox:
|
if not E.ox and not EXE:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
ret: dict[str, Any] = {}
|
ret: dict[str, Any] = {}
|
||||||
@@ -532,7 +555,7 @@ class MTag(object):
|
|||||||
try:
|
try:
|
||||||
cmd = [parser.bin, abspath]
|
cmd = [parser.bin, abspath]
|
||||||
if parser.bin.endswith(".py"):
|
if parser.bin.endswith(".py"):
|
||||||
cmd = [sys.executable] + cmd
|
cmd = [pybin] + cmd
|
||||||
|
|
||||||
args = {
|
args = {
|
||||||
"env": env,
|
"env": env,
|
||||||
@@ -551,7 +574,7 @@ class MTag(object):
|
|||||||
else:
|
else:
|
||||||
cmd = ["nice"] + cmd
|
cmd = ["nice"] + cmd
|
||||||
|
|
||||||
bcmd = [fsenc(x) for x in cmd]
|
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
||||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||||
v = v.strip()
|
v = v.strip()
|
||||||
|
|||||||
@@ -5,10 +5,17 @@ import socket
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
import ipaddress
|
import ipaddress
|
||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import (
|
||||||
|
IPv4Address,
|
||||||
|
IPv4Network,
|
||||||
|
IPv6Address,
|
||||||
|
IPv6Network,
|
||||||
|
ip_address,
|
||||||
|
ip_network,
|
||||||
|
)
|
||||||
|
|
||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import MACOS, TYPE_CHECKING
|
||||||
from .util import MACOS, Netdev, min_ex, spack
|
from .util import Netdev, find_prefix, min_ex, spack
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
@@ -75,6 +82,7 @@ class MCast(object):
|
|||||||
|
|
||||||
self.srv: dict[socket.socket, MC_Sck] = {} # listening sockets
|
self.srv: dict[socket.socket, MC_Sck] = {} # listening sockets
|
||||||
self.sips: set[str] = set() # all listening ips (including failed attempts)
|
self.sips: set[str] = set() # all listening ips (including failed attempts)
|
||||||
|
self.ll_ok: set[str] = set() # fallback linklocal IPv4 and IPv6 addresses
|
||||||
self.b2srv: dict[bytes, MC_Sck] = {} # binary-ip -> server socket
|
self.b2srv: dict[bytes, MC_Sck] = {} # binary-ip -> server socket
|
||||||
self.b4: list[bytes] = [] # sorted list of binary-ips
|
self.b4: list[bytes] = [] # sorted list of binary-ips
|
||||||
self.b6: list[bytes] = [] # sorted list of binary-ips
|
self.b6: list[bytes] = [] # sorted list of binary-ips
|
||||||
@@ -102,15 +110,23 @@ class MCast(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
||||||
|
ips = find_prefix(ips, netdevs)
|
||||||
# ip -> ip/prefix
|
|
||||||
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
|
|
||||||
|
|
||||||
on = self.on[:]
|
on = self.on[:]
|
||||||
off = self.off[:]
|
off = self.off[:]
|
||||||
for lst in (on, off):
|
for lst in (on, off):
|
||||||
for av in list(lst):
|
for av in list(lst):
|
||||||
|
try:
|
||||||
|
arg_net = ip_network(av, False)
|
||||||
|
except:
|
||||||
|
arg_net = None
|
||||||
|
|
||||||
for sk, sv in netdevs.items():
|
for sk, sv in netdevs.items():
|
||||||
|
if arg_net:
|
||||||
|
net_ip = ip_address(sk.split("/")[0])
|
||||||
|
if net_ip in arg_net and sk not in lst:
|
||||||
|
lst.append(sk)
|
||||||
|
|
||||||
if (av == str(sv.idx) or av == sv.name) and sk not in lst:
|
if (av == str(sv.idx) or av == sv.name) and sk not in lst:
|
||||||
lst.append(sk)
|
lst.append(sk)
|
||||||
|
|
||||||
@@ -166,9 +182,21 @@ class MCast(object):
|
|||||||
srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)
|
srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)
|
||||||
|
|
||||||
# gvfs breaks if a linklocal ip appears in a dns reply
|
# gvfs breaks if a linklocal ip appears in a dns reply
|
||||||
srv.ips = {k: v for k, v in srv.ips.items() if not k.startswith("fe80")}
|
ll = {
|
||||||
|
k: v
|
||||||
|
for k, v in srv.ips.items()
|
||||||
|
if k.startswith("169.254") or k.startswith("fe80")
|
||||||
|
}
|
||||||
|
rt = {k: v for k, v in srv.ips.items() if k not in ll}
|
||||||
|
|
||||||
|
if self.args.ll or not rt:
|
||||||
|
self.ll_ok.update(list(ll))
|
||||||
|
|
||||||
|
if not self.args.ll:
|
||||||
|
srv.ips = rt or ll
|
||||||
|
|
||||||
if not srv.ips:
|
if not srv.ips:
|
||||||
self.log("no routable IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
self.log("no IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
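The interface scan now keeps link-local addresses (169.254.x.x and fe80::) in a separate set and only serves them when --ll is given or no routable address exists; addresses that do end up used are remembered in ll_ok so incoming link-local clients are not dropped later. Roughly, the split looks like this (the ip -> ip_network dict shape is taken from the hunk above):

    # srv.ips maps "ip" -> ip_network; split it into link-local and routable
    ll = {k: v for k, v in srv.ips.items()
          if k.startswith("169.254") or k.startswith("fe80")}
    rt = {k: v for k, v in srv.ips.items() if k not in ll}

    if self.args.ll or not rt:
        self.ll_ok.update(list(ll))    # accept link-local peers on these addresses
    if not self.args.ll:
        srv.ips = rt or ll             # prefer routable, fall back to link-local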
@@ -318,6 +346,16 @@ class MCast(object):
|
|||||||
# just give it something
|
# just give it something
|
||||||
ret = list(self.srv.values())[0]
|
ret = list(self.srv.values())[0]
|
||||||
|
|
||||||
|
if not ret and cip.startswith("169.254"):
|
||||||
|
# idk how to map LL IPv4 msgs to nics;
|
||||||
|
# just pick one and hope for the best
|
||||||
|
lls = (
|
||||||
|
x
|
||||||
|
for x in self.srv.values()
|
||||||
|
if next((y for y in x.ips if y in self.ll_ok), None)
|
||||||
|
)
|
||||||
|
ret = next(lls, None)
|
||||||
|
|
||||||
if ret:
|
if ret:
|
||||||
t = "new client on {} ({}): {}"
|
t = "new client on {} ({}): {}"
|
||||||
self.log(t.format(ret.name, ret.net, cip), 6)
|
self.log(t.format(ret.name, ret.net, cip), 6)
|
||||||
|
|||||||
copyparty/pwhash.py (new file, 145 lines)
@@ -0,0 +1,145 @@
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from .__init__ import unicode
|
||||||
|
|
||||||
|
|
||||||
|
class PWHash(object):
|
||||||
|
def __init__(self, args: argparse.Namespace):
|
||||||
|
self.args = args
|
||||||
|
|
||||||
|
try:
|
||||||
|
alg, ac = args.ah_alg.split(",")
|
||||||
|
except:
|
||||||
|
alg = args.ah_alg
|
||||||
|
ac = {}
|
||||||
|
|
||||||
|
if alg == "none":
|
||||||
|
alg = ""
|
||||||
|
|
||||||
|
self.alg = alg
|
||||||
|
self.ac = ac
|
||||||
|
if not alg:
|
||||||
|
self.on = False
|
||||||
|
self.hash = unicode
|
||||||
|
return
|
||||||
|
|
||||||
|
self.on = True
|
||||||
|
self.salt = args.ah_salt.encode("utf-8")
|
||||||
|
self.cache: dict[str, str] = {}
|
||||||
|
self.mutex = threading.Lock()
|
||||||
|
self.hash = self._cache_hash
|
||||||
|
|
||||||
|
if alg == "sha2":
|
||||||
|
self._hash = self._gen_sha2
|
||||||
|
elif alg == "scrypt":
|
||||||
|
self._hash = self._gen_scrypt
|
||||||
|
elif alg == "argon2":
|
||||||
|
self._hash = self._gen_argon2
|
||||||
|
else:
|
||||||
|
t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
|
||||||
|
raise Exception(t.format(alg))
|
||||||
|
|
||||||
|
def _cache_hash(self, plain: str) -> str:
|
||||||
|
with self.mutex:
|
||||||
|
try:
|
||||||
|
return self.cache[plain]
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if not plain:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
if len(plain) > 255:
|
||||||
|
raise Exception("password too long")
|
||||||
|
|
||||||
|
if len(self.cache) > 9000:
|
||||||
|
self.cache = {}
|
||||||
|
|
||||||
|
ret = self._hash(plain)
|
||||||
|
self.cache[plain] = ret
|
||||||
|
return ret
|
||||||
|
|
||||||
|
def _gen_sha2(self, plain: str) -> str:
|
||||||
|
its = int(self.ac[0]) if self.ac else 424242
|
||||||
|
bplain = plain.encode("utf-8")
|
||||||
|
ret = b"\n"
|
||||||
|
for _ in range(its):
|
||||||
|
ret = hashlib.sha512(self.salt + bplain + ret).digest()
|
||||||
|
|
||||||
|
return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")
|
||||||
|
|
||||||
|
def _gen_scrypt(self, plain: str) -> str:
|
||||||
|
cost = 2 << 13
|
||||||
|
its = 2
|
||||||
|
blksz = 8
|
||||||
|
para = 4
|
||||||
|
try:
|
||||||
|
cost = 2 << int(self.ac[0])
|
||||||
|
its = int(self.ac[1])
|
||||||
|
blksz = int(self.ac[2])
|
||||||
|
para = int(self.ac[3])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
ret = plain.encode("utf-8")
|
||||||
|
for _ in range(its):
|
||||||
|
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
|
||||||
|
|
||||||
|
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
|
||||||
|
|
||||||
|
def _gen_argon2(self, plain: str) -> str:
|
||||||
|
from argon2.low_level import Type as ArgonType
|
||||||
|
from argon2.low_level import hash_secret
|
||||||
|
|
||||||
|
time_cost = 3
|
||||||
|
mem_cost = 256
|
||||||
|
parallelism = 4
|
||||||
|
version = 19
|
||||||
|
try:
|
||||||
|
time_cost = int(self.ac[0])
|
||||||
|
mem_cost = int(self.ac[1])
|
||||||
|
parallelism = int(self.ac[2])
|
||||||
|
version = int(self.ac[3])
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
bplain = plain.encode("utf-8")
|
||||||
|
|
||||||
|
bret = hash_secret(
|
||||||
|
secret=bplain,
|
||||||
|
salt=self.salt,
|
||||||
|
time_cost=time_cost,
|
||||||
|
memory_cost=mem_cost * 1024,
|
||||||
|
parallelism=parallelism,
|
||||||
|
hash_len=24,
|
||||||
|
type=ArgonType.ID,
|
||||||
|
version=version,
|
||||||
|
)
|
||||||
|
ret = bret.split(b"$")[-1].decode("utf-8")
|
||||||
|
return "+" + ret.replace("/", "_").replace("+", "-")
|
||||||
|
|
||||||
|
def stdin(self) -> None:
|
||||||
|
while True:
|
||||||
|
ln = sys.stdin.readline().strip()
|
||||||
|
if not ln:
|
||||||
|
break
|
||||||
|
print(self.hash(ln))
|
||||||
|
|
||||||
|
def cli(self) -> None:
|
||||||
|
import getpass
|
||||||
|
|
||||||
|
while True:
|
||||||
|
p1 = getpass.getpass("password> ")
|
||||||
|
p2 = getpass.getpass("again or just hit ENTER> ")
|
||||||
|
if p2 and p1 != p2:
|
||||||
|
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
|
||||||
|
continue
|
||||||
|
print(self.hash(p1))
|
||||||
|
print()
|
||||||
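pwhash.py is a new module that hashes account passwords with argon2, scrypt, or iterated sha2 depending on --ah-alg, caches results, and offers small stdin/CLI helpers. A hypothetical usage sketch; the Namespace fields mirror only what the class reads, and argon2 would additionally need the argon2-cffi package:

    import argparse
    from copyparty.pwhash import PWHash

    args = argparse.Namespace(ah_alg="scrypt", ah_salt="long-random-salt")
    pwh = PWHash(args)
    print(pwh.hash("hunter2"))     # "+..." url-safe base64 of a 24-byte digest
    print(pwh.hash(""))            # ""    empty passwords stay empty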
copyparty/res/__init__.py (new empty file)
@@ -9,13 +9,13 @@ import sys
|
|||||||
import time
|
import time
|
||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
|
||||||
from .authsrv import LEELOO_DALLAS, VFS
|
from .authsrv import LEELOO_DALLAS, VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import Daemon, min_ex
|
from .util import Daemon, min_ex, pybin, runhook
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any
|
from typing import Any, Union
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
@@ -42,8 +42,12 @@ class SMB(object):
|
|||||||
from impacket import smbserver
|
from impacket import smbserver
|
||||||
from impacket.ntlm import compute_lmhash, compute_nthash
|
from impacket.ntlm import compute_lmhash, compute_nthash
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
if EXE:
|
||||||
|
print("copyparty.exe cannot do SMB")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
|
m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
|
||||||
print(m.format(min_ex(), sys.executable))
|
print(m.format(min_ex(), pybin))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# patch vfs into smbserver.os
|
# patch vfs into smbserver.os
|
||||||
@@ -109,6 +113,9 @@ class SMB(object):
|
|||||||
self.stop = srv.stop
|
self.stop = srv.stop
|
||||||
self.log("smb", "listening @ {}:{}".format(ip, port))
|
self.log("smb", "listening @ {}:{}".format(ip, port))
|
||||||
|
|
||||||
|
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
|
self.log("smb", msg, c)
|
||||||
|
|
||||||
def start(self) -> None:
|
def start(self) -> None:
|
||||||
Daemon(self.srv.start)
|
Daemon(self.srv.start)
|
||||||
|
|
||||||
@@ -165,8 +172,15 @@ class SMB(object):
|
|||||||
yeet("blocked write (no --smbw): " + vpath)
|
yeet("blocked write (no --smbw): " + vpath)
|
||||||
|
|
||||||
vfs, ap = self._v2a("open", vpath, *a)
|
vfs, ap = self._v2a("open", vpath, *a)
|
||||||
if wr and not vfs.axs.uwrite:
|
if wr:
|
||||||
yeet("blocked write (no-write-acc): " + vpath)
|
if not vfs.axs.uwrite:
|
||||||
|
yeet("blocked write (no-write-acc): " + vpath)
|
||||||
|
|
||||||
|
xbu = vfs.flags.get("xbu")
|
||||||
|
if xbu and not runhook(
|
||||||
|
self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
|
||||||
|
):
|
||||||
|
yeet("blocked by xbu server config: " + vpath)
|
||||||
|
|
||||||
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
|
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
|
||||||
if wr:
|
if wr:
|
||||||
@@ -194,11 +208,13 @@ class SMB(object):
|
|||||||
vfs, rem = vfs.get_dbv(rem)
|
vfs, rem = vfs.get_dbv(rem)
|
||||||
self.hub.up2k.hash_file(
|
self.hub.up2k.hash_file(
|
||||||
vfs.realpath,
|
vfs.realpath,
|
||||||
|
vfs.vpath,
|
||||||
vfs.flags,
|
vfs.flags,
|
||||||
rem,
|
rem,
|
||||||
fn,
|
fn,
|
||||||
"1.7.6.2",
|
"1.7.6.2",
|
||||||
time.time(),
|
time.time(),
|
||||||
|
"",
|
||||||
)
|
)
|
||||||
|
|
||||||
def _rename(self, vp1: str, vp2: str) -> None:
|
def _rename(self, vp1: str, vp2: str) -> None:
|
||||||
@@ -245,7 +261,7 @@ class SMB(object):
|
|||||||
yeet("blocked delete (no-del-acc): " + vpath)
|
yeet("blocked delete (no-del-acc): " + vpath)
|
||||||
|
|
||||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [])
|
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
|
||||||
|
|
||||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||||
if not self.args.smbw:
|
if not self.args.smbw:
|
||||||
|
|||||||
```diff
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
+import errno
 import re
 import select
 import socket
@@ -8,7 +9,7 @@ from email.utils import formatdate
 
 from .__init__ import TYPE_CHECKING
 from .multicast import MC_Sck, MCast
-from .util import CachedSet, min_ex
+from .util import CachedSet, html_escape, min_ex
 
 if TYPE_CHECKING:
     from .broker_util import BrokerCli
@@ -73,13 +74,15 @@ class SSDPr(object):
 </device>
 </root>"""
 
+        c = html_escape
         sip, sport = hc.s.getsockname()[:2]
+        sip = sip.replace("::ffff:", "")
         proto = "https" if self.args.https_only else "http"
         ubase = "{}://{}:{}".format(proto, sip, sport)
         zsl = self.args.zsl
         url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
         name = "{} @ {}".format(self.args.doctitle, self.args.name)
-        zs = zs.strip().format(ubase, url, name, self.args.zsid)
+        zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
         hc.reply(zs.encode("utf-8", "replace"))
         return False  # close connectino
 
@@ -87,19 +90,22 @@ class SSDPr(object):
 class SSDPd(MCast):
     """communicates with ssdp clients over multicast"""
 
-    def __init__(self, hub: "SvcHub") -> None:
+    def __init__(self, hub: "SvcHub", ngen: int) -> None:
         al = hub.args
         vinit = al.zsv and not al.zmv
         super(SSDPd, self).__init__(
             hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
         )
         self.srv: dict[socket.socket, SSDP_Sck] = {}
+        self.logsrc = "SSDP-{}".format(ngen)
+        self.ngen = ngen
 
         self.rxc = CachedSet(0.7)
         self.txc = CachedSet(5)  # win10: every 3 sec
         self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)
 
     def log(self, msg: str, c: Union[int, str] = 0) -> None:
-        self.log_func("SSDP", msg, c)
+        self.log_func(self.logsrc, msg, c)
 
     def run(self) -> None:
         try:
@@ -124,17 +130,30 @@ class SSDPd(MCast):
             srv.hport = hp
 
         self.log("listening")
+        try:
+            self.run2()
+        except OSError as ex:
+            if ex.errno != errno.EBADF:
+                raise
+
+            self.log("stopping due to {}".format(ex), "90")
+
+        self.log("stopped", 2)
+
+    def run2(self) -> None:
         while self.running:
-            rdy = select.select(self.srv, [], [], 180)
+            rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
            rx: list[socket.socket] = rdy[0]  # type: ignore
             self.rxc.cln()
+            buf = b""
+            addr = ("0", 0)
             for sck in rx:
-                buf, addr = sck.recvfrom(4096)
                 try:
+                    buf, addr = sck.recvfrom(4096)
                     self.eat(buf, addr)
                 except:
                     if not self.running:
-                        return
+                        break
 
                     t = "{} {} \033[33m|{}| {}\n{}".format(
                         self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
@@ -143,23 +162,29 @@ class SSDPd(MCast):
 
     def stop(self) -> None:
         self.running = False
+        for srv in self.srv.values():
+            try:
+                srv.sck.close()
+            except:
+                pass
+
         self.srv = {}
 
     def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
         cip = addr[0]
-        if cip.startswith("169.254"):
+        if cip.startswith("169.254") and not self.ll_ok:
             return
 
         if buf in self.rxc.c:
             return
 
-        self.rxc.add(buf)
         srv: Optional[SSDP_Sck] = self.map_client(cip)  # type: ignore
         if not srv:
             return
 
+        self.rxc.add(buf)
         if not buf.startswith(b"M-SEARCH * HTTP/1."):
-            raise Exception("not an ssdp message")
+            return
 
         if not self.ptn_st.search(buf):
             return
@@ -183,7 +208,8 @@ BOOTID.UPNP.ORG: 0
 CONFIGID.UPNP.ORG: 1
 
 """
-        zs = zs.format(formatdate(usegmt=True), srv.ip, srv.hport, self.args.zsid)
+        v4 = srv.ip.replace("::ffff:", "")
+        zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
         zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
         srv.sck.sendto(zb, addr[:2])
 
```
```diff
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals
 
+import stat
 import tarfile
 
 from queue import Queue
@@ -79,6 +80,9 @@ class StreamTar(StreamArc):
         src = f["ap"]
         fsi = f["st"]
 
+        if stat.S_ISDIR(fsi.st_mode):
+            return
+
         inf = tarfile.TarInfo(name=name)
         inf.mode = fsi.st_mode
         inf.size = fsi.st_size
```
```diff
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
 import argparse
 import base64
 import calendar
+import errno
 import gzip
 import logging
 import os
@@ -27,13 +28,15 @@ if True:  # pylint: disable=using-constant-test
     import typing
     from typing import Any, Optional, Union
 
-from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
+from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
 from .authsrv import AuthSrv
+from .cert import ensure_cert
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
 from .tcpsrv import TcpSrv
 from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
 from .up2k import Up2k
 from .util import (
+    FFMPEG_URL,
     VERSIONS,
     Daemon,
     Garda,
@@ -43,6 +46,7 @@ from .util import (
     ansi_re,
     min_ex,
     mp,
+    pybin,
     start_log_thrs,
     start_stackmon,
 )
@@ -66,10 +70,18 @@ class SvcHub(object):
     put() can return a queue (if want_reply=True) which has a blocking get() with the response.
     """
 
-    def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
+    def __init__(
+        self,
+        args: argparse.Namespace,
+        dargs: argparse.Namespace,
+        argv: list[str],
+        printed: str,
+    ) -> None:
         self.args = args
+        self.dargs = dargs
         self.argv = argv
         self.E: EnvParams = args.E
+        self.no_ansi = args.no_ansi
         self.logf: Optional[typing.TextIO] = None
         self.logf_base_fn = ""
         self.stop_req = False
@@ -96,13 +108,13 @@ class SvcHub(object):
         if args.sss or args.s >= 3:
             args.ss = True
             args.no_dav = True
+            args.no_logues = True
+            args.no_readme = True
             args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
             args.ls = args.ls or "**,*,ln,p,r"
 
         if args.ss or args.s >= 2:
             args.s = True
-            args.no_logues = True
-            args.no_readme = True
             args.unpost = 0
             args.no_del = True
             args.no_mv = True
@@ -118,6 +130,9 @@ class SvcHub(object):
             args.no_robots = True
             args.force_js = True
 
+        if not self._process_config():
+            raise Exception("bad config")
+
         self.log = self._log_disabled if args.q else self._log_enabled
         if args.lo:
             self._setup_logfile(printed)
@@ -139,25 +154,26 @@ class SvcHub(object):
             self.log("root", t.format(args.j))
 
         if not args.no_fpool and args.j != 1:
-            t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
-            if ANYWIN:
-                t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
-                args.no_fpool = True
-
-            self.log("root", t, c=3)
+            t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
+            self.log("root", t.format(args.j), c=3)
+            args.no_fpool = True
 
         bri = "zy"[args.theme % 2 :][:1]
         ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
         args.theme = "{0}{1} {0} {1}".format(ch, bri)
 
-        if not args.hardlink and args.never_symlink:
-            args.no_dedup = True
-
         if args.log_fk:
             args.log_fk = re.compile(args.log_fk)
 
         # initiate all services to manage
-        self.asrv = AuthSrv(self.args, self.log)
+        self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
+
+        if args.cgen:
+            self.asrv.cgen()
+
+        if args.exit == "cfg":
+            sys.exit(0)
+
         if args.ls:
             self.asrv.dbg_ls()
 
@@ -166,9 +182,6 @@ class SvcHub(object):
 
         self.log("root", "max clients: {}".format(self.args.nc))
 
-        if not self._process_config():
-            raise Exception("bad config")
-
         self.tcpsrv = TcpSrv(self)
         self.up2k = Up2k(self)
 
@@ -182,6 +195,7 @@ class SvcHub(object):
 
         self.args.th_dec = list(decs.keys())
         self.thumbsrv = None
+        want_ff = False
         if not args.no_thumb:
             t = ", ".join(self.args.th_dec) or "(None available)"
             self.log("thumb", "decoder preference: {}".format(t))
@@ -193,8 +207,12 @@ class SvcHub(object):
             if self.args.th_dec:
                 self.thumbsrv = ThumbSrv(self)
             else:
+                want_ff = True
                 msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
-                msg = msg.format(" " * 37, os.path.basename(sys.executable))
+                msg = msg.format(" " * 37, os.path.basename(pybin))
+                if EXE:
+                    msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails"
+
                 self.log("thumb", msg, c=3)
 
         if not args.no_acode and args.no_thumb:
@@ -206,6 +224,10 @@ class SvcHub(object):
             msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
             self.log("thumb", msg, c=6)
             args.no_acode = True
+            want_ff = True
+
+        if want_ff and ANYWIN:
+            self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
 
         args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
 
@@ -218,7 +240,8 @@ class SvcHub(object):
         if args.ftp or args.ftps:
             from .ftpd import Ftpd
 
-            self.ftpd = Ftpd(self)
+            self.ftpd: Optional[Ftpd] = None
+            Daemon(self.start_ftpd, "start_ftpd")
             zms += "f" if args.ftp else "F"
 
         if args.smb:
@@ -236,6 +259,7 @@ class SvcHub(object):
         if not args.zms:
             args.zms = zms
 
+        self.zc_ngen = 0
         self.mdns: Optional["MDNS"] = None
         self.ssdp: Optional["SSDPd"] = None
 
@@ -247,6 +271,28 @@ class SvcHub(object):
 
         self.broker = Broker(self)
 
+    def start_ftpd(self) -> None:
+        time.sleep(30)
+        if self.ftpd:
+            return
+
+        self.restart_ftpd()
+
+    def restart_ftpd(self) -> None:
+        if not hasattr(self, "ftpd"):
+            return
+
+        from .ftpd import Ftpd
+
+        if self.ftpd:
+            return  # todo
+
+        if not os.path.exists(self.args.cert):
+            ensure_cert(self.log, self.args)
+
+        self.ftpd = Ftpd(self)
+        self.log("root", "started FTPd")
+
     def thr_httpsrv_up(self) -> None:
         time.sleep(1 if self.args.ign_ebind_all else 5)
         expected = self.broker.num_workers * self.tcpsrv.nsrv
@@ -295,11 +341,53 @@ class SvcHub(object):
         al.zs_on = al.zs_on or al.z_on
         al.zm_off = al.zm_off or al.z_off
         al.zs_off = al.zs_off or al.z_off
-        for n in ("zm_on", "zm_off", "zs_on", "zs_off"):
-            vs = getattr(al, n).replace(" ", ",").split(",")
+        ns = "zm_on zm_off zs_on zs_off acao acam"
+        for n in ns.split(" "):
+            vs = getattr(al, n).split(",")
+            vs = [x.strip() for x in vs]
             vs = [x for x in vs if x]
             setattr(al, n, vs)
 
+        ns = "acao acam"
+        for n in ns.split(" "):
+            vs = getattr(al, n)
+            vd = {zs: 1 for zs in vs}
+            setattr(al, n, vd)
+
+        ns = "acao"
+        for n in ns.split(" "):
+            vs = getattr(al, n)
+            vs = [x.lower() for x in vs]
+            setattr(al, n, vs)
+
+        R = al.rp_loc
+        if "//" in R or ":" in R:
+            t = "found URL in --rp-loc; it should be just the location, for example /foo/bar"
+            raise Exception(t)
+
+        al.R = R = R.strip("/")
+        al.SR = "/" + R if R else ""
+        al.RS = R + "/" if R else ""
+        al.SRS = "/" + R + "/" if R else "/"
+
+        if al.rsp_jtr:
+            al.rsp_slp = 0.000001
+
+        al.th_covers = set(al.th_covers.split(","))
+
+        for k in "c".split(" "):
+            vl = getattr(al, k)
+            if not vl:
+                continue
+
+            vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
+            setattr(al, k, vl)
+
+        for k in "lo hist ssl_log".split(" "):
+            vs = getattr(al, k)
+            if vs and vs.startswith("~"):
+                setattr(al, k, os.path.expanduser(vs))
+
         return True
 
     def _setlimits(self) -> None:
@@ -354,6 +442,7 @@ class SvcHub(object):
 
     def _setup_logfile(self, printed: str) -> None:
         base_fn = fn = sel_fn = self._logname()
+        do_xz = fn.lower().endswith(".xz")
         if fn != self.args.lo:
             ctr = 0
             # yup this is a race; if started sufficiently concurrently, two
@@ -365,7 +454,7 @@ class SvcHub(object):
             fn = sel_fn
 
         try:
-            if fn.lower().endswith(".xz"):
+            if do_xz:
                 import lzma
 
                 lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
@@ -376,7 +465,7 @@ class SvcHub(object):
 
             lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
 
-        argv = [sys.executable] + self.argv
+        argv = [pybin] + self.argv
         if hasattr(shlex, "quote"):
             argv = [shlex.quote(x) for x in argv]
         else:
@@ -392,24 +481,10 @@ class SvcHub(object):
 
     def run(self) -> None:
         self.tcpsrv.run()
-        if getattr(self.args, "zm", False):
-            try:
-                from .mdns import MDNS
-
-                self.mdns = MDNS(self)
-                Daemon(self.mdns.run, "mdns")
-            except:
-                self.log("root", "mdns startup failed;\n" + min_ex(), 3)
-
-        if getattr(self.args, "zs", False):
-            try:
-                from .ssdp import SSDPd
-
-                self.ssdp = SSDPd(self)
-                Daemon(self.ssdp.run, "ssdp")
-            except:
-                self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
+        if getattr(self.args, "z_chk", 0) and (
+            getattr(self.args, "zm", False) or getattr(self.args, "zs", False)
+        ):
+            Daemon(self.tcpsrv.netmon, "netmon")
 
         Daemon(self.thr_httpsrv_up, "sig-hsrv-up2")
 
@@ -441,6 +516,33 @@ class SvcHub(object):
         else:
             self.stop_thr()
 
+    def start_zeroconf(self) -> None:
+        self.zc_ngen += 1
+
+        if getattr(self.args, "zm", False):
+            try:
+                from .mdns import MDNS
+
+                if self.mdns:
+                    self.mdns.stop(True)
+
+                self.mdns = MDNS(self, self.zc_ngen)
+                Daemon(self.mdns.run, "mdns")
+            except:
+                self.log("root", "mdns startup failed;\n" + min_ex(), 3)
+
+        if getattr(self.args, "zs", False):
+            try:
+                from .ssdp import SSDPd
+
+                if self.ssdp:
+                    self.ssdp.stop()
+
+                self.ssdp = SSDPd(self, self.zc_ngen)
+                Daemon(self.ssdp.run, "ssdp")
+            except:
+                self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
+
     def reload(self) -> str:
         if self.reloading:
             return "cannot reload; already in progress"
@@ -570,8 +672,14 @@ class SvcHub(object):
             return
 
         with self.log_mutex:
-            ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
-            self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg))
+            zd = datetime.utcnow()
+            ts = "%04d-%04d-%06d.%03d" % (
+                zd.year,
+                zd.month * 100 + zd.day,
+                (zd.hour * 100 + zd.minute) * 100 + zd.second,
+                zd.microsecond // 1000,
+            )
+            self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
 
             now = time.time()
             if now >= self.next_day:
@@ -598,26 +706,36 @@ class SvcHub(object):
         now = time.time()
         if now >= self.next_day:
             dt = datetime.utcfromtimestamp(now)
-            print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
+            zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
+            zs = zs.format(dt.strftime("%Y-%m-%d"))
+            print(zs, end="")
             self._set_next_day()
+            if self.logf:
+                self.logf.write(zs)
 
-        fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
-        if not VT100:
-            fmt = "{} {:21} {}\n"
+        fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
+        if self.no_ansi:
+            fmt = "%s %-21s %s\n"
             if "\033" in msg:
                 msg = ansi_re.sub("", msg)
             if "\033" in src:
                 src = ansi_re.sub("", src)
         elif c:
             if isinstance(c, int):
-                msg = "\033[3{}m{}\033[0m".format(c, msg)
+                msg = "\033[3%sm%s\033[0m" % (c, msg)
             elif "\033" not in c:
-                msg = "\033[{}m{}\033[0m".format(c, msg)
+                msg = "\033[%sm%s\033[0m" % (c, msg)
             else:
-                msg = "{}{}\033[0m".format(c, msg)
+                msg = "%s%s\033[0m" % (c, msg)
 
-        ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
-        msg = fmt.format(ts, src, msg)
+        zd = datetime.utcfromtimestamp(now)
+        ts = "%02d:%02d:%02d.%03d" % (
+            zd.hour,
+            zd.minute,
+            zd.second,
+            zd.microsecond // 1000,
+        )
+        msg = fmt % (ts, src, msg)
         try:
             print(msg, end="")
         except UnicodeEncodeError:
@@ -625,13 +743,20 @@ class SvcHub(object):
                 print(msg.encode("utf-8", "replace").decode(), end="")
             except:
                 print(msg.encode("ascii", "replace").decode(), end="")
+        except OSError as ex:
+            if ex.errno != errno.EPIPE:
+                raise
 
         if self.logf:
             self.logf.write(msg)
 
     def pr(self, *a: Any, **ka: Any) -> None:
-        with self.log_mutex:
-            print(*a, **ka)
+        try:
+            with self.log_mutex:
+                print(*a, **ka)
+        except OSError as ex:
+            if ex.errno != errno.EPIPE:
+                raise
 
     def check_mp_support(self) -> str:
         if MACOS:
```
```diff
@@ -2,6 +2,7 @@
 from __future__ import print_function, unicode_literals
 
 import calendar
+import stat
 import time
 import zlib
 
@@ -238,6 +239,9 @@ class StreamZip(StreamArc):
         src = f["ap"]
         st = f["st"]
 
+        if stat.S_ISDIR(st.st_mode):
+            return
+
         sz = st.st_size
         ts = st.st_mtime
 
```
```diff
@@ -5,8 +5,10 @@ import os
 import re
 import socket
 import sys
+import time
 
-from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
+from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
+from .cert import gencert
 from .stolen.qrcodegen import QrCode
 from .util import (
     E_ACCESS,
@@ -28,6 +30,9 @@ if TYPE_CHECKING:
 if not hasattr(socket, "IPPROTO_IPV6"):
     setattr(socket, "IPPROTO_IPV6", 41)
 
+if not hasattr(socket, "IP_FREEBIND"):
+    setattr(socket, "IP_FREEBIND", 15)
+
 
 class TcpSrv(object):
     """
@@ -46,6 +51,8 @@ class TcpSrv(object):
         self.stopping = False
         self.srv: list[socket.socket] = []
         self.bound: list[tuple[str, int]] = []
+        self.netdevs: dict[str, Netdev] = {}
+        self.netlist = ""
         self.nsrv = 0
         self.qr = ""
         pad = False
@@ -121,6 +128,20 @@ class TcpSrv(object):
         else:
             self.netdevs = {}
 
+        # keep IPv6 LL-only nics
+        ll_ok: set[str] = set()
+        for ip, nd in self.netdevs.items():
+            if not ip.startswith("fe80"):
+                continue
+
+            just_ll = True
+            for ip2, nd2 in self.netdevs.items():
+                if nd == nd2 and ":" in ip2 and not ip2.startswith("fe80"):
+                    just_ll = False
+
+            if just_ll or self.args.ll:
+                ll_ok.add(ip.split("/")[0])
+
         qr1: dict[str, list[int]] = {}
         qr2: dict[str, list[int]] = {}
         msgs = []
@@ -128,7 +149,7 @@ class TcpSrv(object):
         title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
         t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
         for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
-            if ip.startswith("fe80"):
+            if ip.startswith("fe80") and ip not in ll_ok:
                 continue
 
             for port in sorted(self.args.p):
@@ -195,21 +216,28 @@ class TcpSrv(object):
     def _listen(self, ip: str, port: int) -> None:
         ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
         srv = socket.socket(ipv, socket.SOCK_STREAM)
-        try:
-            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
-        except:
-            pass
-        srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        if not ANYWIN or self.args.reuseaddr:
+            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-        srv.settimeout(None)  # < does not inherit, ^ does
+        srv.settimeout(None)  # < does not inherit, ^ opts above do
 
         try:
             srv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
         except:
             pass  # will create another ipv4 socket instead
 
+        if not ANYWIN and self.args.freebind:
+            srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
+
         try:
             srv.bind((ip, port))
+            sport = srv.getsockname()[1]
+            if port != sport:
+                # linux 6.0.16 lets you bind a port which is in use
+                # except it just gives you a random port instead
+                raise OSError(E_ADDR_IN_USE[0], "")
             self.srv.append(srv)
         except (OSError, socket.error) as ex:
             if ex.errno in E_ADDR_IN_USE:
@@ -228,6 +256,14 @@ class TcpSrv(object):
             ip, port = srv.getsockname()[:2]
             try:
                 srv.listen(self.args.nc)
+                try:
+                    ok = srv.getsockopt(socket.SOL_SOCKET, socket.SO_ACCEPTCONN)
+                except:
+                    ok = 1  # macos
+
+                if not ok:
+                    # some linux don't throw on listen(0.0.0.0) after listen(::)
+                    raise Exception("failed to listen on {}".format(srv.getsockname()))
             except:
                 if ip == "0.0.0.0" and ("::", port) in bound:
                     # dualstack
@@ -255,7 +291,13 @@ class TcpSrv(object):
         self.srv = srvs
         self.bound = bound
         self.nsrv = len(srvs)
+        self._distribute_netdevs()
+
+    def _distribute_netdevs(self):
         self.hub.broker.say("set_netdevs", self.netdevs)
+        self.hub.start_zeroconf()
+        gencert(self.log, self.args, self.netdevs)
+        self.hub.restart_ftpd()
 
     def shutdown(self) -> None:
         self.stopping = True
@@ -267,6 +309,27 @@ class TcpSrv(object):
 
         self.log("tcpsrv", "ok bye")
 
+    def netmon(self):
+        while not self.stopping:
+            time.sleep(self.args.z_chk)
+            netdevs = self.detect_interfaces(self.args.i)
+            if not netdevs:
+                continue
+
+            added = "nothing"
+            removed = "nothing"
+            for k, v in netdevs.items():
+                if k not in self.netdevs:
+                    added = "{} = {}".format(k, v)
+            for k, v in self.netdevs.items():
+                if k not in netdevs:
+                    removed = "{} = {}".format(k, v)
+
+            t = "network change detected:\n added {}\033[0;33m\nremoved {}"
+            self.log("tcpsrv", t.format(added, removed), 3)
+            self.netdevs = netdevs
+            self._distribute_netdevs()
+
     def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
         from .stolen.ifaddr import get_adapters
 
@@ -276,10 +339,6 @@ class TcpSrv(object):
             for nip in nic.ips:
                 ipa = nip.ip[0] if ":" in str(nip.ip) else nip.ip
                 sip = "{}/{}".format(ipa, nip.network_prefix)
-                if sip.startswith("169.254"):
-                    # browsers dont impl linklocal
-                    continue
-
                 nd = Netdev(sip, nic.index or 0, nic.nice_name, "")
                 eps[sip] = nd
                 try:
@@ -291,6 +350,12 @@ class TcpSrv(object):
                 except:
                     pass
 
+        netlist = str(sorted(eps.items()))
+        if netlist == self.netlist and self.netdevs:
+            return {}
+
+        self.netlist = netlist
+
         if "0.0.0.0" not in listen_ips and "::" not in listen_ips:
             eps = {k: v for k, v in eps.items() if k.split("/")[0] in listen_ips}
 
@@ -439,7 +504,7 @@ class TcpSrv(object):
             zoom = 1
 
         qr = qrc.render(zoom, pad)
-        if not VT100:
+        if self.args.no_ansi:
             return "{}\n{}".format(txt, qr)
 
         halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m"
```
```diff
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
 
 import base64
 import hashlib
+import logging
 import os
 import shutil
 import subprocess as sp
@@ -11,14 +12,17 @@ import time
 
 from queue import Queue
 
-from .__init__ import TYPE_CHECKING
+from .__init__ import ANYWIN, TYPE_CHECKING
+from .authsrv import VFS
 from .bos import bos
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
 from .util import (
+    FFMPEG_URL,
     BytesIO,
     Cooldown,
     Daemon,
     Pebkac,
+    afsenc,
     fsenc,
     min_ex,
     runcmd,
@@ -61,12 +65,16 @@ try:
         HAVE_AVIF = True
     except:
         pass
+
+    logging.getLogger("PIL").setLevel(logging.WARNING)
 except:
     pass
 
 try:
     HAVE_VIPS = True
     import pyvips
+
+    logging.getLogger("pyvips").setLevel(logging.WARNING)
 except:
     HAVE_VIPS = False
 
@@ -77,14 +85,14 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
     # base64 = 64 = 4096
     rd, fn = vsplit(rem)
     if rd:
-        h = hashlib.sha512(fsenc(rd)).digest()
+        h = hashlib.sha512(afsenc(rd)).digest()
         b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
         rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
     else:
         rd = "top"
 
     # could keep original filenames but this is safer re pathlen
-    h = hashlib.sha512(fsenc(fn)).digest()
+    h = hashlib.sha512(afsenc(fn)).digest()
     fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
 
     if fmt in ("opus", "caf"):
@@ -103,8 +111,6 @@ class ThumbSrv(object):
         self.args = hub.args
         self.log_func = hub.log
 
-        res = hub.args.th_size.split("x")
-        self.res = tuple([int(x) for x in res])
         self.poke_cd = Cooldown(self.args.th_poke)
 
         self.mutex = threading.Lock()
@@ -112,7 +118,7 @@ class ThumbSrv(object):
         self.stopping = False
         self.nthr = max(1, self.args.th_mt)
 
-        self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
+        self.q: Queue[Optional[tuple[str, str, VFS]]] = Queue(self.nthr * 4)
         for n in range(self.nthr):
             Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
 
@@ -128,6 +134,8 @@ class ThumbSrv(object):
             msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
             msg += ", ".join(missing)
             self.log(msg, c=3)
+            if ANYWIN and self.args.no_acode:
+                self.log("download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
 
         if self.args.th_clean:
             Daemon(self.cleaner, "thumb.cln")
@@ -175,6 +183,10 @@ class ThumbSrv(object):
         with self.mutex:
             return not self.nthr
 
+    def getres(self, vn: VFS) -> tuple[int, int]:
+        w, h = vn.flags["thsize"].split("x")
+        return int(w), int(h)
+
     def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
@@ -191,18 +203,24 @@ class ThumbSrv(object):
                 self.log("wait {}".format(tpath))
         except:
             thdir = os.path.dirname(tpath)
-            bos.makedirs(thdir)
+            bos.makedirs(os.path.join(thdir, "w"))
 
             inf_path = os.path.join(thdir, "dir.txt")
             if not bos.path.exists(inf_path):
                 with open(inf_path, "wb") as f:
-                    f.write(fsenc(os.path.dirname(abspath)))
+                    f.write(afsenc(os.path.dirname(abspath)))
 
             self.busy[tpath] = [cond]
             do_conv = True
 
         if do_conv:
-            self.q.put((abspath, tpath))
+            allvols = list(self.asrv.vfs.all_vols.values())
+            vn = next((x for x in allvols if x.realpath == ptop), None)
+            if not vn:
+                self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
+                vn = self.asrv.vfs.all_aps[0][1]
+
+            self.q.put((abspath, tpath, vn))
             self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
 
         while not self.stopping:
@@ -239,50 +257,62 @@ class ThumbSrv(object):
             if not task:
                 break
 
-            abspath, tpath = task
+            abspath, tpath, vn = task
             ext = abspath.split(".")[-1].lower()
             png_ok = False
-            fun = None
+            funs = []
             if not bos.path.exists(tpath):
                 for lib in self.args.th_dec:
-                    if fun:
-                        break
-                    elif lib == "pil" and ext in self.fmt_pil:
-                        fun = self.conv_pil
+                    if lib == "pil" and ext in self.fmt_pil:
+                        funs.append(self.conv_pil)
                     elif lib == "vips" and ext in self.fmt_vips:
-                        fun = self.conv_vips
+                        funs.append(self.conv_vips)
                     elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
-                        fun = self.conv_ffmpeg
+                        funs.append(self.conv_ffmpeg)
                     elif lib == "ff" and ext in self.fmt_ffa:
                         if tpath.endswith(".opus") or tpath.endswith(".caf"):
-                            fun = self.conv_opus
+                            funs.append(self.conv_opus)
                         elif tpath.endswith(".png"):
-                            fun = self.conv_waves
+                            funs.append(self.conv_waves)
                             png_ok = True
                         else:
-                            fun = self.conv_spec
+                            funs.append(self.conv_spec)
 
             if not png_ok and tpath.endswith(".png"):
                 raise Pebkac(400, "png only allowed for waveforms")
 
-            if fun:
+            tdir, tfn = os.path.split(tpath)
+            ttpath = os.path.join(tdir, "w", tfn)
+            try:
+                bos.unlink(ttpath)
+            except:
+                pass
+
+            for fun in funs:
                 try:
-                    fun(abspath, tpath)
+                    fun(abspath, ttpath, vn)
+                    break
                 except Exception as ex:
                     msg = "{} could not create thumbnail of {}\n{}"
                     msg = msg.format(fun.__name__, abspath, min_ex())
                     c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
                     self.log(msg, c)
                     if getattr(ex, "returncode", 0) != 321:
-                        with open(tpath, "wb") as _:
-                            pass
+                        if fun == funs[-1]:
+                            with open(ttpath, "wb") as _:
+                                pass
                     else:
                         # ffmpeg may spawn empty files on windows
                         try:
-                            os.unlink(tpath)
+                            os.unlink(ttpath)
                         except:
                             pass
 
+            try:
+                bos.rename(ttpath, tpath)
+            except:
+                pass
+
             with self.mutex:
                 subs = self.busy[tpath]
                 del self.busy[tpath]
@@ -294,9 +324,10 @@ class ThumbSrv(object):
         with self.mutex:
             self.nthr -= 1
 
-    def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
+    def fancy_pillow(self, im: "Image.Image", vn: VFS) -> "Image.Image":
         # exif_transpose is expensive (loads full image + unconditional copy)
-        r = max(*self.res) * 2
+        res = self.getres(vn)
+        r = max(*res) * 2
         im.thumbnail((r, r), resample=Image.LANCZOS)
         try:
             k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
@@ -310,23 +341,23 @@ class ThumbSrv(object):
         if rot in rots:
             im = im.transpose(rots[rot])
 
-        if self.args.th_no_crop:
-            im.thumbnail(self.res, resample=Image.LANCZOS)
+        if "nocrop" in vn.flags:
+            im.thumbnail(res, resample=Image.LANCZOS)
         else:
             iw, ih = im.size
-            dw, dh = self.res
+            dw, dh = res
             res = (min(iw, dw), min(ih, dh))
             im = ImageOps.fit(im, res, method=Image.LANCZOS)
 
         return im
 
-    def conv_pil(self, abspath: str, tpath: str) -> None:
+    def conv_pil(self, abspath: str, tpath: str, vn: VFS) -> None:
         with Image.open(fsenc(abspath)) as im:
             try:
-                im = self.fancy_pillow(im)
+                im = self.fancy_pillow(im, vn)
             except Exception as ex:
                 self.log("fancy_pillow {}".format(ex), "90")
-                im.thumbnail(self.res)
+                im.thumbnail(self.getres(vn))
 
             fmts = ["RGB", "L"]
             args = {"quality": 40}
@@ -349,12 +380,12 @@ class ThumbSrv(object):
 
             im.save(tpath, **args)
 
-    def conv_vips(self, abspath: str, tpath: str) -> None:
+    def conv_vips(self, abspath: str, tpath: str, vn: VFS) -> None:
         crops = ["centre", "none"]
-        if self.args.th_no_crop:
+        if "nocrop" in vn.flags:
             crops = ["none"]
 
-        w, h = self.res
+        w, h = self.getres(vn)
         kw = {"height": h, "size": "down", "intent": "relative"}
 
         for c in crops:
@@ -363,12 +394,13 @@ class ThumbSrv(object):
                 img = pyvips.Image.thumbnail(abspath, w, **kw)
                 break
             except:
-                pass
+                if c == crops[-1]:
+                    raise
 
         img.write_to_file(tpath, Q=40)
 
-    def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
-        ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
+    def conv_ffmpeg(self, abspath: str, tpath: str, vn: VFS) -> None:
+        ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
         if not ret:
             return
 
@@ -380,12 +412,13 @@ class ThumbSrv(object):
             seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
 
         scale = "scale={0}:{1}:force_original_aspect_ratio="
-        if self.args.th_no_crop:
+        if "nocrop" in vn.flags:
             scale += "decrease,setsar=1:1"
         else:
             scale += "increase,crop={0}:{1},setsar=1:1"
 
-        bscale = scale.format(*list(self.res)).encode("utf-8")
+        res = self.getres(vn)
+        bscale = scale.format(*list(res)).encode("utf-8")
         # fmt: off
         cmd = [
             b"ffmpeg",
@@ -417,11 +450,11 @@ class ThumbSrv(object):
         ]
 
         cmd += [fsenc(tpath)]
-        self._run_ff(cmd)
+        self._run_ff(cmd, vn)
 
-    def _run_ff(self, cmd: list[bytes]) -> None:
+    def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
         # self.log((b" ".join(cmd)).decode("utf-8"))
-        ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
+        ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"])
         if not ret:
             return
 
@@ -464,8 +497,8 @@ class ThumbSrv(object):
         self.log(t + txt, c=c)
         raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
 
-    def conv_waves(self, abspath: str, tpath: str) -> None:
-        ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
+    def conv_waves(self, abspath: str, tpath: str, vn: VFS) -> None:
+        ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
         if "ac" not in ret:
             raise Exception("not audio")
 
@@ -490,10 +523,10 @@ class ThumbSrv(object):
         # fmt: on
 
         cmd += [fsenc(tpath)]
-        self._run_ff(cmd)
+        self._run_ff(cmd, vn)
 
-    def conv_spec(self, abspath: str, tpath: str) -> None:
-        ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
+    def conv_spec(self, abspath: str, tpath: str, vn: VFS) -> None:
+        ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
         if "ac" not in ret:
             raise Exception("not audio")
 
@@ -533,23 +566,34 @@ class ThumbSrv(object):
         ]
 
         cmd += [fsenc(tpath)]
-        self._run_ff(cmd)
+        self._run_ff(cmd, vn)
 
-    def conv_opus(self, abspath: str, tpath: str) -> None:
+    def conv_opus(self, abspath: str, tpath: str, vn: VFS) -> None:
         if self.args.no_acode:
             raise Exception("disabled in server config")
 
-        ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
+        ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
         if "ac" not in ret:
             raise Exception("not audio")
 
+        try:
+            dur = ret[".dur"][1]
+        except:
+            dur = 0
+
         src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
         want_caf = tpath.endswith(".caf")
         tmp_opus = tpath
         if want_caf:
-            tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
+            tmp_opus = tpath + ".opus"
+            try:
+                bos.unlink(tmp_opus)
+            except:
+                pass
 
-        if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
+        caf_src = abspath if src_opus else tmp_opus
+
+        if not want_caf or not src_opus:
             # fmt: off
             cmd = [
                 b"ffmpeg",
@@ -564,9 +608,34 @@ class ThumbSrv(object):
                 fsenc(tmp_opus)
             ]
             # fmt: on
-            self._run_ff(cmd)
+            self._run_ff(cmd, vn)
 
-        if want_caf:
+        # iOS fails to play some "insufficiently complex" files
+        # (average file shorter than 8 seconds), so of course we
+        # fix that by mixing in some inaudible pink noise :^)
+        # 6.3 sec seems like the cutoff so lets do 7, and
+        # 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
+        if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
+            # fmt: off
+            cmd = [
+                b"ffmpeg",
+                b"-nostdin",
+                b"-v", b"error",
+                b"-hide_banner",
+                b"-i", fsenc(abspath),
+                b"-filter_complex", b"anoisesrc=a=0.001:d=7:c=pink,asplit[l][r]; [l][r]amerge[s]; [0:a:0][s]amix",
+                b"-map_metadata", b"-1",
+                b"-ac", b"2",
+                b"-c:a", b"libopus",
+                b"-b:a", b"128k",
+                b"-f", b"caf",
+                fsenc(tpath)
+            ]
+            # fmt: on
+            self._run_ff(cmd, vn)
+
+        elif want_caf:
+            # simple remux should be safe
             # fmt: off
             cmd = [
                 b"ffmpeg",
@@ -581,7 +650,13 @@ class ThumbSrv(object):
                 fsenc(tpath)
             ]
             # fmt: on
-            self._run_ff(cmd)
+            self._run_ff(cmd, vn)
+
+        if tmp_opus != tpath:
+            try:
+                bos.unlink(tmp_opus)
+            except:
+                pass
 
     def poke(self, tdir: str) -> None:
         if not self.poke_cd.poke(tdir):
```
```diff
@@ -34,14 +34,14 @@ if True:  # pylint: disable=using-constant-test
     from typing import Any, Optional, Union
 
 if TYPE_CHECKING:
-    from .httpconn import HttpConn
+    from .httpsrv import HttpSrv
 
 
 class U2idx(object):
-    def __init__(self, conn: "HttpConn") -> None:
-        self.log_func = conn.log_func
-        self.asrv = conn.asrv
-        self.args = conn.args
+    def __init__(self, hsrv: "HttpSrv") -> None:
+        self.log_func = hsrv.log
+        self.asrv = hsrv.asrv
+        self.args = hsrv.args
         self.timeout = self.args.srch_time
 
         if not HAVE_SQLITE3:
@@ -51,7 +51,7 @@ class U2idx(object):
         self.active_id = ""
         self.active_cur: Optional["sqlite3.Cursor"] = None
         self.cur: dict[str, "sqlite3.Cursor"] = {}
-        self.mem_cur = sqlite3.connect(":memory:").cursor()
+        self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
         self.mem_cur.execute(r"create table a (b text)")
 
         self.p_end = 0.0
@@ -69,7 +69,7 @@ class U2idx(object):
 
         fsize = body["size"]
         fhash = body["hash"]
-        wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
+        wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
 
         uq = "substr(w,1,16) = ? and w = ?"
         uv: list[Union[str, int]] = [wark[:16], wark]
@@ -97,19 +97,23 @@ class U2idx(object):
             return None
 
         cur = None
-        if ANYWIN:
+        if ANYWIN and not bos.path.exists(db_path + "-wal"):
             uri = ""
             try:
                 uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
-                cur = sqlite3.connect(uri, 2, uri=True).cursor()
+                db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
+                cur = db.cursor()
+                cur.execute('pragma table_info("up")').fetchone()
                 self.log("ro: {}".format(db_path))
             except:
                 self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
+                # may not fail until the pragma so unset it
+                cur = None
 
         if not cur:
             # on windows, this steals the write-lock from up2k.deferred_init --
             # seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
-            cur = sqlite3.connect(db_path, 2).cursor()
+            cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
             self.log("opened {}".format(db_path))
 
         self.cur[ptop] = cur
@@ -117,10 +121,10 @@ class U2idx(object):
 
     def search(
         self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
-    ) -> tuple[list[dict[str, Any]], list[str]]:
+    ) -> tuple[list[dict[str, Any]], list[str], bool]:
         """search by query params"""
         if not HAVE_SQLITE3:
-            return [], []
+            return [], [], False
 
         q = ""
         v: Union[str, int] = ""
@@ -272,7 +276,7 @@ class U2idx(object):
         have_up: bool,
         have_mt: bool,
         lim: int,
-    ) -> tuple[list[dict[str, Any]], list[str]]:
+    ) -> tuple[list[dict[str, Any]], list[str], bool]:
         done_flag: list[bool] = []
         self.active_id = "{:.6f}_{}".format(
             time.time(), threading.current_thread().ident
@@ -290,6 +294,7 @@ class U2idx(object):
         self.log("qs: {!r} {!r}".format(uq, uv))
 
         ret = []
+        seen_rps: set[str] = set()
         lim = min(lim, int(self.args.srch_hits))
         taglist = {}
         for (vtop, ptop, flags) in vols:
@@ -308,16 +313,21 @@ class U2idx(object):
 
             sret = []
             fk = flags.get("fk")
+            dots = flags.get("dotsrch")
             c = cur.execute(uq, tuple(vuv))
             for hit in c:
                 w, ts, sz, rd, fn, ip, at = hit[:7]
-                lim -= 1
-                if lim < 0:
-                    break
 
                 if rd.startswith("//") or fn.startswith("//"):
                     rd, fn = s3dec(rd, fn)
 
+                rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
+                if not dots and "/." in ("/" + rp):
+                    continue
+
+                if rp in seen_rps:
+                    continue
+
                 if not fk:
                     suf = ""
                 else:
@@ -334,8 +344,12 @@ class U2idx(object):
                         )[:fk]
                     )
 
-                rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
-                sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
+                lim -= 1
+                if lim < 0:
+                    break
+
+                seen_rps.add(rp)
+                sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
 
             for hit in sret:
                 w = hit["w"]
@@ -354,17 +368,9 @@ class U2idx(object):
         done_flag.append(True)
         self.active_id = ""
 
-        # undupe hits from multiple metadata keys
-        if len(ret) > 1:
-            ret = [ret[0]] + [
-                y
-                for x, y in zip(ret[:-1], ret[1:])
-                if x["rp"].split("?")[0] != y["rp"].split("?")[0]
-            ]
-
         ret.sort(key=itemgetter("rp"))
 
-        return ret, list(taglist.keys())
+        return ret, list(taglist.keys()), lim < 0
 
     def terminator(self, identifier: str, done_flag: list[bool]) -> None:
         for _ in range(self.timeout):
```
|||||||
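For orientation, a minimal standalone sketch of the dedup-and-clamp pattern the reworked search loop above uses (hypothetical names, not the actual copyparty code): duplicate result paths are skipped, the hit limit is only spent on accepted hits, and the extra boolean in the return value reports whether the limit was exhausted.

    def clamp_and_dedup(hits, lim):
        # hits: iterable of dicts that carry an "rp" (result path) key
        seen, out, clipped = set(), [], False
        for hit in hits:
            rp = hit["rp"]
            if rp in seen:
                continue  # same path already returned by another volume / metadata key
            lim -= 1
            if lim < 0:
                clipped = True  # more matches existed than the caller allowed
                break
            seen.add(rp)
            out.append(hit)
        return out, clipped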
copyparty/up2k.py (1055 lines changed; file diff suppressed because it is too large)
@@ -6,6 +6,7 @@ import contextlib
 import errno
 import hashlib
 import hmac
+import json
 import logging
 import math
 import mimetypes
@@ -13,6 +14,7 @@ import os
 import platform
 import re
 import select
+import shutil
 import signal
 import socket
 import stat
@@ -29,7 +31,7 @@ from email.utils import formatdate
 from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
 from queue import Queue
 
-from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
+from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
 from .__version__ import S_BUILD_DT, S_VERSION
 from .stolen import surrogateescape
 
@@ -142,12 +144,15 @@ SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
 
 META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
 
+FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
+
 HTTPCODE = {
 200: "OK",
 201: "Created",
 204: "No Content",
 206: "Partial Content",
 207: "Multi-Status",
+301: "Moved Permanently",
 302: "Found",
 304: "Not Modified",
 400: "Bad Request",
@@ -166,6 +171,7 @@ HTTPCODE = {
 500: "Internal Server Error",
 501: "Not Implemented",
 503: "Service Unavailable",
+999: "MissingNo",
 }
 
 
@@ -227,6 +233,7 @@ application msi=x-ms-installer cab=vnd.ms-cab-compressed rpm=x-rpm crx=x-chrome-
 application epub=epub+zip mobi=x-mobipocket-ebook lit=x-ms-reader rss=rss+xml atom=atom+xml torrent=x-bittorrent
 application p7s=pkcs7-signature dcm=dicom shx=vnd.shx shp=vnd.shp dbf=x-dbf gml=gml+xml gpx=gpx+xml amf=x-amf
 application swf=x-shockwave-flash m3u=vnd.apple.mpegurl db3=vnd.sqlite3 sqlite=vnd.sqlite3
+text ass=plain ssa=plain
 image jpg=jpeg xpm=x-xpixmap psd=vnd.adobe.photoshop jpf=jpx tif=tiff ico=x-icon djvu=vnd.djvu
 image heic=heic-sequence heif=heif-sequence hdr=vnd.radiance svg=svg+xml
 audio caf=x-caf mp3=mpeg m4a=mp4 mid=midi mpc=musepack aif=aiff au=basic qcp=qcelp
@@ -287,6 +294,19 @@ REKOBO_KEY = {
 REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
 
 
+pybin = sys.executable or ""
+if EXE:
+pybin = ""
+for zsg in "python3 python".split():
+try:
+zsg = shutil.which(zsg)
+if zsg:
+pybin = zsg
+break
+except:
+pass
+
+
 def py_desc() -> str:
 interp = platform.python_implementation()
 py_ver = ".".join([str(x) for x in sys.version_info])
@@ -360,8 +380,11 @@ class Daemon(threading.Thread):
 name: Optional[str] = None,
 a: Optional[Iterable[Any]] = None,
 r: bool = True,
+ka: Optional[dict[Any, Any]] = None,
 ) -> None:
-threading.Thread.__init__(self, target=target, name=name, args=a or ())
+threading.Thread.__init__(
+self, target=target, name=name, args=a or (), kwargs=ka
+)
 self.daemon = True
 if r:
 self.start()
@@ -377,6 +400,9 @@ class Netdev(object):
 def __str__(self):
 return "{}-{}{}".format(self.idx, self.name, self.desc)
 
+def __repr__(self):
+return "'{}-{}'".format(self.idx, self.name)
+
 def __lt__(self, rhs):
 return str(self) < str(rhs)
 
@@ -436,9 +462,7 @@ class HLog(logging.Handler):
 else:
 c = 1
 
-if record.name.startswith("PIL") and lv < logging.WARNING:
-return
-elif record.name == "pyftpdlib":
+if record.name == "pyftpdlib":
 m = self.ptn_ftp.match(msg)
 if m:
 ip = m.group(1)
@@ -468,7 +492,7 @@ class NetMap(object):
 )
 
 ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
-ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
+ips = find_prefix(ips, netdevs)
 
 self.cache: dict[str, str] = {}
 self.b2sip: dict[bytes, str] = {}
@@ -514,7 +538,7 @@ class _Unrecv(object):
 self.log = log
 self.buf: bytes = b""
 
-def recv(self, nbytes: int) -> bytes:
+def recv(self, nbytes: int, spins: int = 1) -> bytes:
 if self.buf:
 ret = self.buf[:nbytes]
 self.buf = self.buf[nbytes:]
@@ -525,6 +549,10 @@ class _Unrecv(object):
 ret = self.s.recv(nbytes)
 break
 except socket.timeout:
+spins -= 1
+if spins <= 0:
+ret = b""
+break
 continue
 except:
 ret = b""
@@ -567,7 +595,7 @@ class _LUnrecv(object):
 self.log = log
 self.buf = b""
 
-def recv(self, nbytes: int) -> bytes:
+def recv(self, nbytes: int, spins: int) -> bytes:
 if self.buf:
 ret = self.buf[:nbytes]
 self.buf = self.buf[nbytes:]
@@ -586,7 +614,7 @@ class _LUnrecv(object):
 def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
 """read an exact number of bytes"""
 try:
-ret = self.recv(nbytes)
+ret = self.recv(nbytes, 1)
 err = False
 except:
 ret = b""
@@ -594,7 +622,7 @@ class _LUnrecv(object):
 
 while not err and len(ret) < nbytes:
 try:
-ret += self.recv(nbytes - len(ret))
+ret += self.recv(nbytes - len(ret), 1)
 except OSError:
 err = True
 
@@ -645,6 +673,7 @@ class FHC(object):
 
 def __init__(self) -> None:
 self.cache: dict[str, FHC.CE] = {}
+self.aps: set[str] = set()
 
 def close(self, path: str) -> None:
 try:
@@ -656,6 +685,7 @@ class FHC(object):
 fh.close()
 
 del self.cache[path]
+self.aps.remove(path)
 
 def clean(self) -> None:
 if not self.cache:
@@ -676,6 +706,7 @@ class FHC(object):
 return self.cache[path].fhs.pop()
 
 def put(self, path: str, fh: typing.BinaryIO) -> None:
+self.aps.add(path)
 try:
 ce = self.cache[path]
 ce.fhs.append(fh)
@@ -1146,20 +1177,12 @@ def ren_open(
 fun = kwargs.pop("fun", open)
 fdir = kwargs.pop("fdir", None)
 suffix = kwargs.pop("suffix", None)
-overwrite = kwargs.pop("overwrite", None)
 
 if fname == os.devnull:
 with fun(fname, *args, **kwargs) as f:
 yield {"orz": (f, fname)}
 return
 
-if overwrite:
-assert fdir
-fpath = os.path.join(fdir, fname)
-with fun(fsenc(fpath), *args, **kwargs) as f:
-yield {"orz": (f, fname)}
-return
-
 if suffix:
 ext = fname.split(".")[-1]
 if len(ext) < 7:
@@ -1186,7 +1209,7 @@ def ren_open(
 else:
 fpath = fname
 
-if suffix and os.path.exists(fsenc(fpath)):
+if suffix and os.path.lexists(fsenc(fpath)):
 fpath += suffix
 fname += suffix
 ext += suffix
@@ -1274,7 +1297,7 @@ class MultipartParser(object):
 rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
 (only the fallback non-js uploader relies on these filenames)
 """
-for ln in read_header(self.sr):
+for ln in read_header(self.sr, 2, 2592000):
 self.log(ln)
 
 m = self.re_ctype.match(ln)
@@ -1474,15 +1497,15 @@ def get_boundary(headers: dict[str, str]) -> str:
 return m.group(2)
 
 
-def read_header(sr: Unrecv) -> list[str]:
+def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
 t0 = time.time()
 ret = b""
 while True:
-if time.time() - t0 > 120:
+if time.time() - t0 >= t_tot:
 return []
 
 try:
-ret += sr.recv(1024)
+ret += sr.recv(1024, t_idle // 2)
 except:
 if not ret:
 return []
@@ -1506,10 +1529,32 @@ def read_header(sr: Unrecv) -> list[str]:
 return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
 
 
+def rand_name(fdir: str, fn: str, rnd: int) -> str:
+ok = False
+try:
+ext = "." + fn.rsplit(".", 1)[1]
+except:
+ext = ""
+
+for extra in range(16):
+for _ in range(16):
+if ok:
+break
+
+nc = rnd + extra
+nb = int((6 + 6 * nc) / 8)
+zb = os.urandom(nb)
+zb = base64.urlsafe_b64encode(zb)
+fn = zb[:nc].decode("utf-8") + ext
+ok = not os.path.exists(fsenc(os.path.join(fdir, fn)))
+
+return fn
+
+
 def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
 return base64.urlsafe_b64encode(
 hashlib.sha512(
-"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
+("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
 ).digest()
 ).decode("ascii")
 
@@ -1548,14 +1593,16 @@ def gen_filekey_dbg(
 return ret
 
 
-def gencookie(k: str, v: str, dur: Optional[int]) -> str:
-v = v.replace(";", "")
+def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
+v = v.replace("%", "%25").replace(";", "%3B")
 if dur:
 exp = formatdate(time.time() + dur, usegmt=True)
 else:
 exp = "Fri, 15 Aug 1997 01:00:00 GMT"
 
-return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
+return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
+k, v, r, exp, "; Secure" if tls else ""
+)
 
 
 def humansize(sz: float, terse: bool = False) -> str:
@@ -1580,7 +1627,12 @@ def unhumanize(sz: str) -> int:
 pass
 
 mc = sz[-1:].lower()
-mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1)
+mi = {
+"k": 1024,
+"m": 1024 * 1024,
+"g": 1024 * 1024 * 1024,
+"t": 1024 * 1024 * 1024 * 1024,
+}.get(mc, 1)
 return int(float(sz[:-1]) * mi)
 
 
@@ -1616,7 +1668,7 @@ def uncyg(path: str) -> str:
 if len(path) > 2 and path[2] != "/":
 return path
 
-return "{}:\\{}".format(path[1], path[3:])
+return "%s:\\%s" % (path[1], path[3:])
 
 
 def undot(path: str) -> str:
@@ -1659,7 +1711,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
 
 bad = ["con", "prn", "aux", "nul"]
 for n in range(1, 10):
-bad += "com{0} lpt{0}".format(n).split(" ")
+bad += ("com%s lpt%s" % (n, n)).split(" ")
 
 if fn.lower().split(".")[0] in bad:
 fn = "_" + fn
@@ -1681,7 +1733,7 @@ def relchk(rp: str) -> str:
 
 def absreal(fpath: str) -> str:
 try:
-return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
+return fsdec(os.path.abspath(os.path.realpath(afsenc(fpath))))
 except:
 if not WINDOWS:
 raise
@@ -1711,6 +1763,15 @@ def ipnorm(ip: str) -> str:
 return ip
 
 
+def find_prefix(ips: list[str], netdevs: dict[str, Netdev]) -> list[str]:
+ret = []
+for ip in ips:
+hit = next((x for x in netdevs if x.startswith(ip + "/")), None)
+if hit:
+ret.append(hit)
+return ret
+
+
 def html_escape(s: str, quot: bool = False, crlf: bool = False) -> str:
 """html.escape but also newlines"""
 s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
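A brief usage note on the rand_name helper added above (the surrounding module name is not visible in this extract, so the snippet below simply assumes the function is in scope): rnd sets the minimum number of random filename characters, the original extension is kept, and the candidate length grows after repeated collisions.

    # hypothetical call; rand_name as defined in the hunk above
    fn = rand_name("/srv/uploads", "photo.jpg", 8)
    # -> e.g. "xK3r9_Qw.jpg"; a longer name is tried if the short ones already exist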
@@ -1792,6 +1853,32 @@ def _w8enc3(txt: str) -> bytes:
|
|||||||
return txt.encode(FS_ENCODING, "surrogateescape")
|
return txt.encode(FS_ENCODING, "surrogateescape")
|
||||||
|
|
||||||
|
|
||||||
|
def _msdec(txt: bytes) -> str:
|
||||||
|
ret = txt.decode(FS_ENCODING, "surrogateescape")
|
||||||
|
return ret[4:] if ret.startswith("\\\\?\\") else ret
|
||||||
|
|
||||||
|
|
||||||
|
def _msaenc(txt: str) -> bytes:
|
||||||
|
return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape")
|
||||||
|
|
||||||
|
|
||||||
|
def _uncify(txt: str) -> str:
|
||||||
|
txt = txt.replace("/", "\\")
|
||||||
|
if ":" not in txt and not txt.startswith("\\\\"):
|
||||||
|
txt = absreal(txt)
|
||||||
|
|
||||||
|
return txt if txt.startswith("\\\\") else "\\\\?\\" + txt
|
||||||
|
|
||||||
|
|
||||||
|
def _msenc(txt: str) -> bytes:
|
||||||
|
txt = txt.replace("/", "\\")
|
||||||
|
if ":" not in txt and not txt.startswith("\\\\"):
|
||||||
|
txt = absreal(txt)
|
||||||
|
|
||||||
|
ret = txt.encode(FS_ENCODING, "surrogateescape")
|
||||||
|
return ret if ret.startswith(b"\\\\") else b"\\\\?\\" + ret
|
||||||
|
|
||||||
|
|
||||||
w8dec = _w8dec3 if not PY2 else _w8dec2
|
w8dec = _w8dec3 if not PY2 else _w8dec2
|
||||||
w8enc = _w8enc3 if not PY2 else _w8enc2
|
w8enc = _w8enc3 if not PY2 else _w8enc2
|
||||||
|
|
||||||
@@ -1806,9 +1893,16 @@ def w8b64enc(txt: str) -> str:
|
|||||||
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
|
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
|
||||||
|
|
||||||
|
|
||||||
if not PY2 or not WINDOWS:
|
if not PY2 and WINDOWS:
|
||||||
fsenc = w8enc
|
sfsenc = w8enc
|
||||||
|
afsenc = _msaenc
|
||||||
|
fsenc = _msenc
|
||||||
|
fsdec = _msdec
|
||||||
|
uncify = _uncify
|
||||||
|
elif not PY2 or not WINDOWS:
|
||||||
|
fsenc = afsenc = sfsenc = w8enc
|
||||||
fsdec = w8dec
|
fsdec = w8dec
|
||||||
|
uncify = str
|
||||||
else:
|
else:
|
||||||
# moonrunes become \x3f with bytestrings,
|
# moonrunes become \x3f with bytestrings,
|
||||||
# losing mojibake support is worth
|
# losing mojibake support is worth
|
||||||
@@ -1818,8 +1912,9 @@ else:
|
|||||||
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
||||||
return txt
|
return txt
|
||||||
|
|
||||||
fsenc = _not_actually_mbcs_enc
|
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
||||||
fsdec = _not_actually_mbcs_dec
|
fsdec = _not_actually_mbcs_dec
|
||||||
|
uncify = str
|
||||||
|
|
||||||
|
|
||||||
def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
|
def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
|
||||||
@@ -1930,6 +2025,8 @@ def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> Non
|
|||||||
sck.shutdown(socket.SHUT_RDWR)
|
sck.shutdown(socket.SHUT_RDWR)
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
except Exception as ex:
|
||||||
|
log("shut({}): {}".format(fd, ex), "90")
|
||||||
finally:
|
finally:
|
||||||
td = time.time() - t0
|
td = time.time() - t0
|
||||||
if td >= 1:
|
if td >= 1:
|
||||||
@@ -2007,6 +2104,20 @@ def read_socket_chunked(
|
|||||||
raise Pebkac(400, t.format(x))
|
raise Pebkac(400, t.format(x))
|
||||||
|
|
||||||
|
|
||||||
|
def list_ips() -> list[str]:
|
||||||
|
from .stolen.ifaddr import get_adapters
|
||||||
|
|
||||||
|
ret: set[str] = set()
|
||||||
|
for nic in get_adapters():
|
||||||
|
for ipo in nic.ips:
|
||||||
|
if len(ipo.ip) < 7:
|
||||||
|
ret.add(ipo.ip[0]) # ipv6 is (ip,0,0)
|
||||||
|
else:
|
||||||
|
ret.add(ipo.ip)
|
||||||
|
|
||||||
|
return list(ret)
|
||||||
|
|
||||||
|
|
||||||
def yieldfile(fn: str) -> Generator[bytes, None, None]:
|
def yieldfile(fn: str) -> Generator[bytes, None, None]:
|
||||||
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
||||||
while True:
|
while True:
|
||||||
@@ -2167,7 +2278,7 @@ def rmdirs(
|
|||||||
dirs = [os.path.join(top, x) for x in dirs]
|
dirs = [os.path.join(top, x) for x in dirs]
|
||||||
ok = []
|
ok = []
|
||||||
ng = []
|
ng = []
|
||||||
for d in dirs[::-1]:
|
for d in reversed(dirs):
|
||||||
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||||
ok += a
|
ok += a
|
||||||
ng += b
|
ng += b
|
||||||
@@ -2182,18 +2293,21 @@ def rmdirs(
|
|||||||
return ok, ng
|
return ok, ng
|
||||||
|
|
||||||
|
|
||||||
def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
|
def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:
|
||||||
"""rmdir on self, then all parents"""
|
"""rmdir on self, then all parents"""
|
||||||
|
if top == stop:
|
||||||
|
return [], [top]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
os.rmdir(fsenc(top))
|
os.rmdir(fsenc(top))
|
||||||
except:
|
except:
|
||||||
return [], [top]
|
return [], [top]
|
||||||
|
|
||||||
par = os.path.dirname(top)
|
par = os.path.dirname(top)
|
||||||
if not par:
|
if not par or par == stop:
|
||||||
return [top], []
|
return [top], []
|
||||||
|
|
||||||
ok, ng = rmdirs_up(par)
|
ok, ng = rmdirs_up(par, stop)
|
||||||
return [top] + ok, ng
|
return [top] + ok, ng
|
||||||
|
|
||||||
|
|
||||||
@@ -2214,7 +2328,7 @@ def unescape_cookie(orig: str) -> str:
|
|||||||
ret += chr(int(esc[1:], 16))
|
ret += chr(int(esc[1:], 16))
|
||||||
except:
|
except:
|
||||||
ret += esc
|
ret += esc
|
||||||
esc = ""
|
esc = ""
|
||||||
|
|
||||||
else:
|
else:
|
||||||
ret += ch
|
ret += ch
|
||||||
@@ -2314,7 +2428,7 @@ def killtree(root: int) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def runcmd(
|
def runcmd(
|
||||||
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
|
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
|
||||||
) -> tuple[int, str, str]:
|
) -> tuple[int, str, str]:
|
||||||
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
||||||
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
||||||
@@ -2367,7 +2481,7 @@ def chkcmd(argv: Union[list[bytes], list[str]], **ka: Any) -> tuple[str, str]:
|
|||||||
return sout, serr
|
return sout, serr
|
||||||
|
|
||||||
|
|
||||||
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: int = 10) -> None:
|
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: float = 10) -> None:
|
||||||
if PY2:
|
if PY2:
|
||||||
with open(os.devnull, "wb") as f:
|
with open(os.devnull, "wb") as f:
|
||||||
rv = sp.call(argv, stdout=f, stderr=f)
|
rv = sp.call(argv, stdout=f, stderr=f)
|
||||||
@@ -2427,6 +2541,221 @@ def retchk(
|
|||||||
raise Exception(t)
|
raise Exception(t)
|
||||||
|
|
||||||
|
|
||||||
|
def _parsehook(
|
||||||
|
log: Optional["NamedLogger"], cmd: str
|
||||||
|
) -> tuple[bool, bool, bool, float, dict[str, Any], str]:
|
||||||
|
chk = False
|
||||||
|
fork = False
|
||||||
|
jtxt = False
|
||||||
|
wait = 0.0
|
||||||
|
tout = 0.0
|
||||||
|
kill = "t"
|
||||||
|
cap = 0
|
||||||
|
ocmd = cmd
|
||||||
|
while "," in cmd[:6]:
|
||||||
|
arg, cmd = cmd.split(",", 1)
|
||||||
|
if arg == "c":
|
||||||
|
chk = True
|
||||||
|
elif arg == "f":
|
||||||
|
fork = True
|
||||||
|
elif arg == "j":
|
||||||
|
jtxt = True
|
||||||
|
elif arg.startswith("w"):
|
||||||
|
wait = float(arg[1:])
|
||||||
|
elif arg.startswith("t"):
|
||||||
|
tout = float(arg[1:])
|
||||||
|
elif arg.startswith("c"):
|
||||||
|
cap = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
|
||||||
|
elif arg.startswith("k"):
|
||||||
|
kill = arg[1:] # [t]ree [m]ain [n]one
|
||||||
|
elif arg.startswith("i"):
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
t = "hook: invalid flag {} in {}"
|
||||||
|
(log or print)(t.format(arg, ocmd))
|
||||||
|
|
||||||
|
env = os.environ.copy()
|
||||||
|
try:
|
||||||
|
if EXE:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||||
|
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||||
|
pypath = str(os.pathsep.join(zsl))
|
||||||
|
env["PYTHONPATH"] = pypath
|
||||||
|
except:
|
||||||
|
if not EXE:
|
||||||
|
raise
|
||||||
|
|
||||||
|
sp_ka = {
|
||||||
|
"env": env,
|
||||||
|
"timeout": tout,
|
||||||
|
"kill": kill,
|
||||||
|
"capture": cap,
|
||||||
|
}
|
||||||
|
|
||||||
|
if cmd.startswith("~"):
|
||||||
|
cmd = os.path.expanduser(cmd)
|
||||||
|
|
||||||
|
return chk, fork, jtxt, wait, sp_ka, cmd
|
||||||
|
|
||||||
|
|
||||||
|
def runihook(
|
||||||
|
log: Optional["NamedLogger"],
|
||||||
|
cmd: str,
|
||||||
|
vol: "VFS",
|
||||||
|
ups: list[tuple[str, int, int, str, str, str, int]],
|
||||||
|
) -> bool:
|
||||||
|
ocmd = cmd
|
||||||
|
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
|
||||||
|
bcmd = [sfsenc(cmd)]
|
||||||
|
if cmd.endswith(".py"):
|
||||||
|
bcmd = [sfsenc(pybin)] + bcmd
|
||||||
|
|
||||||
|
vps = [vjoin(*list(s3dec(x[3], x[4]))) for x in ups]
|
||||||
|
aps = [djoin(vol.realpath, x) for x in vps]
|
||||||
|
if jtxt:
|
||||||
|
# 0w 1mt 2sz 3rd 4fn 5ip 6at
|
||||||
|
ja = [
|
||||||
|
{
|
||||||
|
"ap": uncify(ap), # utf8 for json
|
||||||
|
"vp": vp,
|
||||||
|
"wark": x[0][:16],
|
||||||
|
"mt": x[1],
|
||||||
|
"sz": x[2],
|
||||||
|
"ip": x[5],
|
||||||
|
"at": x[6],
|
||||||
|
}
|
||||||
|
for x, vp, ap in zip(ups, vps, aps)
|
||||||
|
]
|
||||||
|
sp_ka["sin"] = json.dumps(ja).encode("utf-8", "replace")
|
||||||
|
else:
|
||||||
|
sp_ka["sin"] = b"\n".join(fsenc(x) for x in aps)
|
||||||
|
|
||||||
|
t0 = time.time()
|
||||||
|
if fork:
|
||||||
|
Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
|
||||||
|
else:
|
||||||
|
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
|
||||||
|
if chk and rc:
|
||||||
|
retchk(rc, bcmd, err, log, 5)
|
||||||
|
return False
|
||||||
|
|
||||||
|
wait -= time.time() - t0
|
||||||
|
if wait > 0:
|
||||||
|
time.sleep(wait)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _runhook(
|
||||||
|
log: Optional["NamedLogger"],
|
||||||
|
cmd: str,
|
||||||
|
ap: str,
|
||||||
|
vp: str,
|
||||||
|
host: str,
|
||||||
|
uname: str,
|
||||||
|
mt: float,
|
||||||
|
sz: int,
|
||||||
|
ip: str,
|
||||||
|
at: float,
|
||||||
|
txt: str,
|
||||||
|
) -> bool:
|
||||||
|
ocmd = cmd
|
||||||
|
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
|
||||||
|
if jtxt:
|
||||||
|
ja = {
|
||||||
|
"ap": ap,
|
||||||
|
"vp": vp,
|
||||||
|
"mt": mt,
|
||||||
|
"sz": sz,
|
||||||
|
"ip": ip,
|
||||||
|
"at": at or time.time(),
|
||||||
|
"host": host,
|
||||||
|
"user": uname,
|
||||||
|
"txt": txt,
|
||||||
|
}
|
||||||
|
arg = json.dumps(ja)
|
||||||
|
else:
|
||||||
|
arg = txt or ap
|
||||||
|
|
||||||
|
acmd = [cmd, arg]
|
||||||
|
if cmd.endswith(".py"):
|
||||||
|
acmd = [pybin] + acmd
|
||||||
|
|
||||||
|
bcmd = [fsenc(x) if x == ap else sfsenc(x) for x in acmd]
|
||||||
|
|
||||||
|
t0 = time.time()
|
||||||
|
if fork:
|
||||||
|
Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
|
||||||
|
else:
|
||||||
|
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
|
||||||
|
if chk and rc:
|
||||||
|
retchk(rc, bcmd, err, log, 5)
|
||||||
|
return False
|
||||||
|
|
||||||
|
wait -= time.time() - t0
|
||||||
|
if wait > 0:
|
||||||
|
time.sleep(wait)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def runhook(
|
||||||
|
log: Optional["NamedLogger"],
|
||||||
|
cmds: list[str],
|
||||||
|
ap: str,
|
||||||
|
vp: str,
|
||||||
|
host: str,
|
||||||
|
uname: str,
|
||||||
|
mt: float,
|
||||||
|
sz: int,
|
||||||
|
ip: str,
|
||||||
|
at: float,
|
||||||
|
txt: str,
|
||||||
|
) -> bool:
|
||||||
|
vp = vp.replace("\\", "/")
|
||||||
|
for cmd in cmds:
|
||||||
|
try:
|
||||||
|
if not _runhook(log, cmd, ap, vp, host, uname, mt, sz, ip, at, txt):
|
||||||
|
return False
|
||||||
|
except Exception as ex:
|
||||||
|
(log or print)("hook: {}".format(ex))
|
||||||
|
if ",c," in "," + cmd:
|
||||||
|
return False
|
||||||
|
break
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def loadpy(ap: str, hot: bool) -> Any:
|
||||||
|
"""
|
||||||
|
a nice can of worms capable of causing all sorts of bugs
|
||||||
|
depending on what other inconveniently named files happen
|
||||||
|
to be in the same folder
|
||||||
|
"""
|
||||||
|
if ap.startswith("~"):
|
||||||
|
ap = os.path.expanduser(ap)
|
||||||
|
|
||||||
|
mdir, mfile = os.path.split(absreal(ap))
|
||||||
|
mname = mfile.rsplit(".", 1)[0]
|
||||||
|
sys.path.insert(0, mdir)
|
||||||
|
|
||||||
|
if PY2:
|
||||||
|
mod = __import__(mname)
|
||||||
|
if hot:
|
||||||
|
reload(mod)
|
||||||
|
else:
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
mod = importlib.import_module(mname)
|
||||||
|
if hot:
|
||||||
|
importlib.reload(mod)
|
||||||
|
|
||||||
|
sys.path.remove(mdir)
|
||||||
|
return mod
|
||||||
|
|
||||||
|
|
||||||
def gzip_orig_sz(fn: str) -> int:
|
def gzip_orig_sz(fn: str) -> int:
|
||||||
with open(fsenc(fn), "rb") as f:
|
with open(fsenc(fn), "rb") as f:
|
||||||
f.seek(-4, 2)
|
f.seek(-4, 2)
|
||||||
@@ -2536,7 +2865,7 @@ def termsize() -> tuple[int, int]:
|
|||||||
def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
|
def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
|
||||||
try:
|
try:
|
||||||
cr = sunpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
|
cr = sunpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
|
||||||
return int(cr[1]), int(cr[0])
|
return cr[::-1]
|
||||||
except:
|
except:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -2549,15 +2878,23 @@ def termsize() -> tuple[int, int]:
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if cr:
|
|
||||||
return cr
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return int(env["COLUMNS"]), int(env["LINES"])
|
return cr or (int(env["COLUMNS"]), int(env["LINES"]))
|
||||||
except:
|
except:
|
||||||
return 80, 25
|
return 80, 25
|
||||||
|
|
||||||
|
|
||||||
|
def hidedir(dp) -> None:
|
||||||
|
if ANYWIN:
|
||||||
|
try:
|
||||||
|
k32 = ctypes.WinDLL("kernel32")
|
||||||
|
attrs = k32.GetFileAttributesW(dp)
|
||||||
|
if attrs >= 0:
|
||||||
|
k32.SetFileAttributesW(dp, attrs | 2)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class Pebkac(Exception):
|
class Pebkac(Exception):
|
||||||
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
||||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ pk: $(addsuffix .gz, $(wildcard *.js *.css))
 un: $(addsuffix .un, $(wildcard *.gz))
 
 %.gz: %
-pigz -11 -J 34 -I 5730 $<
+pigz -11 -J 34 -I 573 $<
 
 %.un: %
 pigz -d $<
copyparty/web/a/u2c.py (symbolic link)
@@ -0,0 +1 @@
+../../../bin/u2c.py
@@ -1 +0,0 @@
-../../../bin/up2k.py
@@ -27,8 +27,8 @@ window.baguetteBox = (function () {
 isOverlayVisible = false,
 touch = {}, // start-pos
 touchFlag = false, // busy
-re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
-re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
+re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
+re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
 anims = ['slideIn', 'fadeIn', 'none'],
 data = {}, // all galleries
 imagesElements = [],
@@ -127,7 +127,7 @@ window.baguetteBox = (function () {
 var gallery = [];
 [].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
 var imageElementClickHandler = function (e) {
-if (ctrl(e))
+if (ctrl(e) || e && e.shiftKey)
 return true;
 
 e.preventDefault ? e.preventDefault() : e.returnValue = false;
@@ -277,8 +277,8 @@ window.baguetteBox = (function () {
 playpause();
 else if (k == "KeyU" || k == "KeyO")
 relseek(k == "KeyU" ? -10 : 10);
-else if (k.indexOf('Digit') === 0)
-vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
+else if (k.indexOf('Digit') === 0 && v)
+v.currentTime = v.duration * parseInt(k.slice(-1)) * 0.1;
 else if (k == "KeyM" && v) {
 v.muted = vmute = !vmute;
 mp_ctl();
@@ -580,6 +580,7 @@ window.baguetteBox = (function () {
 function hideOverlay(e) {
 ev(e);
 playvid(false);
+removeFromCache('#files');
 if (options.noScrollbars) {
 document.documentElement.style.overflowY = 'auto';
 document.body.style.overflowY = 'auto';
@@ -812,10 +813,16 @@ window.baguetteBox = (function () {
 }
 
 function vid() {
+if (currentIndex >= imagesElements.length)
+return;
+
 return imagesElements[currentIndex].querySelector('video');
 }
 
 function vidimg() {
+if (currentIndex >= imagesElements.length)
+return;
+
 return imagesElements[currentIndex].querySelector('img, video');
 }
 
@@ -55,6 +55,7 @@
|
|||||||
--u2-sbtn-b1: #999;
|
--u2-sbtn-b1: #999;
|
||||||
--u2-txt-bg: var(--bg-u5);
|
--u2-txt-bg: var(--bg-u5);
|
||||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||||
|
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||||
--u2-tab-1-fg: #fd7;
|
--u2-tab-1-fg: #fd7;
|
||||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||||
--u2-tab-1-b1: #7c5;
|
--u2-tab-1-b1: #7c5;
|
||||||
@@ -93,6 +94,7 @@
|
|||||||
--g-fsel-bg: #d39;
|
--g-fsel-bg: #d39;
|
||||||
--g-fsel-b1: #f4a;
|
--g-fsel-b1: #f4a;
|
||||||
--g-fsel-ts: #804;
|
--g-fsel-ts: #804;
|
||||||
|
--g-dfg: var(--srv-3);
|
||||||
--g-fg: var(--a-hil);
|
--g-fg: var(--a-hil);
|
||||||
--g-bg: var(--bg-u2);
|
--g-bg: var(--bg-u2);
|
||||||
--g-b1: var(--bg-u4);
|
--g-b1: var(--bg-u4);
|
||||||
@@ -269,6 +271,7 @@ html.bz {
|
|||||||
--btn-1h-fg: #000;
|
--btn-1h-fg: #000;
|
||||||
--txt-sh: a;
|
--txt-sh: a;
|
||||||
|
|
||||||
|
--u2-tab-b1: var(--bg-u5);
|
||||||
--u2-tab-1-fg: var(--fg-max);
|
--u2-tab-1-fg: var(--fg-max);
|
||||||
--u2-tab-1-bg: var(--bg);
|
--u2-tab-1-bg: var(--bg);
|
||||||
|
|
||||||
@@ -327,6 +330,8 @@ html.c {
|
|||||||
}
|
}
|
||||||
html.cz {
|
html.cz {
|
||||||
--bgg: var(--bg-u2);
|
--bgg: var(--bg-u2);
|
||||||
|
--srv-3: #fff;
|
||||||
|
--u2-tab-b1: var(--bg-d3);
|
||||||
}
|
}
|
||||||
html.cy {
|
html.cy {
|
||||||
--fg: #fff;
|
--fg: #fff;
|
||||||
@@ -354,6 +359,7 @@ html.cy {
|
|||||||
--chk-fg: #fd0;
|
--chk-fg: #fd0;
|
||||||
|
|
||||||
--srv-1: #f00;
|
--srv-1: #f00;
|
||||||
|
--srv-3: #fff;
|
||||||
--op-aa-bg: #fff;
|
--op-aa-bg: #fff;
|
||||||
|
|
||||||
--u2-b1-bg: #f00;
|
--u2-b1-bg: #f00;
|
||||||
@@ -408,10 +414,11 @@ html.dz {
|
|||||||
--op-aa-bg: var(--bg-d2);
|
--op-aa-bg: var(--bg-d2);
|
||||||
--op-a-sh: rgba(0,0,0,0.5);
|
--op-a-sh: rgba(0,0,0,0.5);
|
||||||
|
|
||||||
--u2-btn-b1: #999;
|
--u2-btn-b1: var(--fg-weak);
|
||||||
--u2-sbtn-b1: #999;
|
--u2-sbtn-b1: var(--fg-weak);
|
||||||
--u2-txt-bg: var(--bg-u5);
|
--u2-txt-bg: var(--bg-u5);
|
||||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||||
|
--u2-tab-b1: var(--fg-weak);
|
||||||
--u2-tab-1-fg: #fff;
|
--u2-tab-1-fg: #fff;
|
||||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||||
--u2-tab-1-b1: #7c5;
|
--u2-tab-1-b1: #7c5;
|
||||||
@@ -420,6 +427,12 @@ html.dz {
|
|||||||
--u2-b-fg: #fff;
|
--u2-b-fg: #fff;
|
||||||
--u2-b1-bg: #3a3;
|
--u2-b1-bg: #3a3;
|
||||||
--u2-b2-bg: #3a3;
|
--u2-b2-bg: #3a3;
|
||||||
|
--u2-o-bg: var(--btn-bg);
|
||||||
|
--u2-o-b1: var(--bg-u5);
|
||||||
|
--u2-o-h-bg: var(--fg-weak);
|
||||||
|
--u2-o-1-bg: var(--fg-weak);
|
||||||
|
--u2-o-1-b1: var(--a);
|
||||||
|
--u2-o-1h-bg: var(--a);
|
||||||
--u2-inf-bg: #07a;
|
--u2-inf-bg: #07a;
|
||||||
--u2-inf-b1: #0be;
|
--u2-inf-b1: #0be;
|
||||||
--u2-ok-bg: #380;
|
--u2-ok-bg: #380;
|
||||||
@@ -572,6 +585,11 @@ html.dy {
|
|||||||
* {
|
* {
|
||||||
line-height: 1.2em;
|
line-height: 1.2em;
|
||||||
}
|
}
|
||||||
|
::selection {
|
||||||
|
color: var(--bg-d1);
|
||||||
|
background: var(--fg);
|
||||||
|
text-shadow: none;
|
||||||
|
}
|
||||||
html,body,tr,th,td,#files,a {
|
html,body,tr,th,td,#files,a {
|
||||||
color: inherit;
|
color: inherit;
|
||||||
background: none;
|
background: none;
|
||||||
@@ -754,8 +772,9 @@ html.y #files thead th {
|
|||||||
display: inline;
|
display: inline;
|
||||||
}
|
}
|
||||||
#path a {
|
#path a {
|
||||||
margin: 0 0 0 -.2em;
|
padding: 0 .35em;
|
||||||
padding: 0 0 0 .4em;
|
position: relative;
|
||||||
|
z-index: 1;
|
||||||
/* ie: */
|
/* ie: */
|
||||||
border-bottom: .1em solid #777\9;
|
border-bottom: .1em solid #777\9;
|
||||||
margin-right: 1em\9;
|
margin-right: 1em\9;
|
||||||
@@ -763,18 +782,17 @@ html.y #files thead th {
|
|||||||
#path a:first-child {
|
#path a:first-child {
|
||||||
padding-left: .8em;
|
padding-left: .8em;
|
||||||
}
|
}
|
||||||
#path a:not(:last-child):after {
|
#path i {
|
||||||
content: '';
|
|
||||||
width: 1.05em;
|
width: 1.05em;
|
||||||
height: 1.05em;
|
height: 1.05em;
|
||||||
margin: -.2em .3em -.2em -.4em;
|
margin: -.5em .15em -.15em -.7em;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
border: 1px solid rgba(255,224,192,0.3);
|
border: 1px solid rgba(255,224,192,0.3);
|
||||||
border-width: .05em .05em 0 0;
|
border-width: .05em .05em 0 0;
|
||||||
transform: rotate(45deg);
|
transform: rotate(45deg);
|
||||||
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
|
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
|
||||||
}
|
}
|
||||||
html.y #path a:not(:last-child)::after {
|
html.y #path i {
|
||||||
background: none;
|
background: none;
|
||||||
border-color: rgba(0,0,0,0.2);
|
border-color: rgba(0,0,0,0.2);
|
||||||
border-width: .1em .1em 0 0;
|
border-width: .1em .1em 0 0;
|
||||||
@@ -788,11 +806,31 @@ html.y #path a:hover {
|
|||||||
}
|
}
|
||||||
.logue {
|
.logue {
|
||||||
padding: .2em 0;
|
padding: .2em 0;
|
||||||
|
position: relative;
|
||||||
|
z-index: 1;
|
||||||
}
|
}
|
||||||
.logue.hidden,
|
.logue.hidden,
|
||||||
.logue:empty {
|
.logue:empty {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
|
#doc>iframe,
|
||||||
|
.logue>iframe {
|
||||||
|
background: var(--bgg);
|
||||||
|
border: 1px solid var(--bgg);
|
||||||
|
border-width: 0 .3em 0 .3em;
|
||||||
|
border-radius: .5em;
|
||||||
|
visibility: hidden;
|
||||||
|
margin: 0 -.3em;
|
||||||
|
width: 100%;
|
||||||
|
height: 0;
|
||||||
|
}
|
||||||
|
#doc>iframe.focus,
|
||||||
|
.logue>iframe.focus {
|
||||||
|
box-shadow: 0 0 .1em .1em var(--a);
|
||||||
|
}
|
||||||
|
#pro.logue>iframe {
|
||||||
|
height: 100vh;
|
||||||
|
}
|
||||||
#pro.logue {
|
#pro.logue {
|
||||||
margin-bottom: .8em;
|
margin-bottom: .8em;
|
||||||
}
|
}
|
||||||
@@ -817,6 +855,10 @@ html.y #path a:hover {
|
|||||||
.mdo {
|
.mdo {
|
||||||
max-width: 52em;
|
max-width: 52em;
|
||||||
}
|
}
|
||||||
|
.mdo.sb,
|
||||||
|
#epi.logue.mdo>iframe {
|
||||||
|
max-width: 54em;
|
||||||
|
}
|
||||||
.mdo,
|
.mdo,
|
||||||
.mdo * {
|
.mdo * {
|
||||||
line-height: 1.4em;
|
line-height: 1.4em;
|
||||||
@@ -937,6 +979,9 @@ html.y #path a:hover {
|
|||||||
#ggrid>a.dir:before {
|
#ggrid>a.dir:before {
|
||||||
content: '📂';
|
content: '📂';
|
||||||
}
|
}
|
||||||
|
#ggrid>a.dir>span {
|
||||||
|
color: var(--g-dfg);
|
||||||
|
}
|
||||||
#ggrid>a.au:before {
|
#ggrid>a.au:before {
|
||||||
content: '💾';
|
content: '💾';
|
||||||
}
|
}
|
||||||
@@ -983,6 +1028,9 @@ html.np_open #ggrid>a.au:before {
|
|||||||
background: var(--g-sel-bg);
|
background: var(--g-sel-bg);
|
||||||
border-color: var(--g-sel-b1);
|
border-color: var(--g-sel-b1);
|
||||||
}
|
}
|
||||||
|
#ggrid>a.sel>span {
|
||||||
|
color: var(--g-sel-fg);
|
||||||
|
}
|
||||||
#ggrid>a.sel,
|
#ggrid>a.sel,
|
||||||
#ggrid>a[tt].sel {
|
#ggrid>a[tt].sel {
|
||||||
border-top: 1px solid var(--g-fsel-b1);
|
border-top: 1px solid var(--g-fsel-b1);
|
||||||
@@ -1036,6 +1084,9 @@ html.np_open #ggrid>a.au:before {
|
|||||||
background: var(--bg-d3);
|
background: var(--bg-d3);
|
||||||
box-shadow: -.2em .2em 0 var(--f-sel-sh), -.2em -.2em 0 var(--f-sel-sh);
|
box-shadow: -.2em .2em 0 var(--f-sel-sh), -.2em -.2em 0 var(--f-sel-sh);
|
||||||
}
|
}
|
||||||
|
#player {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
#widget {
|
#widget {
|
||||||
position: fixed;
|
position: fixed;
|
||||||
font-size: 1.4em;
|
font-size: 1.4em;
|
||||||
@@ -1075,18 +1126,18 @@ html.y #widget.open {
|
|||||||
top: -.12em;
|
top: -.12em;
|
||||||
}
|
}
|
||||||
#wtico {
|
#wtico {
|
||||||
cursor: url(/.cpr/dd/4.png), pointer;
|
cursor: url(dd/4.png), pointer;
|
||||||
animation: cursor 500ms;
|
animation: cursor 500ms;
|
||||||
}
|
}
|
||||||
#wtico:hover {
|
#wtico:hover {
|
||||||
animation: cursor 500ms infinite;
|
animation: cursor 500ms infinite;
|
||||||
}
|
}
|
||||||
@keyframes cursor {
|
@keyframes cursor {
|
||||||
0% {cursor: url(/.cpr/dd/2.png), pointer}
|
0% {cursor: url(dd/2.png), pointer}
|
||||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
30% {cursor: url(dd/3.png), pointer}
|
||||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
50% {cursor: url(dd/4.png), pointer}
|
||||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
75% {cursor: url(dd/5.png), pointer}
|
||||||
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
85% {cursor: url(dd/4.png), pointer}
|
||||||
}
|
}
|
||||||
@keyframes spin {
|
@keyframes spin {
|
||||||
100% {transform: rotate(360deg)}
|
100% {transform: rotate(360deg)}
|
||||||
@@ -1118,10 +1169,10 @@ html.y #widget.open {
|
|||||||
background: #fff;
|
background: #fff;
|
||||||
background: var(--bg-u3);
|
background: var(--bg-u3);
|
||||||
}
|
}
|
||||||
#wfm, #wzip, #wnp {
|
#wfs, #wfm, #wzip, #wnp {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
#wzip, #wnp {
|
#wfs, #wzip, #wnp {
|
||||||
margin-right: .2em;
|
margin-right: .2em;
|
||||||
padding-right: .2em;
|
padding-right: .2em;
|
||||||
border: 1px solid var(--bg-u5);
|
border: 1px solid var(--bg-u5);
|
||||||
@@ -1133,6 +1184,7 @@ html.y #widget.open {
|
|||||||
padding-left: .2em;
|
padding-left: .2em;
|
||||||
border-left-width: .1em;
|
border-left-width: .1em;
|
||||||
}
|
}
|
||||||
|
#wfs.act,
|
||||||
#wfm.act {
|
#wfm.act {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
@@ -1156,6 +1208,13 @@ html.y #widget.open {
|
|||||||
position: relative;
|
position: relative;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
|
#wfs {
|
||||||
|
font-size: .36em;
|
||||||
|
text-align: right;
|
||||||
|
line-height: 1.3em;
|
||||||
|
padding: 0 .3em 0 0;
|
||||||
|
border-width: 0 .25em 0 0;
|
||||||
|
}
|
||||||
#wfm span,
|
#wfm span,
|
||||||
#wnp span {
|
#wnp span {
|
||||||
font-size: .6em;
|
font-size: .6em;
|
||||||
@@ -1171,7 +1230,8 @@ html.y #widget.open {
|
|||||||
#wfm a.hide {
|
#wfm a.hide {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
#files tbody tr.fcut td {
|
#files tbody tr.fcut td,
|
||||||
|
#ggrid>a.fcut {
|
||||||
animation: fcut .5s ease-out;
|
animation: fcut .5s ease-out;
|
||||||
}
|
}
|
||||||
@keyframes fcut {
|
@keyframes fcut {
|
||||||
@@ -1294,6 +1354,10 @@ html.y #ops svg circle {
|
|||||||
padding: .3em .6em;
|
padding: .3em .6em;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
|
#noie {
|
||||||
|
color: #b60;
|
||||||
|
margin: 0 0 0 .5em;
|
||||||
|
}
|
||||||
.opbox {
|
.opbox {
|
||||||
padding: .5em;
|
padding: .5em;
|
||||||
border-radius: 0 .3em .3em 0;
|
border-radius: 0 .3em .3em 0;
|
||||||
@@ -1698,6 +1762,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
.ghead {
|
.ghead {
|
||||||
|
background: var(--bg-u2);
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
padding: .2em .5em;
|
padding: .2em .5em;
|
||||||
line-height: 2.3em;
|
line-height: 2.3em;
|
||||||
@@ -2411,7 +2476,7 @@ html.y #bbox-overlay figcaption a {
|
|||||||
width: 21em;
|
width: 21em;
|
||||||
}
|
}
|
||||||
#u2cards {
|
#u2cards {
|
||||||
padding: 1em 1em .3em 1em;
|
padding: 1em 1em .42em 1em;
|
||||||
margin: 0 auto;
|
margin: 0 auto;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
@@ -2436,7 +2501,8 @@ html.y #bbox-overlay figcaption a {
|
|||||||
#u2cards a {
|
#u2cards a {
|
||||||
padding: .2em 1em;
|
padding: .2em 1em;
|
||||||
background: var(--u2-tab-bg);
|
background: var(--u2-tab-bg);
|
||||||
border: 1px solid rgba(128,128,128,0.8);
|
border: 1px solid #999;
|
||||||
|
border-color: var(--u2-tab-b1);
|
||||||
border-width: 0 0 1px 0;
|
border-width: 0 0 1px 0;
|
||||||
}
|
}
|
||||||
#u2cards a:first-child {
|
#u2cards a:first-child {
|
||||||
@@ -2557,7 +2623,6 @@ html.b #u2conf a.b:hover {
|
|||||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||||
background: var(--u2-o-1h-bg);
|
background: var(--u2-o-1h-bg);
|
||||||
}
|
}
|
||||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
|
||||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||||
background: #777;
|
background: #777;
|
||||||
@@ -2895,6 +2960,7 @@ html.b #treepar {
|
|||||||
html.b #wrap {
|
html.b #wrap {
|
||||||
margin-top: 2em;
|
margin-top: 2em;
|
||||||
}
|
}
|
||||||
|
html.by .ghead,
|
||||||
html.bz .ghead {
|
html.bz .ghead {
|
||||||
background: var(--bg);
|
background: var(--bg);
|
||||||
padding: .2em 0;
|
padding: .2em 0;
|
||||||
|
|||||||
@@ -8,8 +8,8 @@
|
|||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
|
||||||
<meta name="theme-color" content="#333">
|
<meta name="theme-color" content="#333">
|
||||||
{{ html_head }}
|
{{ html_head }}
|
||||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||||
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
|
||||||
{%- if css %}
|
{%- if css %}
|
||||||
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
@@ -36,7 +36,7 @@
|
|||||||
<input type="file" name="f" multiple /><br />
|
<input type="file" name="f" multiple /><br />
|
||||||
<input type="submit" value="start upload">
|
<input type="submit" value="start upload">
|
||||||
</form>
|
</form>
|
||||||
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
|
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="op_mkdir" class="opview opbox act">
|
<div id="op_mkdir" class="opview opbox act">
|
||||||
@@ -71,7 +71,7 @@
|
|||||||
<h1 id="path">
|
<h1 id="path">
|
||||||
<a href="#" id="entree">🌲</a>
|
<a href="#" id="entree">🌲</a>
|
||||||
{%- for n in vpnodes %}
|
{%- for n in vpnodes %}
|
||||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
</h1>
|
</h1>
|
||||||
|
|
||||||
@@ -85,7 +85,7 @@
|
|||||||
<div id="bdoc"></div>
|
<div id="bdoc"></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
<div id="pro" class="logue">{{ "" if sb_lg else logues[0] }}</div>
|
||||||
|
|
||||||
<table id="files">
|
<table id="files">
|
||||||
<thead>
|
<thead>
|
||||||
@@ -119,9 +119,9 @@
|
|||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||||
|
|
||||||
<h2><a href="/?h" id="goh">control-panel</a></h2>
|
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||||
|
|
||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
|
|
||||||
@@ -134,8 +134,11 @@
|
|||||||
<div id="widget"></div>
|
<div id="widget"></div>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
var acct = "{{ acct }}",
|
var SR = {{ r|tojson }},
|
||||||
|
TS = "{{ ts }}",
|
||||||
|
acct = "{{ acct }}",
|
||||||
perms = {{ perms }},
|
perms = {{ perms }},
|
||||||
|
dgrid = {{ dgrid|tojson }},
|
||||||
themes = {{ themes }},
|
themes = {{ themes }},
|
||||||
dtheme = "{{ dtheme }}",
|
dtheme = "{{ dtheme }}",
|
||||||
srvinf = "{{ srv_info }}",
|
srvinf = "{{ srv_info }}",
|
||||||
@@ -149,21 +152,25 @@
|
|||||||
have_del = {{ have_del|tojson }},
|
have_del = {{ have_del|tojson }},
|
||||||
have_unpost = {{ have_unpost }},
|
have_unpost = {{ have_unpost }},
|
||||||
have_zip = {{ have_zip|tojson }},
|
have_zip = {{ have_zip|tojson }},
|
||||||
|
sb_md = "{{ sb_md }}",
|
||||||
|
sb_lg = "{{ sb_lg }}",
|
||||||
lifetime = {{ lifetime }},
|
lifetime = {{ lifetime }},
|
||||||
turbolvl = {{ turbolvl }},
|
turbolvl = {{ turbolvl }},
|
||||||
|
idxh = {{ idxh }},
|
||||||
|
frand = {{ frand|tojson }},
|
||||||
u2sort = "{{ u2sort }}",
|
u2sort = "{{ u2sort }}",
|
||||||
have_emp = {{ have_emp|tojson }},
|
have_emp = {{ have_emp|tojson }},
|
||||||
txt_ext = "{{ txt_ext }}",
|
txt_ext = "{{ txt_ext }}",
|
||||||
{% if no_prism %}no_prism = 1,{% endif %}
|
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||||
readme = {{ readme|tojson }},
|
readme = {{ readme|tojson }},
|
||||||
ls0 = {{ ls0|tojson }};
|
ls0 = {{ ls0|tojson }};
|
||||||
|
|
||||||
document.documentElement.className = localStorage.theme || dtheme;
|
document.documentElement.className = localStorage.theme || dtheme;
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
|
||||||
{%- if js %}
|
{%- if js %}
|
||||||
<script src="{{ js }}?_={{ ts }}"></script>
|
<script src="{{ js }}?_={{ ts }}"></script>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|||||||
File diff suppressed because it is too large
@@ -57,7 +57,7 @@
 <div>{{ logues[1] }}</div><br />
 {%- endif %}
 
-<h2><a href="/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
+<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
 
 </body>
 </html>
@@ -5,10 +5,10 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
 {%- if edit %}
-<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
 {%- endif %}
 </head>
 <body>
@@ -128,7 +128,8 @@ write markdown (most html is 🙆 too)
 
 <script>
 
-var last_modified = {{ lastmod }},
+var SR = {{ r|tojson }},
+last_modified = {{ lastmod }},
 have_emp = {{ have_emp|tojson }},
 dfavico = "{{ favico }}";
 
@@ -153,10 +154,10 @@ l.light = drk? 0:1;
 })();
 
 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
-<script src="/.cpr/md.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/md.js?_={{ ts }}"></script>
 {%- if edit %}
-<script src="/.cpr/md2.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
 {%- endif %}
 </body></html>
@@ -231,11 +231,11 @@ function convert_markdown(md_text, dest_dom) {
 var nodes = md_dom.getElementsByTagName('a');
 for (var a = nodes.length - 1; a >= 0; a--) {
 var href = nodes[a].getAttribute('href');
-var txt = nodes[a].textContent;
+var txt = nodes[a].innerHTML;

 if (!txt)
 nodes[a].textContent = href;
-else if (href !== txt)
+else if (href !== txt && !nodes[a].className)
 nodes[a].className = 'vis';
 }

@@ -930,7 +930,9 @@ var set_lno = (function () {
 (function () {
 function keydown(ev) {
 ev = ev || window.event;
-var kc = ev.code || ev.keyCode || ev.which;
+var kc = ev.code || ev.keyCode || ev.which,
+editing = document.activeElement == dom_src;

 //console.log(ev.key, ev.code, ev.keyCode, ev.which);
 if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
 save();
@@ -941,12 +943,17 @@ var set_lno = (function () {
 if (d)
 d.click();
 }
-if (document.activeElement != dom_src)
-return true;
+if (editing)
+set_lno();

-set_lno();
-
 if (ctrl(ev)) {
+if (ev.code == "KeyE") {
+dom_nsbs.click();
+return false;
+}
+if (!editing)
+return true;
+
 if (ev.code == "KeyH" || kc == 72) {
 md_header(ev.shiftKey);
 return false;
@@ -971,10 +978,6 @@ var set_lno = (function () {
 iter_uni();
 return false;
 }
-if (ev.code == "KeyE") {
-dom_nsbs.click();
-return false;
-}
 var up = ev.code == "ArrowUp" || kc == 38;
 var dn = ev.code == "ArrowDown" || kc == 40;
 if (up || dn) {
@@ -987,6 +990,9 @@ var set_lno = (function () {
 }
 }
 else {
+if (!editing)
+return true;
+
 if (ev.code == "Tab" || kc == 9) {
 md_indent(ev.shiftKey);
 return false;

@@ -5,10 +5,10 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
 </head>
 <body>
 <div id="mw">
@@ -26,7 +26,8 @@
 <a href="#" id="repl">π</a>
 <script>

-var last_modified = {{ lastmod }},
+var SR = {{ r|tojson }},
+last_modified = {{ lastmod }},
 have_emp = {{ have_emp|tojson }},
 dfavico = "{{ favico }}";

@@ -48,8 +49,8 @@ l.light = drk? 0:1;
 })();

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
-<script src="/.cpr/mde.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
 </body></html>

@@ -8,7 +8,7 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
 </head>

 <body>

@@ -16,7 +16,8 @@ html {
 h1 {
 border-bottom: 1px solid #ccc;
 margin: 2em 0 .4em 0;
-padding: 0 0 .2em 0;
+padding: 0;
+line-height: 1em;
 font-weight: normal;
 }
 li {
@@ -26,6 +27,7 @@ a {
 color: #047;
 background: #fff;
 text-decoration: none;
+white-space: nowrap;
 border-bottom: 1px solid #8ab;
 border-radius: .2em;
 padding: .2em .6em;
@@ -34,6 +36,11 @@ a {
 td a {
 margin: 0;
 }
+#w {
+color: #fff;
+background: #940;
+border-color: #b70;
+}
 .af,
 .logout {
 float: right;
@@ -49,12 +56,30 @@ a.g {
 border-color: #3a0;
 box-shadow: 0 .3em 1em #4c0;
 }
-#repl {
+#repl,
+#pb a {
 border: none;
 background: none;
 color: inherit;
 padding: 0;
 }
+#repl {
+position: fixed;
+bottom: .25em;
+left: .2em;
+}
+#pb {
+opacity: .5;
+position: fixed;
+bottom: .25em;
+right: .3em;
+}
+#pb span {
+opacity: .6;
+}
+#pb a {
+margin: 0;
+}
 table {
 border-collapse: collapse;
 }
@@ -155,15 +180,19 @@ html.z a.g {
 border-color: #af4;
 box-shadow: 0 .3em 1em #7d0;
 }
-html.z input {
-color: #fff;
-background: #626;
-border: 1px solid #c2c;
-border-width: 1px 0 0 0;
+input {
+color: #a50;
+background: #fff;
+border: 1px solid #a50;
 border-radius: .5em;
 padding: .5em .7em;
 margin: 0 .5em 0 0;
 }
+html.z input {
+color: #fff;
+background: #626;
+border-color: #c2c;
+}
 html.z .num {
 border-color: #777;
 }

@@ -8,19 +8,19 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
-<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 </head>

 <body>
 <div id="wrap">
-<a id="a" href="/?h" class="af">refresh</a>
-<a id="v" href="/?hc" class="af">connect</a>
+<a id="a" href="{{ r }}/?h" class="af">refresh</a>
+<a id="v" href="{{ r }}/?hc" class="af">connect</a>

 {%- if this.uname == '*' %}
 <p id="b">howdy stranger <small>(you're not logged in)</small></p>
 {%- else %}
-<a id="c" href="/?pw=x" class="logout">logout</a>
+<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
 <p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
 {%- endif %}

@@ -46,15 +46,15 @@
 <tbody>
 {% for mp in avol %}
 {%- if mp in vstate and vstate[mp] %}
-<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
+<tr><td><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ r }}{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
 {%- endif %}
 {% endfor %}
 </tbody>
 </table>
 </td></tr></table>
 <div class="btns">
-<a id="d" href="/?stack">dump stack</a>
-<a id="e" href="/?reload=cfg">reload cfg</a>
+<a id="d" href="{{ r }}/?stack">dump stack</a>
+<a id="e" href="{{ r }}/?reload=cfg">reload cfg</a>
 </div>
 {%- endif %}

@@ -62,7 +62,7 @@
 <h1 id="f">you can browse:</h1>
 <ul>
 {% for mp in rvol %}
-<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
+<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
 {% endfor %}
 </ul>
 {%- endif %}
@@ -71,7 +71,7 @@
 <h1 id="g">you can upload to:</h1>
 <ul>
 {% for mp in wvol %}
-<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
+<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
 {% endfor %}
 </ul>
 {%- endif %}
@@ -79,34 +79,41 @@
 <h1 id="cc">client config:</h1>
 <ul>
 {% if k304 %}
-<li><a id="h" href="/?k304=n">disable k304</a> (currently enabled)
+<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
 {%- else %}
-<li><a id="i" href="/?k304=y" class="r">enable k304</a> (currently disabled)
+<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
 {% endif %}
 <blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>

-<li><a id="k" href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
+<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
 </ul>

 <h1 id="l">login for more:</h1>
-<ul>
-<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
+<div>
+<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
 <input type="hidden" name="act" value="login" />
 <input type="password" name="cppwd" />
 <input type="submit" value="Login" />
+{% if ahttps %}
+<a id="w" href="{{ ahttps }}">switch to https</a>
+{% endif %}
 </form>
-</ul>
+</div>
 </div>
 <a href="#" id="repl">π</a>
+{%- if not this.args.nb %}
+<span id="pb"><span>powered by</span> <a href="{{ this.args.pb_url }}">copyparty {{ver}}</a></span>
+{%- endif %}
 <script>

-var lang="{{ lang }}",
+var SR = {{ r|tojson }},
+lang="{{ lang }}",
 dfavico="{{ favico }}";

 document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/splash.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
 </body>
 </html>

@@ -17,15 +17,16 @@ var Ls = {
 "l1": "logg inn:",
 "m1": "velkommen tilbake,",
 "n1": "404: filen finnes ikke ┐( ´ -`)┌",
-"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="/?h">gå hjem</a>',
+"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
 "p1": "403: tilgang nektet ~┻━┻",
-"q1": 'du må logge inn eller <a href="/?h">gå hjem</a>',
+"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
 "r1": "gå hjem",
 ".s1": "kartlegg",
 "t1": "handling",
 "u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
 "v1": "koble til",
-"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!"
+"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
+"w1": "bytt til https",
 },
 "eng": {
 "d2": "shows the state of all active threads",

Some files were not shown because too many files have changed in this diff.