Compare commits
368 Commits
`cadaeeeace` ... `87ef50d384`
**.github/pull_request_template.md** (vendored; new file, +2 lines)
@@ -0,0 +1,2 @@
To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/
**.gitignore** (vendored; 14 lines changed)
@@ -21,11 +21,23 @@ copyparty.egg-info/
# winmerge
*.bak

# apple pls
.DS_Store

# derived
copyparty/res/COPYING.txt
copyparty/web/deps/
srv/
scripts/docker/i/
contrib/package/arch/pkg/
contrib/package/arch/src/

# state/logs
up.*.txt
.hist/
.hist/
scripts/docker/*.out
scripts/docker/*.err
/perf.*

# nix build output link
result
**.vscode/launch.py** (vendored; 10 lines changed)
@@ -30,9 +30,17 @@ except:
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]

sfx = ""
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
    sfx = sys.argv[1]
    sys.argv = [sys.argv[0]] + sys.argv[2:]

argv += sys.argv[1:]

if re.search(" -j ?[0-9]", " ".join(argv)):
if sfx:
    argv = [sys.executable, sfx] + argv
    sp.check_call(argv)
elif re.search(" -j ?[0-9]", " ".join(argv)):
    argv = [sys.executable, "-m", "copyparty"] + argv
    sp.check_call(argv)
else:
**.vscode/settings.json** (vendored; 32 lines changed)
@@ -35,34 +35,18 @@
"python.linting.flake8Enabled": true,
"python.linting.banditEnabled": true,
"python.linting.mypyEnabled": true,
"python.linting.mypyArgs": [
"--ignore-missing-imports",
"--follow-imports=silent",
"--show-column-numbers",
"--strict"
],
"python.linting.flake8Args": [
"--max-line-length=120",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128",
"--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226",
],
"python.linting.banditArgs": [
"--ignore=B104"
],
"python.linting.pylintArgs": [
"--disable=missing-module-docstring",
"--disable=missing-class-docstring",
"--disable=missing-function-docstring",
"--disable=import-outside-toplevel",
"--disable=wrong-import-position",
"--disable=raise-missing-from",
"--disable=bare-except",
"--disable=broad-except",
"--disable=invalid-name",
"--disable=line-too-long",
"--disable=consider-using-f-string"
"--ignore=B104,B110,B112"
],
// python3 -m isort --py=27 --profile=black copyparty/
"python.formatting.provider": "black",
"python.formatting.provider": "none",
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter"
},
"editor.formatOnSave": true,
"[html]": {
"editor.formatOnSave": false,
@@ -74,10 +58,6 @@
"files.associations": {
"*.makefile": "makefile"
},
"python.formatting.blackArgs": [
"-t",
"py27"
],
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}
**README.md** (576 lines changed)
@@ -1,35 +1,21 @@
|
||||
# ⇆🎉 copyparty
|
||||
# 💾🎉 copyparty
|
||||
|
||||
* portable file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
|
||||
* MIT-Licensed, 2019-05-26, ed @ irc.rizon.net
|
||||
turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser
|
||||
|
||||
* server only needs Python (2 or 3), all dependencies optional
|
||||
* 🔌 protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
|
||||
* 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)
|
||||
|
||||
## summary
|
||||
|
||||
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
|
||||
|
||||
* server only needs Python (`2.7` or `3.3+`), all dependencies optional
|
||||
* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
|
||||
* protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
|
||||
|
||||
try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
|
||||
👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
|
||||
|
||||
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
|
||||
|
||||
|
||||
## get the app
|
||||
|
||||
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
|
||||
|
||||
(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)
|
||||
|
||||
|
||||
## readme toc
|
||||
|
||||
* top
|
||||
* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
|
||||
* [quickstart](#quickstart) - just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
|
||||
* [on servers](#on-servers) - you may also want these, especially on servers
|
||||
* [on debian](#on-debian) - recommended additional steps on debian
|
||||
* [features](#features)
|
||||
* [testimonials](#testimonials) - small collection of user feedback
|
||||
* [motivations](#motivations) - project goals / philosophy
|
||||
@@ -53,11 +39,14 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
|
||||
* [self-destruct](#self-destruct) - uploads can be given a lifetime
|
||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||
* [media player](#media-player) - plays almost every audio format there is
|
||||
* [audio equalizer](#audio-equalizer) - bass boosted
|
||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
||||
* [other tricks](#other-tricks)
|
||||
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
|
||||
* [server config](#server-config) - using arguments or config files, or a mix of both
|
||||
* [zeroconf](#zeroconf) - announce enabled services on the LAN
|
||||
* [zeroconf](#zeroconf) - announce enabled services on the LAN ([pic](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png))
|
||||
* [mdns](#mdns) - LAN domain-name and feature announcer
|
||||
* [ssdp](#ssdp) - windows-explorer announcer
|
||||
* [qr-code](#qr-code) - print a qr-code [(screenshot)](https://user-images.githubusercontent.com/241032/194728533-6f00849b-c6ac-43c6-9359-83e454d11e00.png) for quick access
|
||||
@@ -77,28 +66,39 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
|
||||
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
||||
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
||||
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
||||
* [themes](#themes)
|
||||
* [complete examples](#complete-examples)
|
||||
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
|
||||
* [prometheus](#prometheus) - metrics/stats can be enabled
|
||||
* [packages](#packages) - the party might be closer than you think
|
||||
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
|
||||
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
|
||||
* [nixos module](#nixos-module)
|
||||
* [browser support](#browser-support) - TLDR: yes
|
||||
* [client examples](#client-examples) - interact with copyparty using non-browser clients
|
||||
* [folder sync](#folder-sync) - sync folders to/from copyparty
|
||||
* [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem
|
||||
* [android app](#android-app) - upload to copyparty with one tap
|
||||
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
|
||||
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
* [client-side](#client-side) - when uploading files
|
||||
* [security](#security) - some notes on hardening
|
||||
* [security](#security) - there is a [discord server](https://discord.gg/25J8CdTT6G)
|
||||
* [gotchas](#gotchas) - behavior that might be unexpected
|
||||
* [cors](#cors) - cross-site request config
|
||||
* [password hashing](#password-hashing) - you can hash passwords
|
||||
* [https](#https) - both HTTP and HTTPS are accepted
|
||||
* [recovering from crashes](#recovering-from-crashes)
|
||||
* [client crashes](#client-crashes)
|
||||
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api)
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](./docs/devnotes.md#http-api)
|
||||
* [dependencies](#dependencies) - mandatory deps
|
||||
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
||||
* [install recommended deps](#install-recommended-deps)
|
||||
* [optional gpl stuff](#optional-gpl-stuff)
|
||||
* [sfx](#sfx) - the self-contained "binary"
|
||||
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe)
|
||||
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
|
||||
* [install on android](#install-on-android)
|
||||
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
|
||||
* [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
|
||||
@@ -106,27 +106,49 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
|
||||
|
||||
## quickstart
|
||||
|
||||
download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
|
||||
just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
|
||||
|
||||
if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
||||
* or install through pypi: `python3 -m pip install --user -U copyparty`
|
||||
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
||||
* or install [on arch](#arch-package) ╱ [on fedora](#fedora-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
|
||||
* or if you are on android, [install copyparty in termux](#install-on-android)
|
||||
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
|
||||
* docker has all deps built-in, so skip this step:
|
||||
|
||||
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
|
||||
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
|
||||
|
||||
* **Alpine:** `apk add py3-pillow ffmpeg`
|
||||
* **Debian:** `apt install python3-pil ffmpeg`
|
||||
* **Fedora:** `dnf install python3-pillow ffmpeg`
|
||||
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
|
||||
* **MacOS:** `port install py-Pillow ffmpeg`
|
||||
* **MacOS** (alternative): `brew install pillow ffmpeg`
|
||||
* **Windows:** `python -m pip install --user -U Pillow`
|
||||
* install python and ffmpeg manually; do not use `winget` or `Microsoft Store` (it breaks $PATH)
|
||||
* copyparty.exe comes with `Pillow` and only needs `ffmpeg`
|
||||
* see [optional dependencies](#optional-dependencies) to enable even more features
|
||||
|
||||
running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
|
||||
|
||||
or see [some usage examples](#complete-examples) for inspiration, or the [complete windows example](./docs/examples/windows.md)
|
||||
|
||||
some recommended options:
|
||||
* `-e2dsa` enables general [file indexing](#file-indexing)
|
||||
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies) to enable thumbnails and more
|
||||
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen)
|
||||
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
|
||||
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
|
||||
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et, up`G`et)
|
||||
* see [accounts and volumes](#accounts-and-volumes) (or `--help-accounts`) for the syntax and other permissions
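for example, a minimal sketch combining the options above (the `/mnt/music` path and the `foo:bar` account are placeholders):

```bash
# file indexing + audio metadata indexing, /mnt/music shared as /music:
# readable by anyone, read-write for user foo (password bar)
python copyparty-sfx.py -e2dsa -e2ts -a foo:bar -v /mnt/music:/music:r:rw,foo
```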
|
||||
|
||||
|
||||
### on servers
|
||||
|
||||
you may also want these, especially on servers:
|
||||
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
|
||||
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
|
||||
|
||||
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
||||
```
|
||||
@@ -137,18 +159,6 @@ firewall-cmd --reload
|
||||
```
|
||||
(1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp)
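a rough firewalld sketch covering the ports listed above (the tcp/udp split is an assumption; only open the ports for the services you actually enable):

```bash
firewall-cmd --permanent --add-port=3923/tcp    # http/https
firewall-cmd --permanent --add-port=3921/tcp    # ftp
firewall-cmd --permanent --add-port=3990/tcp    # ftps
firewall-cmd --permanent --add-port=3945/tcp    # smb
firewall-cmd --permanent --add-port=12000/tcp   # passive-ftp
firewall-cmd --permanent --add-port=1900/udp    # ssdp
firewall-cmd --permanent --add-port=5353/udp    # mdns
firewall-cmd --reload
```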
|
||||
|
||||
### on debian
|
||||
|
||||
recommended additional steps on debian which enable audio metadata and thumbnails (from images and videos):
|
||||
|
||||
* as root, run the following:
|
||||
`apt install python3 python3-pip python3-dev ffmpeg`
|
||||
|
||||
* then, as the user which will be running copyparty (so hopefully not root), run this:
|
||||
`python3 -m pip install --user -U Pillow pillow-avif-plugin`
|
||||
|
||||
(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
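the same two steps as one copy-pasteable block (commands taken verbatim from above; run the pip line as the copyparty user, not as root):

```bash
# as root: python, pip, headers for building wheels, and ffmpeg for thumbnails/transcoding
apt install python3 python3-pip python3-dev ffmpeg

# as the user that will run copyparty: pillow for image thumbnails
python3 -m pip install --user -U Pillow pillow-avif-plugin
```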
|
||||
|
||||
|
||||
## features
|
||||
|
||||
@@ -162,14 +172,18 @@ recommended additional steps on debian which enable audio metadata and thumbnai
|
||||
* ☑ [smb/cifs server](#smb-server)
|
||||
* ☑ [qr-code](#qr-code) for quick access
|
||||
* ☑ [upnp / zeroconf / mdns / ssdp](#zeroconf)
|
||||
* ☑ [event hooks](#event-hooks) / script runner
|
||||
* ☑ [reverse-proxy support](https://github.com/9001/copyparty#reverse-proxy)
|
||||
* upload
|
||||
* ☑ basic: plain multipart, ie6 support
|
||||
* ☑ [up2k](#uploading): js, resumable, multithreaded
|
||||
* unaffected by cloudflare's max-upload-size (100 MiB)
|
||||
* ☑ stash: simple PUT filedropper
|
||||
* ☑ filename randomizer
|
||||
* ☑ write-only folders
|
||||
* ☑ [unpost](#unpost): undo/delete accidental uploads
|
||||
* ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
|
||||
* ☑ symlink/discard existing files (content-matching)
|
||||
* ☑ symlink/discard duplicates (content-matching)
|
||||
* download
|
||||
* ☑ single files in browser
|
||||
* ☑ [folders as zip / tar files](#zip-downloads)
|
||||
@@ -190,16 +204,21 @@ recommended additional steps on debian which enable audio metadata and thumbnai
|
||||
* ☑ [locate files by contents](#file-search)
|
||||
* ☑ search by name/path/date/size
|
||||
* ☑ [search by ID3-tags etc.](#searching)
|
||||
* client support
|
||||
* ☑ [folder sync](#folder-sync)
|
||||
* ☑ [curl-friendly](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png)
|
||||
* markdown
|
||||
* ☑ [viewer](#markdown-viewer)
|
||||
* ☑ editor (sure why not)
|
||||
|
||||
PS: something missing? post any crazy ideas you've got as a [feature request](https://github.com/9001/copyparty/issues/new?assignees=9001&labels=enhancement&template=feature_request.md) or [discussion](https://github.com/9001/copyparty/discussions/new?category=ideas) 🤙
|
||||
|
||||
|
||||
## testimonials
|
||||
|
||||
small collection of user feedback
|
||||
|
||||
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
|
||||
`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`, `wow this is better than nextcloud`
|
||||
|
||||
|
||||
# motivations
|
||||
@@ -208,7 +227,7 @@ project goals / philosophy
|
||||
|
||||
* inverse linux philosophy -- do all the things, and do an *okay* job
|
||||
* quick drop-in service to get a lot of features in a pinch
|
||||
* check [the alternatives](./docs/versus.md)
|
||||
* some of [the alternatives](./docs/versus.md) might be a better fit for you
|
||||
* run anywhere, support everything
|
||||
* as many web-browsers and python versions as possible
|
||||
* every browser should at least be able to browse, download, upload files
|
||||
@@ -237,6 +256,9 @@ browser-specific:
|
||||
server-os-specific:
|
||||
* RHEL8 / Rocky8: you can run copyparty using `/usr/libexec/platform-python`
|
||||
|
||||
server notes:
|
||||
* pypy is supported but regular cpython is faster if you enable the database
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
@@ -260,9 +282,14 @@ server-os-specific:
|
||||
|
||||
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
|
||||
|
||||
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
|
||||
|
||||
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
|
||||
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
|
||||
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
|
||||
* `AudioContext` will probably never be a viable workaround as apple introduces new issues faster than they fix current ones
|
||||
|
||||
* iPhones: the preload feature (in the media-player-options tab) can cause a tiny audio glitch 20sec before the end of each song, but disabling it may cause worse iOS bugs to appear instead
|
||||
* just a hunch, but disabling preloading may cause playback to stop entirely, or possibly mess with bluetooth speakers
|
||||
* tried to add a tooltip regarding this but looks like apple broke my tooltips
|
||||
|
||||
* Windows: folders cannot be accessed if the name ends with `.`
|
||||
* python or windows bug
|
||||
@@ -272,6 +299,7 @@ server-os-specific:
|
||||
|
||||
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
|
||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
|
||||
* also happens on mergerfs, so put the db elsewhere
|
||||
|
||||
* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
|
||||
* due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
|
||||
@@ -285,7 +313,7 @@ upgrade notes
|
||||
* http-api: delete/move is now `POST` instead of `GET`
|
||||
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
||||
* `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB
|
||||
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) if you use that
|
||||
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) if you use that
|
||||
* **devs:** update third-party up2k clients (if those even exist)
|
||||
|
||||
|
||||
@@ -304,7 +332,8 @@ upgrade notes
|
||||
# accounts and volumes
|
||||
|
||||
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
|
||||
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
|
||||
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if the user has `a`/admin in any volume)
|
||||
* changes to the `[global]` config section require a restart to take effect
|
||||
|
||||
a quick summary can be seen using `--help-accounts`
|
||||
|
||||
@@ -322,6 +351,7 @@ permissions:
|
||||
* `d` (delete): delete files/folders
|
||||
* `g` (get): only download files, cannot see folder contents or zip/tar
|
||||
* `G` (upget): same as `g` except uploaders get to see their own filekeys (see `fk` in examples below)
|
||||
* `a` (admin): can see uploader IPs, config-reload
|
||||
|
||||
examples:
|
||||
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
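* and a hypothetical commandline tying accounts and a volume together (paths and credentials are placeholders):

```bash
# u1 can read/write/move/delete in /stuff, u2 can only read it, u3 has no access
python copyparty-sfx.py -a u1:p1 -a u2:p2 -a u3:p3 -v /srv/stuff:/stuff:rwmd,u1:r,u2
```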
|
||||
@@ -450,6 +480,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (
|
||||
## thumbnails
|
||||
|
||||
press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
|
||||
* can be made default globally with `--grid` or per-volume with volflag `grid`
|
||||
|
||||
|
||||
|
||||
@@ -460,10 +491,14 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
|
||||
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||
|
||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
|
||||
|
||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||
* indicated by the audio files having the ▶ icon instead of 💾
|
||||
|
||||
enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
|
||||
* `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
|
||||
|
||||
|
||||
## zip downloads
|
||||
|
||||
@@ -474,10 +509,16 @@ select which type of archive you want in the `[⚙️] config` tab:
|
||||
| name | url-suffix | description |
|
||||
|--|--|--|
|
||||
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
|
||||
| `tar.gz` | `?tar=gz` | gzip compressed tar, for `curl \| tar -xvz` |
|
||||
| `tar.xz` | `?tar=xz` | gnu-tar with xz / lzma compression (good) |
|
||||
| `tar.bz2` | `?tar=bz2` | bzip2-compressed tar (mostly useless) |
|
||||
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
|
||||
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
|
||||
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
|
||||
|
||||
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
|
||||
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
|
||||
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
|
||||
* hidden files (dotfiles) are excluded unless `-ed`
|
||||
* `up2k.db` and `dir.txt` are always excluded
|
||||
* `zip_crc` will take longer to download since the server has to read each file twice
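a hypothetical example, streaming a folder named `/music` as a max-compression gzip tar:

```bash
# download and extract in one go; gzip level 9 as described above
curl 'http://127.0.0.1:3923/music/?tar=gz:9' | tar -xvz
```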
|
||||
@@ -488,10 +529,14 @@ you can also zip a selection of files or folders by clicking them in the browser
|
||||
|
||||
|
||||
|
||||
cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
|
||||
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
|
||||
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
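for example, saving that opus-transcoded archive to disk (same placeholder folder as above):

```bash
# all audio files (except the formats listed above) are transcoded to opus on the fly
curl 'http://127.0.0.1:3923/music/?tar&opus' -o music-opus.tar
```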
|
||||
|
||||
|
||||
## uploading
|
||||
|
||||
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy))
|
||||
drag files/folders into the web-browser to upload (or use the [command-line uploader](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy))
|
||||
|
||||
this initiates an upload using `up2k`; there are two uploaders available:
|
||||
* `[🎈] bup`, the basic uploader, supports almost every browser since netscape 4.0
|
||||
@@ -509,11 +554,14 @@ up2k has several advantages:
|
||||
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
|
||||
* the last-modified timestamp of the file is preserved
|
||||
|
||||
> it is perfectly safe to restart / upgrade copyparty while someone is uploading to it!
|
||||
> all known up2k clients will resume just fine 💪
|
||||
|
||||
see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)
|
||||
|
||||
|
||||
|
||||
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.js](contrib/plugins/minimal-up2k.js) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
|
||||
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion
|
||||
|
||||
file selection: click somewhere on the line (not the link itself), then:
|
||||
* `up/down` to move
|
||||
* `shift-up/down` to move-and-select
|
||||
* `ctrl-shift-up/down` to also scroll
|
||||
* shift-click another line for range-select
|
||||
|
||||
* cut: select some files and `ctrl-x`
|
||||
* paste: `ctrl-v` in another folder
|
||||
@@ -638,12 +687,70 @@ or a mix of both:
|
||||
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
|
||||
|
||||
|
||||
## media player
|
||||
|
||||
plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
|
||||
|
||||
the following audio formats are usually always playable, even without FFmpeg: `aac|flac|m4a|mp3|ogg|opus|wav`
|
||||
|
||||
some highlights:
|
||||
* OS integration; control playback from your phone's lockscreen ([windows](https://user-images.githubusercontent.com/241032/233213022-298a98ba-721a-4cf1-a3d4-f62634bc53d5.png) // [iOS](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) // [android](https://user-images.githubusercontent.com/241032/233212311-a7368590-08c7-4f9f-a1af-48ccf3f36fad.png))
|
||||
* shows the audio waveform in the seekbar
|
||||
* not perfectly gapless but can get really close (see settings + eq below); good enough to enjoy gapless albums as intended
|
||||
|
||||
click the `play` link next to an audio file, or copy the link target to [share it](https://a.ocv.me/pub/demo/music/Ubiktune%20-%20SOUNDSHOCK%202%20-%20FM%20FUNK%20TERRROR!!/#af-1fbfba61&t=18) (optionally with a timestamp to start playing from, like that example does)
|
||||
|
||||
open the `[🎺]` media-player-settings tab to configure it,
|
||||
* switches:
|
||||
* `[preload]` starts loading the next track when it's about to end, reduces the silence between songs
|
||||
* `[full]` does a full preload by downloading the entire next file; good for unreliable connections, bad for slow connections
|
||||
* `[~s]` toggles the seekbar waveform display
|
||||
* `[/np]` enables buttons to copy the now-playing info as an irc message
|
||||
* `[os-ctl]` makes it possible to control audio playback from the lockscreen of your device (enables [mediasession](https://developer.mozilla.org/en-US/docs/Web/API/MediaSession))
|
||||
* `[seek]` allows seeking with lockscreen controls (buggy on some devices)
|
||||
* `[art]` shows album art on the lockscreen
|
||||
* `[🎯]` keeps the playing song scrolled into view (good when using the player as a taskbar dock)
|
||||
* `[⟎]` shrinks the playback controls
|
||||
* playback mode:
|
||||
* `[loop]` keeps looping the folder
|
||||
* `[next]` plays into the next folder
|
||||
* transcode:
|
||||
* `[flac]` converts `flac` and `wav` files into opus
|
||||
* `[aac]` converts `aac` and `m4a` files into opus
|
||||
* `[oth]` converts all other known formats into opus
|
||||
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
|
||||
* "tint" reduces the contrast of the playback bar
|
||||
|
||||
|
||||
### audio equalizer
|
||||
|
||||
bass boosted
|
||||
|
||||
can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)
|
||||
|
||||
has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)
|
||||
|
||||
not available on iPhones / iPads because AudioContext currently breaks background audio playback on iOS (15.7.8)
|
||||
|
||||
|
||||
### fix unreliable playback on android
|
||||
|
||||
due to phone / app settings, android phones may randomly stop playing music when the power saver kicks in, especially at the end of an album -- you can fix it by [disabling power saving](https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png) in the [app settings](https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png) of the browser you use for music streaming (preferably a dedicated one)
|
||||
|
||||
|
||||
## markdown viewer
|
||||
|
||||
and there are *two* editors
|
||||
|
||||
|
||||
|
||||
there is a built-in extension for inline clickable thumbnails;
|
||||
* enable it by adding `<!-- th -->` somewhere in the doc
|
||||
* add thumbnails with `!th[l](your.jpg)` where `l` means left-align (`r` = right-align)
|
||||
* a single line with `---` clears the float / inlining
|
||||
* in the case of README.md being displayed below a file listing, thumbnails will open in the gallery viewer
|
||||
|
||||
other notes,
|
||||
* the document preview has a max-width which is the same as an A4 paper when printed
|
||||
|
||||
|
||||
@@ -688,7 +795,8 @@ for the above example to work, add the commandline argument `-e2ts` to also scan
|
||||
using arguments or config files, or a mix of both:
|
||||
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf) and [./docs/example2.conf](docs/example2.conf)
|
||||
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
|
||||
* or click the `[reload cfg]` button in the control-panel when logged in as admin
|
||||
* or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
|
||||
* changes to the `[global]` config section require a restart to take effect
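a small sketch of that workflow (the config filename and the pgrep pattern are placeholders):

```bash
# start from a config file instead of (or on top of) commandline arguments
python copyparty-sfx.py -c my.conf

# later: reload accounts/volumes without restarting
# (same effect as `systemctl reload copyparty` or the [reload cfg] button)
kill -s USR1 $(pgrep -f copyparty-sfx.py)
```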
|
||||
|
||||
|
||||
## zeroconf
|
||||
@@ -747,6 +855,13 @@ an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit T
|
||||
* some older software (filezilla on debian-stable) cannot passive-mode with TLS
|
||||
* login with any username + your password, or put your password in the username field
|
||||
|
||||
some recommended FTP / FTPS clients; `wark` = example password:
|
||||
* https://winscp.net/eng/download.php
|
||||
* https://filezilla-project.org/ struggles a bit with ftps in active-mode, but is fine otherwise
|
||||
* https://rclone.org/ does FTPS with `tls=false explicit_tls=true`
|
||||
* `lftp -u k,wark -p 3921 127.0.0.1 -e ls`
|
||||
* `lftp -u k,wark -p 3990 127.0.0.1 -e 'set ssl:verify-certificate no; ls'`
|
||||
|
||||
|
||||
## webdav server
|
||||
|
||||
@@ -760,6 +875,8 @@ general usage:
|
||||
on macos, connect from finder:
|
||||
* [Go] -> [Connect to Server...] -> http://192.168.123.1:3923/
|
||||
|
||||
in order to grant full write-access to webdav clients, the volflag `daw` must be set and the account must also have delete-access (otherwise the client won't be allowed to replace the contents of existing files, which is how webdav works)
|
||||
|
||||
|
||||
### connecting to webdav from windows
|
||||
|
||||
@@ -799,7 +916,7 @@ some **BIG WARNINGS** specific to SMB/CIFS, in decreasing importance:
|
||||
|
||||
and some minor issues,
|
||||
* clients only see the first ~400 files in big folders; [impacket#1433](https://github.com/SecureAuthCorp/impacket/issues/1433)
|
||||
* hot-reload of server config (`/?reload=cfg`) only works for volumes, not account passwords
|
||||
* hot-reload of server config (`/?reload=cfg`) does not include the `[global]` section (commandline args)
|
||||
* listens on the first IPv4 `-i` interface only (default = :: = 0.0.0.0 = all)
|
||||
* login doesn't work on winxp, but anonymous access is ok -- remove all accounts from copyparty config for that to work
|
||||
* win10 onwards does not allow connecting anonymously / without accounts
|
||||
@@ -839,14 +956,13 @@ through arguments:
|
||||
* `--xlink` enables deduplication across volumes
|
||||
|
||||
the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
|
||||
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
|
||||
* `-v ~/music::r:c,e2ds,e2tsr` does a full reindex of everything on startup
|
||||
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
|
||||
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
|
||||
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
|
||||
* `-v ~/music::r:c,d2ts` same except only affecting tags
|
||||
|
||||
note:
|
||||
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
|
||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
||||
@@ -868,7 +984,11 @@ avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, ski
|
||||
|
||||
and/or you can `--xvol` / `:c,xvol` to ignore all symlinks leaving the volume's top directory, but still allow bind-mounts pointing elsewhere
|
||||
|
||||
**NB: only affects the indexer** -- users can still access anything inside a volume, unless shadowed by another volume
|
||||
* symlinks are permitted with `xvol` if they point into another volume where the user has the same level of access
|
||||
|
||||
these options will reduce performance; unlikely worst-case estimates are 14% reduction for directory listings, 35% for download-as-tar
|
||||
|
||||
as of copyparty v1.7.0 these options also prevent file access at runtime -- in previous versions it was just hints for the indexer
|
||||
|
||||
### periodic rescan
|
||||
|
||||
@@ -885,6 +1005,8 @@ set upload rules using volflags, some examples:
|
||||
|
||||
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
|
||||
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
|
||||
* `:c,vmaxb=1g` block uploads if total volume size would exceed 1 GiB afterwards
|
||||
* `:c,vmaxn=4k` block uploads if volume would contain more than 4096 files afterwards
|
||||
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
|
||||
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
|
||||
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
|
||||
@@ -898,6 +1020,9 @@ you can also set transaction limits which apply per-IP and per-volume, but these
|
||||
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
|
||||
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
|
||||
|
||||
notes:
|
||||
* `vmaxb` and `vmaxn` require either the `e2ds` volflag or `-e2dsa` global-option (see the combined example below)
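a hypothetical upload-only dropbox combining a few of the rules above:

```bash
# write-only /inc: files between 1 KiB and 3 MiB, keep 4 GiB disk free, cap the volume at 1 GiB total
# (-e2dsa so the vmaxb cap can be enforced)
python copyparty-sfx.py -e2dsa -v /srv/inc:/inc:w:c,sz=1k-3m:c,df=4g:c,vmaxb=1g
```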
|
||||
|
||||
|
||||
## compress uploads
|
||||
|
||||
@@ -1028,6 +1153,13 @@ note that this is way more complicated than the new [event hooks](#event-hooks)
|
||||
note that it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
|
||||
|
||||
|
||||
## handlers
|
||||
|
||||
redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
|
||||
replace 404 and 403 errors with something completely different (that's it for now)
|
||||
|
||||
|
||||
## hiding from google
|
||||
|
||||
tell search engines you don't wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
||||
@@ -1064,7 +1196,33 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
|
||||
## complete examples
|
||||
|
||||
* read-only music server
|
||||
* see [running on windows](./docs/examples/windows.md) for a fancy windows setup
|
||||
|
||||
* or use any of the examples below, just replace `python copyparty-sfx.py` with `copyparty.exe` if you're using the exe edition
|
||||
|
||||
* allow anyone to download or upload files into the current folder:
|
||||
`python copyparty-sfx.py`
|
||||
|
||||
* enable searching and music indexing with `-e2dsa -e2ts`
|
||||
|
||||
* start an FTP server on port 3921 with `--ftp 3921`
|
||||
|
||||
* announce it on your LAN with `-z` so it appears in windows/Linux file managers
|
||||
|
||||
* anyone can upload, but nobody can see any files (even the uploader):
|
||||
`python copyparty-sfx.py -e2dsa -v .::w`
|
||||
|
||||
* block uploads if there's less than 4 GiB free disk space with `--df 4`
|
||||
|
||||
* show a popup on new uploads with `--xau bin/hooks/notify.py`
|
||||
|
||||
* anyone can upload, and receive "secret" links for each upload they do:
|
||||
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
|
||||
|
||||
* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
|
||||
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
|
||||
|
||||
* read-only music server:
|
||||
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
|
||||
|
||||
* ...with bpm and key scanning
|
||||
@@ -1079,19 +1237,194 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
|
||||
## reverse-proxy
|
||||
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx or apache
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx, caddy, or apache
|
||||
|
||||
you can either:
|
||||
* give copyparty its own domain or subdomain (recommended)
|
||||
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
|
||||
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
|
||||
|
||||
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
|
||||
* **warning:** nginx-QUIC is still experimental and can make uploads much slower, so HTTP/2 is recommended for now
|
||||
|
||||
example webserver configs:
|
||||
|
||||
* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
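the copyparty side of the location-based approach could look like this (a sketch; the `/stuff` prefix is the same placeholder used above):

```bash
# listen on localhost only; the webserver must forward requests with the /stuff prefix intact
python copyparty-sfx.py -i 127.0.0.1 --rp-loc=/stuff
```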
|
||||
|
||||
|
||||
## prometheus
|
||||
|
||||
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
|
||||
|
||||
must be enabled with `--stats` since it reduces startup time a tiny bit, and you probably want `-e2dsa` too
|
||||
|
||||
the endpoint is only accessible by `admin` accounts, meaning the `a` in `rwmda` in the following example commandline: `python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed --stats -e2dsa`
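you can sanity-check the endpoint with curl before wiring anything up (password and address match the example commandline above and the prometheus config below; the `PW` header is one of the documented ways to pass the account password):

```bash
curl -H 'PW: wark' http://192.168.123.1:3923/.cpr/metrics
```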
|
||||
|
||||
follow a guide for setting up `node_exporter` except have it read from copyparty instead; example `/etc/prometheus/prometheus.yml` below
|
||||
|
||||
```yaml
|
||||
scrape_configs:
|
||||
- job_name: copyparty
|
||||
metrics_path: /.cpr/metrics
|
||||
basic_auth:
|
||||
password: wark
|
||||
static_configs:
|
||||
- targets: ['192.168.123.1:3923']
|
||||
```
|
||||
|
||||
currently the following metrics are available,
|
||||
* `cpp_uptime_seconds`
|
||||
* `cpp_bans` number of banned IPs
|
||||
|
||||
and these are available per-volume only:
|
||||
* `cpp_disk_size_bytes` total HDD size
|
||||
* `cpp_disk_free_bytes` free HDD space
|
||||
|
||||
and these are per-volume and `total`:
|
||||
* `cpp_vol_bytes` size of all files in volume
|
||||
* `cpp_vol_files` number of files
|
||||
* `cpp_dupe_bytes` disk space presumably saved by deduplication
|
||||
* `cpp_dupe_files` number of dupe files
|
||||
* `cpp_unf_bytes` currently unfinished / incoming uploads
|
||||
|
||||
some of the metrics have additional requirements to function correctly,
|
||||
* `cpp_vol_*` requires either the `e2ds` volflag or `-e2dsa` global-option
|
||||
|
||||
the following options are available to disable some of the metrics:
|
||||
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
|
||||
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
|
||||
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
|
||||
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
|
||||
|
||||
|
||||
# packages
|
||||
|
||||
the party might be closer than you think
|
||||
|
||||
|
||||
## arch package
|
||||
|
||||
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
|
||||
|
||||
## fedora package
|
||||
|
||||
now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) , maintained autonomously -- [track record](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/package/python-copyparty/) seems OK
|
||||
|
||||
```bash
|
||||
dnf copr enable @copr/PyPI
|
||||
dnf install python3-copyparty # just a minimal install, or...
|
||||
dnf install python3-{copyparty,pillow,argon2-cffi,pyftpdlib,pyOpenSSL} ffmpeg-free # with recommended deps
|
||||
```
|
||||
|
||||
this *may* also work on RHEL but [I'm not paying IBM to verify that](https://www.jeffgeerling.com/blog/2023/dear-red-hat-are-you-dumb)
|
||||
|
||||
|
||||
## nix package
|
||||
|
||||
`nix profile install github:9001/copyparty`
|
||||
|
||||
requires a [flake-enabled](https://nixos.wiki/wiki/Flakes) installation of nix
|
||||
|
||||
some recommended dependencies are enabled by default; [override the package](https://github.com/9001/copyparty/blob/hovudstraum/contrib/package/nix/copyparty/default.nix#L3-L22) if you want to add/remove some features/deps
|
||||
|
||||
`ffmpeg-full` was chosen over `ffmpeg-headless` mainly because we need `withWebp` (and `withOpenmpt` is also nice) and being able to use a cached build felt more important than optimizing for size at the time -- PRs welcome if you disagree 👍
|
||||
|
||||
|
||||
## nixos module
|
||||
|
||||
for this setup, you will need a [flake-enabled](https://nixos.wiki/wiki/Flakes) installation of NixOS.
|
||||
|
||||
```nix
|
||||
{
|
||||
# add copyparty flake to your inputs
|
||||
inputs.copyparty.url = "github:9001/copyparty";
|
||||
|
||||
# ensure that copyparty is an allowed argument to the outputs function
|
||||
outputs = { self, nixpkgs, copyparty }: {
|
||||
nixosConfigurations.yourHostName = nixpkgs.lib.nixosSystem {
|
||||
modules = [
|
||||
# load the copyparty NixOS module
|
||||
copyparty.nixosModules.default
|
||||
({ pkgs, ... }: {
|
||||
# add the copyparty overlay to expose the package to the module
|
||||
nixpkgs.overlays = [ copyparty.overlays.default ];
|
||||
# (optional) install the package globally
|
||||
environment.systemPackages = [ pkgs.copyparty ];
|
||||
# configure the copyparty module
|
||||
services.copyparty.enable = true;
|
||||
})
|
||||
];
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
copyparty on NixOS is configured via `services.copyparty` options, for example:
|
||||
```nix
services.copyparty = {
  enable = true;
  # directly maps to values in the [global] section of the copyparty config.
  # see `copyparty --help` for available options
  settings = {
    i = "0.0.0.0";
    # use lists to set multiple values
    p = [ 3210 3211 ];
    # use booleans to set binary flags
    no-reload = true;
    # using 'false' will do nothing and omit the value when generating a config
    ignored-flag = false;
  };

  # create users
  accounts = {
    # specify the account name as the key
    ed = {
      # provide the path to a file containing the password, keeping it out of /nix/store
      # must be readable by the copyparty service user
      passwordFile = "/run/keys/copyparty/ed_password";
    };
    # or do both in one go
    k.passwordFile = "/run/keys/copyparty/k_password";
  };

  # create a volume
  volumes = {
    # create a volume at "/" (the webroot), which will
    "/" = {
      # share the contents of "/srv/copyparty"
      path = "/srv/copyparty";
      # see `copyparty --help-accounts` for available options
      access = {
        # everyone gets read-access, but
        r = "*";
        # users "ed" and "k" get read-write
        rw = [ "ed" "k" ];
      };
      # see `copyparty --help-flags` for available options
      flags = {
        # "fk" enables filekeys (necessary for upget permission) (4 chars long)
        fk = 4;
        # scan for new files every 60sec
        scan = 60;
        # volflag "e2d" enables the uploads database
        e2d = true;
        # "d2t" disables multimedia parsers (in case the uploads are malicious)
        d2t = true;
        # skips hashing file contents if path matches *.iso
        nohash = "\.iso$";
      };
    };
  };
  # you may increase the open file limit for the process
  openFilesLimit = 8192;
};
```
|
||||
|
||||
the passwordFile at /run/keys/copyparty/ could for example be generated by [agenix](https://github.com/ryantm/agenix), or you could just dump it in the nix store instead if that's acceptable
|
||||
|
||||
|
||||
# browser support
|
||||
|
||||
TLDR: yes
|
||||
@@ -1162,10 +1495,10 @@ interact with copyparty using non-browser clients
|
||||
* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
|
||||
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
|
||||
|
||||
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, folder sync, autoresume of aborted/broken uploads
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py)
|
||||
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
|
||||
* python: [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, [folder sync](#folder-sync), autoresume of aborted/broken uploads
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [u2c.py](http://127.0.0.1:3923/.cpr/a/u2c.py)
|
||||
* see [./bin/README.md#u2cpy](bin/README.md#u2cpy)
|
||||
|
||||
* FUSE: mount a copyparty server as a local filesystem
|
||||
* cross-platform python client available in [./bin/](bin/)
|
||||
@@ -1184,17 +1517,27 @@ you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, ur
|
||||
NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
|
||||
|
||||
|
||||
## folder sync
|
||||
|
||||
sync folders to/from copyparty
|
||||
|
||||
the commandline uploader [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy) with `--dr` is the best way to sync a folder to copyparty; verifies checksums and does files in parallel, and deletes unexpected files on the server after upload has finished which makes file-renames really cheap (it'll rename serverside and skip uploading)
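
as a rough sketch (the server URL and local path are made up, and flags can change between versions, so double-check with `u2c.py --help`):

```bash
# one-way sync of ~/music into the /inc volume; --dr deletes
# server-side files that no longer exist locally
python3 u2c.py --dr https://your.server/inc/ ~/music/
```
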
|
||||
|
||||
alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty, ...), although there is no integrity check and it won't work with files over 100 MiB if copyparty is behind cloudflare
|
||||
|
||||
* starting from rclone v1.63 (currently [in beta](https://beta.rclone.org/?filter=latest)), rclone will also be faster than u2c.py
|
||||
|
||||
|
||||
## mount as drive
|
||||
|
||||
a remote copyparty server as a local filesystem; go to the control-panel and click `connect` to see a list of commands to do that
|
||||
|
||||
alternatively, some alternatives roughly sorted by speed (unreproducible benchmark), best first:
|
||||
|
||||
* [rclone-http](./docs/rclone.md) (25s), read-only
|
||||
* [rclone-webdav](./docs/rclone.md) (25s), read/WRITE ([v1.63-beta](https://beta.rclone.org/?filter=latest))
|
||||
* [rclone-http](./docs/rclone.md) (26s), read-only
|
||||
* [partyfuse.py](./bin/#partyfusepy) (35s), read-only
|
||||
* [rclone-ftp](./docs/rclone.md) (47s), read/WRITE
|
||||
* [rclone-webdav](./docs/rclone.md) (51s), read/WRITE
|
||||
* copyparty-1.5.0's webdav server is faster than rclone-1.60.0 (69s)
|
||||
* [partyfuse.py](./bin/#partyfusepy) (71s), read-only
|
||||
* davfs2 (103s), read/WRITE, *very fast* on small files
|
||||
* [win10-webdav](#webdav-server) (138s), read/WRITE
|
||||
* [win10-smb2](#smb-server) (387s), read/WRITE
|
||||
@@ -1202,6 +1545,27 @@ alternatively, some alternatives roughly sorted by speed (unreproducible benchma
|
||||
most clients will fail to mount the root of a copyparty server unless there is a root volume (so you get the admin-panel instead of a browser when accessing it) -- in that case, mount a specific volume instead
|
||||
|
||||
|
||||
# android app
|
||||
|
||||
upload to copyparty with one tap
|
||||
|
||||
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
|
||||
|
||||
the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet
|
||||
|
||||
if you want to run the copyparty server on your android device, see [install on android](#install-on-android)
|
||||
|
||||
|
||||
# iOS shortcuts
|
||||
|
||||
there is no iPhone app, but the following shortcuts are almost as good:
|
||||
|
||||
* [upload to copyparty](https://www.icloud.com/shortcuts/41e98dd985cb4d3bb433222bc1e9e770) ([offline](https://github.com/9001/copyparty/raw/hovudstraum/contrib/ios/upload-to-copyparty.shortcut)) ([png](https://user-images.githubusercontent.com/241032/226118053-78623554-b0ed-482e-98e4-6d57ada58ea4.png)) based on the [original](https://www.icloud.com/shortcuts/ab415d5b4de3467b9ce6f151b439a5d7) by [Daedren](https://github.com/Daedren) (thx!)
|
||||
* can strip exif, upload files, pics, vids, links, clipboard
|
||||
* can download links and rehost the target file on copyparty (see first comment inside the shortcut)
|
||||
* pics become lowres if you share from gallery to shortcut, so better to launch the shortcut and pick stuff from there
|
||||
|
||||
|
||||
# performance
|
||||
|
||||
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
@@ -1209,15 +1573,16 @@ defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
below are some tweaks roughly ordered by usefulness (a combined example follows the list):
|
||||
|
||||
* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
|
||||
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
|
||||
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
|
||||
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
|
||||
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
|
||||
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
|
||||
* huge amount of short-lived connections
|
||||
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
|
||||
* lots of connections (many users or heavy clients)
|
||||
* simultaneous downloads and uploads saturating a 20gbps connection
|
||||
|
||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||
* using [pypy](https://www.pypy.org/) instead of [cpython](https://www.python.org/) *can* be 70% faster for some workloads, but slower for many others
|
||||
* and pypy can sometimes crash on startup with `-j0` (TODO make issue)
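
a hypothetical combination of a few of the tweaks above; the paths and the volume are placeholders:

```bash
# quiet logging, multiprocessing, databases/thumbnails on a fast ssd,
# and skip content-hashing while indexing the slow nas volume
python3 copyparty-sfx.py -q -j0 --hist /mnt/ssd/cphist --no-hash . -v /mnt/nas:nas:r
```
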
|
||||
|
||||
|
||||
## client-side
|
||||
@@ -1227,7 +1592,7 @@ when uploading files,
|
||||
* chrome is recommended, at least compared to firefox:
|
||||
* up to 90% faster when hashing, especially on SSDs
|
||||
* up to 40% faster when uploading over extremely fast internets
|
||||
* but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again
|
||||
* but [u2c.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) can be 40% faster than chrome again
|
||||
|
||||
* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
|
||||
* up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
|
||||
@@ -1238,10 +1603,14 @@ when uploading files,
|
||||
|
||||
# security
|
||||
|
||||
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all important updates (for lack of better ideas)
|
||||
|
||||
some notes on hardening
|
||||
|
||||
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
|
||||
* cors doesn't work right otherwise
|
||||
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml` (see the sketch after this list)
|
||||
* this returns html documents as plaintext, and also disables markdown rendering
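
a hypothetical example combining those two points; the volume path and name are placeholders:

```bash
# directly internet-facing (no reverse-proxy), with an anonymous
# write-only inbox where html files are served as plaintext
python3 copyparty-sfx.py --rproxy 0 -v srv/inbox:inbox:w:c,nohtml
```
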
|
||||
|
||||
safety profiles:
|
||||
|
||||
@@ -1255,9 +1624,9 @@ safety profiles:
|
||||
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
* however note if you edit one file it will also affect the other copies
|
||||
* `--vague-401` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
|
||||
* **NB:** will ban anyone who enables up2k turbo
|
||||
* `--turbo=-1` to force-disable turbo-mode in the uploader which could otherwise hit the 404-ban
|
||||
* `--nih` removes the server hostname from directory listings
|
||||
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
@@ -1270,6 +1639,7 @@ other misc notes:
|
||||
|
||||
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
|
||||
* combine this with volflag `c,fk` to generate filekeys (per-file accesskeys); users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
* the default filekey entropy is fairly small so give `--fk-salt` around 30 characters if you want filekeys longer than 16 chars
|
||||
* permissions `wG` lets users upload files and receive their own filekeys, still without being able to see other uploads
|
||||
|
||||
|
||||
@@ -1278,10 +1648,12 @@ other misc notes:
|
||||
behavior that might be unexpected
|
||||
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users can submit `<script>`s which autorun for other visitors in a few ways;
|
||||
* users can submit `<script>`s which autorun (in a sandbox) for other visitors in a few ways;
|
||||
* uploading a `README.md` -- avoid with `--no-readme`
|
||||
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not 100% safe, see below
|
||||
* markdown documents can contain html and `<script>`s; attempts are made to prevent scripts from executing (unless `-emp` is specified) but this is not 100% bulletproof, so setting the `nohtml` volflag is still the safest choice
|
||||
* or eliminate the problem entirely by only giving write-access to trustworthy people :^)
|
||||
|
||||
|
||||
## cors
|
||||
@@ -1296,6 +1668,29 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
|
||||
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
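
for example (the origin below is made up; check `--help` for the exact value syntax):

```bash
# only allow cross-origin requests from one trusted site
python3 copyparty-sfx.py --acao https://intra.example.com

# or disable the protections entirely (not recommended)
python3 copyparty-sfx.py --allow-csrf
```
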
|
||||
|
||||
|
||||
## password hashing
|
||||
|
||||
you can hash passwords before putting them into config files / providing them as arguments; see `--help-pwhash` for all the details
|
||||
|
||||
`--ah-alg argon2` enables it, and if you have any plaintext passwords then it'll print the hashed versions on startup so you can replace them
|
||||
|
||||
optionally also specify `--ah-cli` to enter an interactive mode where it will hash passwords without ever writing the plaintext ones to disk
|
||||
|
||||
the default configs take about 0.4 sec and 256 MiB RAM to process a new password on a decent laptop
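
for example (the account name and password are placeholders):

```bash
# enable hashing; any plaintext passwords are printed as hashes on startup
python3 copyparty-sfx.py --ah-alg argon2 -a ed:hunter2

# or hash passwords interactively, never writing the plaintext to disk
python3 copyparty-sfx.py --ah-alg argon2 --ah-cli
```
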
|
||||
|
||||
|
||||
## https
|
||||
|
||||
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
|
||||
|
||||
copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well
|
||||
|
||||
if [cfssl](https://github.com/cloudflare/cfssl/releases/latest) is installed, copyparty will automatically create a CA and server-cert on startup
|
||||
* the certs are written to `--crt-dir` for distribution, see `--help` for the other `--crt` options
|
||||
* this will be a self-signed certificate so you must install your `ca.pem` into all your browsers/devices
|
||||
* if you want to avoid the hassle of distributing certs manually, please consider using a reverse proxy
|
||||
|
||||
|
||||
# recovering from crashes
|
||||
|
||||
## client crashes
|
||||
@@ -1318,7 +1713,7 @@ however you can hit `F12` in the up2k tab and use the devtools to see how far yo
|
||||
|
||||
# HTTP API
|
||||
|
||||
see [devnotes](#./docs/devnotes.md#http-api)
|
||||
see [devnotes](./docs/devnotes.md#http-api)
|
||||
|
||||
|
||||
# dependencies
|
||||
@@ -1331,6 +1726,8 @@ mandatory deps:
|
||||
|
||||
install these to enable bonus features
|
||||
|
||||
enable hashed passwords in config: `argon2-cffi`
|
||||
|
||||
enable ftp-server:
|
||||
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
|
||||
* with TLS encryption, `pyftpdlib pyopenssl`
|
||||
@@ -1346,18 +1743,12 @@ enable [thumbnails](#thumbnails) of...
|
||||
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
|
||||
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
|
||||
|
||||
enable [smb](#smb-server) support:
|
||||
enable [smb](#smb-server) support (**not** recommended):
|
||||
* `impacket==0.10.0`
|
||||
|
||||
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
|
||||
|
||||
|
||||
## install recommended deps
|
||||
```
python -m pip install --user -U jinja2 mutagen Pillow
```
|
||||
|
||||
|
||||
## optional gpl stuff
|
||||
|
||||
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
|
||||
@@ -1369,20 +1760,25 @@ these are standalone programs and will never be imported / evaluated by copypart
|
||||
|
||||
the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
|
||||
|
||||
you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](#./docs/devnotes.md#sfx-repack)
|
||||
you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](./docs/devnotes.md#sfx-repack)
|
||||
|
||||
|
||||
## copyparty.exe
|
||||
|
||||
download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe)
|
||||
download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
|
||||
|
||||

|
||||

|
||||
|
||||
can be convenient on old machines where installing python is problematic, however is **not recommended** and should be considered a last resort -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead
|
||||
can be convenient on machines where installing python is problematic, however is **not recommended** -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead
|
||||
|
||||
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is compatible with 32bit windows7, which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and will definitely become a security hazard at some point
|
||||
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe
|
||||
|
||||
* [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe) is identical except 64bit so it [works in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png)
|
||||
* on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works
|
||||
* some antivirus may freak out (false-positive), possibly [Avast, AVG, and McAfee](https://www.virustotal.com/gui/file/52391a1e9842cf70ad243ef83844d46d29c0044d101ee0138fcdd3c8de2237d6/detection)
|
||||
|
||||
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
|
||||
|
||||
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
|
||||
|
||||
meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# [`up2k.py`](up2k.py)
|
||||
# [`u2c.py`](u2c.py)
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* sync local folder to server
|
||||
|
||||
35
bin/handlers/README.md
Normal file
35
bin/handlers/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
replace the standard 404 / 403 responses with plugins
|
||||
|
||||
|
||||
# usage
|
||||
|
||||
load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py`
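
both forms in context, as a rough sketch (paths and the volume are placeholders):

```bash
# global: use sorry.py as the 404-handler in every volume
python3 copyparty-sfx.py --on404 ~/handlers/sorry.py

# per-volume: only /pub gets the custom 404
python3 copyparty-sfx.py -v srv/pub:pub:r:c,on404=~/handlers/sorry.py
```
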
|
||||
|
||||
|
||||
# api
|
||||
|
||||
each plugin must define a `main()` which takes 3 arguments;
|
||||
|
||||
* `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself)
|
||||
* `vn` is the VFS which overlaps with the requested URL, and
|
||||
* `rem` is the URL remainder below the VFS mountpoint
|
||||
* so `vn.vpath + rem` == `cli.vpath` == original request
|
||||
|
||||
|
||||
# examples
|
||||
|
||||
## on404
|
||||
|
||||
* [sorry.py](sorry.py) replies with a custom message instead of the usual 404
|
||||
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
|
||||
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
|
||||
* [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff
|
||||
|
||||
## on403
|
||||
|
||||
* [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4
|
||||
|
||||
|
||||
# notes
|
||||
|
||||
* on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404)
|
||||
36
bin/handlers/caching-proxy.py
Executable file
36
bin/handlers/caching-proxy.py
Executable file
@@ -0,0 +1,36 @@
|
||||
# assume each requested file exists on another webserver and
|
||||
# download + mirror them as they're requested
|
||||
# (basically pretend we're warnish)
|
||||
|
||||
import os
|
||||
import requests
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from copyparty.httpcli import HttpCli
|
||||
|
||||
|
||||
def main(cli: "HttpCli", vn, rem):
|
||||
url = "https://mirrors.edge.kernel.org/alpine/" + rem
|
||||
abspath = os.path.join(vn.realpath, rem)
|
||||
|
||||
# sneaky trick to preserve a requests-session between downloads
|
||||
# so it doesn't have to spend ages reopening https connections;
|
||||
# luckily we can stash it inside the copyparty client session,
|
||||
# name just has to be definitely unused so "hacapo_req_s" it is
|
||||
req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
|
||||
setattr(cli.conn, "hacapo_req_s", req_s)
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.dirname(abspath), exist_ok=True)
|
||||
with req_s.get(url, stream=True, timeout=69) as r:
|
||||
r.raise_for_status()
|
||||
with open(abspath, "wb", 64 * 1024) as f:
|
||||
for buf in r.iter_content(chunk_size=64 * 1024):
|
||||
f.write(buf)
|
||||
except:
|
||||
os.unlink(abspath)
|
||||
return "false"
|
||||
|
||||
return "retry"
|
||||
6
bin/handlers/ip-ok.py
Executable file
6
bin/handlers/ip-ok.py
Executable file
@@ -0,0 +1,6 @@
|
||||
# disable permission checks and allow access if client-ip is 1.2.3.4
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
if cli.ip == "1.2.3.4":
|
||||
return "allow"
|
||||
11
bin/handlers/never404.py
Executable file
11
bin/handlers/never404.py
Executable file
@@ -0,0 +1,11 @@
|
||||
# create a dummy file and let copyparty return it
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
print("hello", cli.ip)
|
||||
|
||||
abspath = vn.canonical(rem)
|
||||
with open(abspath, "wb") as f:
|
||||
f.write(b"404? not on MY watch!")
|
||||
|
||||
return "retry"
|
||||
16
bin/handlers/nooo.py
Executable file
16
bin/handlers/nooo.py
Executable file
@@ -0,0 +1,16 @@
|
||||
# reply with an endless "noooooooooooooooooooooooo"
|
||||
|
||||
|
||||
def say_no():
|
||||
yield b"n"
|
||||
while True:
|
||||
yield b"o" * 4096
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
cli.send_headers(None, 404, "text/plain")
|
||||
|
||||
for chunk in say_no():
|
||||
cli.s.sendall(chunk)
|
||||
|
||||
return "false"
|
||||
7
bin/handlers/sorry.py
Executable file
7
bin/handlers/sorry.py
Executable file
@@ -0,0 +1,7 @@
|
||||
# sends a custom response instead of the usual 404
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
msg = f"sorry {cli.ip} but {cli.vpath} doesn't exist"
|
||||
|
||||
return str(cli.reply(msg.encode("utf-8"), 404, "text/plain"))
|
||||
@@ -2,15 +2,25 @@ standalone programs which are executed by copyparty when an event happens (uploa
|
||||
|
||||
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
||||
|
||||
run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
|
||||
|
||||
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
||||
|
||||
|
||||
# after upload
|
||||
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
|
||||
* [notify2.py](notify2.py) uses the json API to show more context
|
||||
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
|
||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||
|
||||
|
||||
# upload batches
|
||||
these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
|
||||
* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
|
||||
* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root
|
||||
|
||||
|
||||
# before upload
|
||||
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
||||
|
||||
|
||||
@@ -13,9 +13,15 @@ example usage as global config:
|
||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute after upload
|
||||
f = fork; don't wait for it to finish
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = provide upload information as json; not just the filename
|
||||
@@ -30,6 +36,7 @@ then use this to design your message: https://discohook.org/
|
||||
|
||||
def main():
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5"
|
||||
|
||||
# read info from copyparty
|
||||
inf = json.loads(sys.argv[1])
|
||||
|
||||
72
bin/hooks/image-noexif.py
Executable file
72
bin/hooks/image-noexif.py
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
remove exif tags from uploaded images; the eventhook edition of
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py
|
||||
|
||||
dependencies:
|
||||
exiftool / perl-Image-ExifTool
|
||||
|
||||
being an upload hook, this will take effect after upload completion
|
||||
but before copyparty has hashed/indexed the file, which means that
|
||||
copyparty will never index the original file, so deduplication will
|
||||
not work as expected... which is mostly OK but ehhh
|
||||
|
||||
note: modifies the file in-place, so don't set the `f` (fork) flag
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau bin/hooks/image-noexif.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
explained:
|
||||
share fs-path srv/inc at /inc (readable by all, read-write for user ed)
|
||||
running this xau (execute-after-upload) plugin for all uploaded files
|
||||
"""
|
||||
|
||||
|
||||
# filetypes to process; ignores everything else
|
||||
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
ext = fp.lower().split(".")[-1]
|
||||
if ext not in EXTS:
|
||||
return
|
||||
|
||||
cwd, fn = os.path.split(fp)
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-overwrite_original",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
print("image-noexif: stripped")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
||||
@@ -17,8 +17,12 @@ depdencies:
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau f,bin/hooks/notify.py
|
||||
-v srv/inc:inc:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
@@ -26,9 +30,23 @@ parameters explained,
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import humansize
|
||||
except:
|
||||
|
||||
def humansize(n):
|
||||
return n
|
||||
|
||||
|
||||
def main():
|
||||
dp, fn = os.path.split(sys.argv[1])
|
||||
msg = "🏷️ {}\n📁 {}".format(fn, dp)
|
||||
fp = sys.argv[1]
|
||||
dp, fn = os.path.split(fp)
|
||||
try:
|
||||
sz = humansize(os.path.getsize(fp))
|
||||
except:
|
||||
sz = "?"
|
||||
|
||||
msg = "{} ({})\n📁 {}".format(fn, sz, dp)
|
||||
title = "File received"
|
||||
|
||||
if "com.termux" in sys.executable:
|
||||
|
||||
72
bin/hooks/notify2.py
Executable file
72
bin/hooks/notify2.py
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from datetime import datetime
|
||||
from plyer import notification
|
||||
|
||||
|
||||
_ = r"""
|
||||
same as notify.py but with additional info (uploader, ...)
|
||||
and also supports --xm (notify on 📟 message)
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xm f,j,bin/hooks/notify2.py
|
||||
--xau f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads / msgs with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide json instead of filepath list
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import humansize
|
||||
except:
|
||||
|
||||
def humansize(n):
|
||||
return n
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
fp = inf["ap"]
|
||||
sz = humansize(inf["sz"])
|
||||
dp, fn = os.path.split(fp)
|
||||
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||
title = "File received"
|
||||
icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
|
||||
|
||||
if inf.get("txt"):
|
||||
msg = inf["txt"]
|
||||
title = "Message received"
|
||||
icon = "mail-unread-symbolic" if sys.platform == "linux" else ""
|
||||
|
||||
msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"
|
||||
|
||||
if "com.termux" in sys.executable:
|
||||
sp.run(["termux-notification", "-t", title, "-c", msg])
|
||||
return
|
||||
|
||||
notification.notify(
|
||||
title=title,
|
||||
message=msg,
|
||||
app_icon=icon,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -10,7 +10,12 @@ example usage as global config:
|
||||
--xbu c,bin/hooks/reject-extension.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xbu=c,bin/hooks/reject-extension.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
|
||||
@@ -17,7 +17,12 @@ example usage as global config:
|
||||
--xau c,bin/hooks/reject-mimetype.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
|
||||
@@ -15,9 +15,15 @@ example usage as global config:
|
||||
--xm f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all messages with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message-to-server-log
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide message information as json; not just the text
|
||||
c3 = mute all output
|
||||
@@ -31,6 +37,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
108
bin/hooks/xiu-sha.py
Executable file
108
bin/hooks/xiu-sha.py
Executable file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
_ = r"""
|
||||
this hook will produce a single sha512 file which
|
||||
covers all recent uploads (plus metadata comments)
|
||||
|
||||
use this with --xiu, which makes copyparty buffer
|
||||
uploads until server is idle, providing file infos
|
||||
on stdin (filepaths or json)
|
||||
|
||||
example usage as global config:
|
||||
--xiu i5,j,bin/hooks/xiu-sha.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
i5 = ...after volume has been idle for 5sec
|
||||
j = provide json instead of filepath list
|
||||
|
||||
note the "f" (fork) flag is not set, so this xiu
|
||||
will block other xiu hooks while it's running
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
def humantime(ts):
|
||||
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def find_files_root(inf):
|
||||
di = 9000
|
||||
for f1, f2 in zip(inf, inf[1:]):
|
||||
p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
|
||||
p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
|
||||
di = min(len(p1), len(p2), di)
|
||||
di = next((i for i in range(di) if p1[i] != p2[i]), di)
|
||||
|
||||
return di + 1
|
||||
|
||||
|
||||
def find_vol_root(inf):
|
||||
return len(inf[0]["ap"][: -len(inf[0]["vp"])])
|
||||
|
||||
|
||||
def main():
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
inf = json.loads(zs)
|
||||
|
||||
# root directory (where to put the sha512 file);
|
||||
# di = find_files_root(inf) # next to the file closest to volume root
|
||||
di = find_vol_root(inf) # top of the entire volume
|
||||
|
||||
ret = []
|
||||
total_sz = 0
|
||||
for md in inf:
|
||||
ap = md["ap"]
|
||||
rp = ap[di:]
|
||||
total_sz += md["sz"]
|
||||
fsize = "{:,}".format(md["sz"])
|
||||
mtime = humantime(md["mt"])
|
||||
up_ts = humantime(md["at"])
|
||||
|
||||
h = hashlib.sha512()
|
||||
with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(512 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
h.update(buf)
|
||||
|
||||
cksum = h.hexdigest()
|
||||
meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
|
||||
ret.append("# {}\n{} *{}".format(meta, cksum, rp))
|
||||
|
||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||
ret.append("")
|
||||
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
|
||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||
|
||||
print("wrote checksums to {}".format(fp))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
50
bin/hooks/xiu.py
Executable file
50
bin/hooks/xiu.py
Executable file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
this hook prints absolute filepaths + total size
|
||||
|
||||
use this with --xiu, which makes copyparty buffer
|
||||
uploads until server is idle, providing file infos
|
||||
on stdin (filepaths or json)
|
||||
|
||||
example usage as global config:
|
||||
--xiu i1,j,bin/hooks/xiu.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
i1 = ...after volume has been idle for 1sec
|
||||
j = provide json instead of filepath list
|
||||
|
||||
note the "f" (fork) flag is not set, so this xiu
|
||||
will block other xiu hooks while it's running
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
inf = json.loads(zs)
|
||||
|
||||
total_sz = 0
|
||||
for upload in inf:
|
||||
sz = upload["sz"]
|
||||
total_sz += sz
|
||||
print("{:9} {}".format(sz, upload["ap"]))
|
||||
|
||||
print("{} files, {} bytes total".format(len(inf), total_sz))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -24,6 +24,15 @@ these do not have any problematic dependencies at all:
|
||||
* also available as an [event hook](../hooks/wget.py)
|
||||
|
||||
|
||||
## dangerous plugins
|
||||
|
||||
plugins in this section should only be used with appropriate precautions:
|
||||
|
||||
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
||||
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
||||
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)
|
||||
@@ -31,7 +40,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
||||
*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
|
||||
|
||||
* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
|
||||
* from pypy: `keyfinder vamp`
|
||||
* from pip: `keyfinder vamp`
|
||||
|
||||
|
||||
# usage from copyparty
|
||||
|
||||
@@ -16,6 +16,10 @@ dep: ffmpeg
|
||||
"""
|
||||
|
||||
|
||||
# save beat timestamps to ".beats/filename.txt"
|
||||
SAVE = False
|
||||
|
||||
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
@@ -23,12 +27,11 @@ def det(tf):
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"300",
|
||||
b"-t", b"360",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
@@ -47,10 +50,29 @@ def det(tf):
|
||||
print(c["list"][0]["label"].split(" ")[0])
|
||||
return
|
||||
|
||||
# throws if detection failed:
|
||||
bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
|
||||
bpm = round(60 * ((len(cl) - 1) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
# throws if detection failed:
|
||||
beats = [float(x["timestamp"]) for x in cl]
|
||||
bds = [b - a for a, b in zip(beats, beats[1:])]
|
||||
bds.sort()
|
||||
n0 = int(len(bds) * 0.2)
|
||||
n1 = int(len(bds) * 0.75) + 1
|
||||
bds = bds[n0:n1]
|
||||
bpm = sum(bds)
|
||||
bpm = round(60 * (len(bds) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
|
||||
if SAVE:
|
||||
fdir, fname = os.path.split(sys.argv[1])
|
||||
bdir = os.path.join(fdir, ".beats")
|
||||
try:
|
||||
os.mkdir(fsenc(bdir))
|
||||
except:
|
||||
pass
|
||||
|
||||
fp = os.path.join(bdir, fname) + ".txt"
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
txt = "\n".join([f"{x:.2f}" for x in beats])
|
||||
f.write(txt.encode("utf-8"))
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
@@ -61,7 +61,7 @@ def main():
|
||||
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
f2 = os.path.join(b"noexif", f1)
|
||||
f2 = fsenc(os.path.join(b"noexif", fn))
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
|
||||
@@ -57,6 +57,7 @@ hash -r
|
||||
command -v python3 && pybin=python3 || pybin=python
|
||||
}
|
||||
|
||||
$pybin -c 'import numpy' ||
|
||||
$pybin -m pip install --user numpy
|
||||
|
||||
|
||||
@@ -224,7 +225,7 @@ install_vamp() {
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
WARNING -- DANGEROUS PLUGIN --
|
||||
if someone is able to upload files to a copyparty which is
|
||||
running this plugin, they can execute malware on your machine
|
||||
so please keep this on a LAN and protect it with a password
|
||||
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
@@ -10,16 +15,17 @@ use copyparty as a chromecast replacement:
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
(iOS devices have to rely on the web-UI)
|
||||
|
||||
goes without saying, but this is HELLA DANGEROUS,
|
||||
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
|
||||
iOS devices can use the web-UI or the shortcut instead:
|
||||
https://github.com/9001/copyparty#ios-shortcuts
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
example copyparty config to use this;
|
||||
lets the user "kevin" with password "hunter2" use this plugin:
|
||||
-a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin
|
||||
apt install xdotool libnotify-bin mpv
|
||||
python3 -m pip install --user -U streamlink yt-dlp
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
@@ -63,8 +69,10 @@ set -e
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
|
||||
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
# start the server
|
||||
# note 1: replace hunter2 with a better password to access the server
|
||||
# note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff
|
||||
cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
@@ -72,11 +80,23 @@ cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mt
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote
|
||||
|
||||
have_mpv = shutil.which("mpv")
|
||||
have_vlc = shutil.which("vlc")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 2 and sys.argv[1] == "x":
|
||||
# invoked on commandline for testing;
|
||||
# python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ
|
||||
txt = " ".join(sys.argv[2:])
|
||||
txt = quote(txt.replace(" ", "+"))
|
||||
return open_post(txt.encode("utf-8"))
|
||||
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
@@ -92,7 +112,7 @@ def open_post(txt):
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
open_url(txt)
|
||||
return open_url(txt)
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
@@ -128,6 +148,17 @@ def open_url(txt):
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
|
||||
|
||||
# mpv is probably smart enough to use streamlink automatically
|
||||
if try_mpv(txt):
|
||||
print("mpv got it")
|
||||
return
|
||||
|
||||
# or maybe streamlink would be a good choice to open this
|
||||
if try_streamlink(txt):
|
||||
print("streamlink got it")
|
||||
return
|
||||
|
||||
# nope,
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
@@ -136,4 +167,39 @@ def open_url(txt):
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
def try_mpv(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
print("trying mpv...")
|
||||
sp.check_call(["mpv", "--fs", url])
|
||||
return True
|
||||
except:
|
||||
# if it ran for 15 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 15
|
||||
|
||||
|
||||
def try_streamlink(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
import streamlink
|
||||
|
||||
print("trying streamlink...")
|
||||
streamlink.Streamlink().resolve_url(url)
|
||||
|
||||
if have_mpv:
|
||||
args = "-m streamlink -p mpv -a --fs"
|
||||
else:
|
||||
args = "-m streamlink"
|
||||
|
||||
cmd = [sys.executable] + args.split() + [url, "best"]
|
||||
t0 = time.time()
|
||||
sp.check_call(cmd)
|
||||
return True
|
||||
except:
|
||||
# if it ran for 10 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 10
|
||||
|
||||
|
||||
main()
|
||||
|
||||
@@ -65,6 +65,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
@@ -4,8 +4,9 @@ set -e
|
||||
# runs copyparty (or any other program really) in a chroot
|
||||
#
|
||||
# assumption: these directories, and everything within, are owned by root
|
||||
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
|
||||
|
||||
sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
|
||||
[ -e $v ] && sysdirs+=($v)
|
||||
done
|
||||
|
||||
# error-handler
|
||||
help() { cat <<'EOF'
|
||||
@@ -38,7 +39,7 @@ while true; do
|
||||
v="$1"; shift
|
||||
[ "$v" = -- ] && break # end of volumes
|
||||
[ "$#" -eq 0 ] && break # invalid usage
|
||||
vols+=( "$(realpath "$v")" )
|
||||
vols+=( "$(realpath "$v" || echo "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(command -v "$pybin")"
|
||||
@@ -82,7 +83,7 @@ jail="${jail%/}"
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
@@ -97,9 +98,11 @@ done
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
wait -f -p rv $p || true
|
||||
cd /
|
||||
echo "stopping chroot..."
|
||||
lsof "$jail" | grep -F "$jail" &&
|
||||
echo "chroot is in use; will not unmount" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
@@ -115,6 +118,15 @@ mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# create a dev
|
||||
(cd $jail; mkdir -p dev; cd dev
|
||||
[ -e null ] || mknod -m 666 null c 1 3
|
||||
[ -e zero ] || mknod -m 666 zero c 1 5
|
||||
[ -e random ] || mknod -m 444 random c 1 8
|
||||
[ -e urandom ] || mknod -m 444 urandom c 1 9
|
||||
)
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
@@ -124,5 +136,6 @@ export LOGNAME="$USER"
|
||||
#echo "cpp [$cpp]"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill -USR1 $p' USR1
|
||||
trap 'kill $p' INT TERM
|
||||
wait
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
S_VERSION = "1.10"
|
||||
S_BUILD_DT = "2023-08-15"
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2023-01-13, v1.2, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
u2c.py: upload to copyparty
|
||||
2021, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.12
|
||||
- if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
@@ -18,12 +22,15 @@ import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import socket
|
||||
import base64
|
||||
import hashlib
|
||||
import platform
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
EXE = sys.executable.endswith("exe")
|
||||
|
||||
try:
|
||||
import argparse
|
||||
except:
|
||||
@@ -33,8 +40,10 @@ except:
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
if sys.version_info > (2, 7):
|
||||
except ImportError as ex:
|
||||
if EXE:
|
||||
raise
|
||||
elif sys.version_info > (2, 7):
|
||||
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
|
||||
else:
|
||||
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
|
||||
@@ -42,7 +51,7 @@ except ImportError:
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
|
||||
|
||||
print(m.format(sys.executable))
|
||||
print(m.format(sys.executable), "\nspecifically,", ex)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -51,6 +60,7 @@ PY2 = sys.version_info < (3,)
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import quote, unquote
|
||||
from urlparse import urlsplit, urlunsplit
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
@@ -58,6 +68,7 @@ else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
unicode = str
|
||||
|
||||
@@ -245,7 +256,13 @@ def eprint(*a, **ka):
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
try:
|
||||
_print(*a, **ka)
|
||||
except:
|
||||
v = " ".join(str(x) for x in a)
|
||||
v = v.encode("ascii", "replace").decode("ascii")
|
||||
_print(v, **ka)
|
||||
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
@@ -324,6 +341,32 @@ class CTermsize(object):
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def undns(url):
|
||||
usp = urlsplit(url)
|
||||
hn = usp.hostname
|
||||
gai = None
|
||||
eprint("resolving host [{0}] ...".format(hn), end="")
|
||||
try:
|
||||
gai = socket.getaddrinfo(hn, None)
|
||||
hn = gai[0][4][0]
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
|
||||
eprint(t.format(repr(gai)))
|
||||
raise
|
||||
|
||||
if usp.port:
|
||||
hn = "{0}:{1}".format(hn, usp.port)
|
||||
if usp.username or usp.password:
|
||||
hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)
|
||||
|
||||
usp = usp._replace(netloc=hn)
|
||||
url = urlunsplit(usp)
|
||||
eprint(" {0}".format(url))
|
||||
return url
|
||||
|
||||
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
with os.scandir(top) as dh:
|
||||
@@ -369,9 +412,29 @@ def walkdir(err, top, seen):
|
||||
err.append((ap, str(ex)))
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
def walkdirs(err, tops, excl):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
if not VT100:
|
||||
excl = excl.replace("/", r"\\")
|
||||
za = []
|
||||
for td in tops:
|
||||
try:
|
||||
ap = os.path.abspath(os.path.realpath(td))
|
||||
if td[-1:] in (b"\\", b"/"):
|
||||
ap += sep
|
||||
except:
|
||||
# maybe cpython #88013 (ok)
|
||||
ap = td
|
||||
|
||||
za.append(ap)
|
||||
|
||||
za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
|
||||
za = [x.replace(b"/", b"\\") for x in za]
|
||||
tops = za
|
||||
|
||||
ptn = re.compile(excl.encode("utf-8") or b"\n")
|
||||
|
||||
for top in tops:
|
||||
isdir = os.path.isdir(top)
|
||||
if top[-1:] == sep:
|
||||
@@ -384,6 +447,8 @@ def walkdirs(err, tops):
|
||||
|
||||
if isdir:
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
if ptn.match(ap):
|
||||
continue
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
@@ -520,7 +585,11 @@ def handshake(ar, file, search):
|
||||
except Exception as ex:
|
||||
em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
|
||||
|
||||
if sc == 422 or "<pre>partial upload exists at a different" in txt:
|
||||
if (
|
||||
sc == 422
|
||||
or "<pre>partial upload exists at a different" in txt
|
||||
or "<pre>source file busy; please try again" in txt
|
||||
):
|
||||
file.recheck = True
|
||||
return [], False
|
||||
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
|
||||
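As a reading aid, the decision this hunk extends, collapsed into one self-contained function; sc and txt are the handshake's HTTP status code and response body:

def classify_handshake(sc, txt):
    # 422 / "partial upload exists" / "source file busy": let the server settle, verify later
    if (
        sc == 422
        or "<pre>partial upload exists at a different" in txt
        or "<pre>source file busy; please try again" in txt
    ):
        return "recheck"
    # 409: the file is already on the server, nothing to upload
    if sc == 409 or "<pre>upload rejected, file already exists" in txt:
        return "exists"
    return "other"  # anything else is handled further down in handshake()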
@@ -552,8 +621,8 @@ def handshake(ar, file, search):
|
||||
return r["hash"], r["sprs"]
|
||||
|
||||
|
||||
def upload(file, cid, pw):
|
||||
# type: (File, str, str) -> None
|
||||
def upload(file, cid, pw, stats):
|
||||
# type: (File, str, str, str) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
@@ -561,6 +630,10 @@ def upload(file, cid, pw):
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
|
||||
if stats:
|
||||
headers["X-Up2k-Stat"] = stats
|
||||
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
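A condensed, self-contained view of the header block upload() now sends per chunk; the chunk-hash header and the POST call itself sit outside the lines shown in this hunk, so they are omitted here.

def chunk_headers(wark, stats, pw):
    h = {
        "X-Up2k-Wark": wark,                   # the file's up2k wark
        "Content-Type": "application/octet-stream",
    }
    if stats:
        h["X-Up2k-Stat"] = stats               # new: client-side progress counters
    if pw:
        h["Cookie"] = "=".join(["cppwd", pw])  # password auth via the cppwd cookie
    return h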
@@ -587,7 +660,7 @@ class Ctl(object):
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
for _, _, inf in walkdirs(err, ar.files, ar.x):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
continue
|
||||
|
||||
@@ -615,6 +688,7 @@ class Ctl(object):
|
||||
return nfiles, nbytes
|
||||
|
||||
def __init__(self, ar, stats=None):
|
||||
self.ok = False
|
||||
self.ar = ar
|
||||
self.stats = stats or self._scan()
|
||||
if not self.stats:
|
||||
@@ -628,7 +702,9 @@ class Ctl(object):
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
self.filegen = walkdirs([], ar.files, ar.x)
|
||||
self.recheck = [] # type: list[File]
|
||||
|
||||
if ar.safe:
|
||||
self._safe()
|
||||
else:
|
||||
@@ -647,11 +723,11 @@ class Ctl(object):
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
self.eta = "99:99:99"
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
self.recheck = [] # type: list[File]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
@@ -660,6 +736,8 @@ class Ctl(object):
|
||||
|
||||
self._fancy()
|
||||
|
||||
self.ok = True
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
@@ -693,7 +771,8 @@ class Ctl(object):
|
||||
ncs = len(hs)
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
upload(file, cid, self.ar.a)
|
||||
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
|
||||
upload(file, cid, self.ar.a, stats)
|
||||
|
||||
print(" ok!")
|
||||
if file.recheck:
|
||||
@@ -768,12 +847,12 @@ class Ctl(object):
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
sleft = humansize(self.nbytes - self.up_b)
|
||||
nleft = self.nfiles - self.up_f
|
||||
tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
|
||||
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||
|
||||
if not self.recheck:
|
||||
@@ -809,9 +888,9 @@ class Ctl(object):
|
||||
print(" ls ~{0}".format(srd))
|
||||
zb = self.ar.url.encode("utf-8")
|
||||
zb += quotep(rd.replace(b"\\", b"/"))
|
||||
r = req_ses.get(zb + b"?ls&dots", headers=headers)
|
||||
r = req_ses.get(zb + b"?ls<&dots", headers=headers)
|
||||
if not r:
|
||||
raise Exception("HTTP {}".format(r.status_code))
|
||||
raise Exception("HTTP {0}".format(r.status_code))
|
||||
|
||||
j = r.json()
|
||||
for f in j["dirs"] + j["files"]:
|
||||
@@ -886,6 +965,9 @@ class Ctl(object):
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
if not VT100:
|
||||
upath = upath.lstrip("\\?")
|
||||
|
||||
hs, sprs = handshake(self.ar, file, search)
|
||||
if search:
|
||||
if hs:
|
||||
@@ -951,11 +1033,23 @@ class Ctl(object):
|
||||
self.uploader_busy += 1
|
||||
self.t0_up = self.t0_up or time.time()
|
||||
|
||||
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
|
||||
stats = zs.format(
|
||||
self.up_f,
|
||||
len(self.recheck),
|
||||
self.uploader_busy,
|
||||
self.nfiles - self.up_f,
|
||||
int(self.nbytes / (1024 * 1024)),
|
||||
int((self.nbytes - self.up_b) / (1024 * 1024)),
|
||||
self.eta,
|
||||
)
|
||||
|
||||
file, cid = task
|
||||
try:
|
||||
upload(file, cid, self.ar.a)
|
||||
except:
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
upload(file, cid, self.ar.a, stats)
|
||||
except Exception as ex:
|
||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||
eprint(t.format(file.name, cid[:8], ex))
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
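The stats string assembled above (and passed to upload() as X-Up2k-Stat) packs the client's progress into one value; a small illustration with invented numbers:

# "<files done>/<rechecks>/<busy uploaders>/<files left> <MiB total>/<MiB left> <eta>"
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
print(zs.format(37, 0, 3, 105, 8192, 5120, "0:41:09"))
# -> 37/0/3/105 8192/5120 0:41:09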
@@ -989,8 +1083,15 @@ def main():
|
||||
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
||||
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||
|
||||
ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
|
||||
if "--version" in sys.argv:
|
||||
print(ver)
|
||||
return
|
||||
|
||||
sys.argv = [x for x in sys.argv if x != "--ws"]
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
|
||||
NOTE:
|
||||
source file/folder selection uses rsync syntax, meaning that:
|
||||
"foo" uploads the entire folder to URL/foo/
|
||||
@@ -1002,11 +1103,13 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||
|
||||
ap = app.add_argument_group("compatibility")
|
||||
ap.add_argument("--cls", action="store_true", help="clear screen before start")
|
||||
ap.add_argument("--ws", action="store_true", help="copyparty is running on windows; wait before deleting files after uploading")
|
||||
ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")
|
||||
|
||||
ap = app.add_argument_group("folder sync")
|
||||
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
|
||||
@@ -1017,7 +1120,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
@@ -1026,7 +1129,16 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
# fmt: on
|
||||
|
||||
ar = app.parse_args()
|
||||
try:
|
||||
ar = app.parse_args()
|
||||
finally:
|
||||
if EXE and not sys.argv[1:]:
|
||||
eprint("*** hit enter to exit ***")
|
||||
try:
|
||||
input()
|
||||
except:
|
||||
pass
|
||||
|
||||
if ar.drd:
|
||||
ar.dr = True
|
||||
|
||||
@@ -1040,7 +1152,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
+ (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
|
||||
@@ -1050,24 +1162,32 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
|
||||
if ar.a and ar.a.startswith("$"):
|
||||
fn = ar.a[1:]
|
||||
print("reading password from file [{}]".format(fn))
|
||||
print("reading password from file [{0}]".format(fn))
|
||||
with open(fn, "rb") as f:
|
||||
ar.a = f.read().decode("utf-8").strip()
|
||||
|
||||
for n in range(ar.rh):
|
||||
try:
|
||||
ar.url = undns(ar.url)
|
||||
break
|
||||
except KeyboardInterrupt:
|
||||
raise
|
||||
except:
|
||||
if n > ar.rh - 2:
|
||||
raise
|
||||
|
||||
if ar.cls:
|
||||
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
|
||||
ctl = Ctl(ar)
|
||||
|
||||
if ar.dr and not ar.drd:
|
||||
if ar.dr and not ar.drd and ctl.ok:
|
||||
print("\npass 2/2: delete")
|
||||
if getattr(ctl, "up_br") and ar.ws:
|
||||
# wait for up2k to mtime if there was uploads
|
||||
time.sleep(4)
|
||||
|
||||
ar.drd = True
|
||||
ar.z = True
|
||||
Ctl(ar, ctl.stats)
|
||||
ctl = Ctl(ar, ctl.stats)
|
||||
|
||||
sys.exit(0 if ctl.ok else 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
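For readers untangling the interleaved old/new lines of the last main() hunk, the resulting two-pass flow of the folder-sync options, condensed; names are as in the script above, so this is an excerpt rather than a standalone program:

ctl = Ctl(ar)                               # pass 1: upload / verify

if ar.dr and not ar.drd and ctl.ok:         # pass 2 only runs if pass 1 succeeded
    print("\npass 2/2: delete")
    if getattr(ctl, "up_br") and ar.ws:     # copyparty-on-windows needs a moment to set mtimes
        time.sleep(4)
    ar.drd = True
    ar.z = True
    ctl = Ctl(ar, ctl.stats)                # reuse the first pass's scan results

sys.exit(0 if ctl.ok else 1)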
@@ -1,7 +1,6 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# if you are doing location-based proxying (such as `/stuff` below)
|
||||
|
||||
@@ -1,14 +1,44 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cat >/dev/null <<'EOF'
|
||||
|
||||
NOTE: copyparty is now able to do this automatically;
|
||||
however you may wish to use this script instead if
|
||||
you have specific needs (or if copyparty breaks)
|
||||
|
||||
this script generates a new self-signed TLS certificate and
|
||||
replaces the default insecure one that comes with copyparty
|
||||
|
||||
as it is trivial to impersonate a copyparty server using the
|
||||
default certificate, it is highly recommended to do this
|
||||
|
||||
this will create a self-signed CA, and a Server certificate
|
||||
which gets signed by that CA -- you can run it multiple times
|
||||
with different server-FQDNs / IPs to create additional certs
|
||||
for all your different servers / (non-)copyparty services
|
||||
|
||||
EOF
|
||||
|
||||
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server fqdn and/or IPs, comma-separated"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
[ -z "$srv_fqdn" ] && { cat <<'EOF'
|
||||
need arg 1: ca name
|
||||
need arg 2: server fqdn and/or IPs, comma-separated
|
||||
optional arg 3: if set, write cert into copyparty cfg
|
||||
|
||||
example:
|
||||
./cfssl.sh PartyCo partybox.local y
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
command -v cfssljson 2>/dev/null || {
|
||||
echo please install cfssl and try again
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -59,12 +89,14 @@ show() {
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_fqdn.pem"
|
||||
|
||||
echo
|
||||
echo "successfully generated new certificates"
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
echo "successfully replaced copyparty certificate"
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>⇆🎉 redirect</title>
|
||||
<title>💾🎉 redirect</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
|
||||
BIN  contrib/ios/upload-to-copyparty.shortcut  Normal file  (Binary file not shown.)
@@ -1,7 +1,6 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# -nc must match or exceed the webserver's max number of concurrent clients;
|
||||
@@ -9,7 +8,7 @@
|
||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||
#
|
||||
# you may also consider adding -j0 for CPU-intensive configurations
|
||||
# (not that i can really think of any good examples)
|
||||
# (5'000 requests per second, or 20gbps upload/download in parallel)
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
@@ -35,7 +34,15 @@ server {
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# NOTE: with cloudflare you want this instead:
|
||||
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
}
|
||||
|
||||
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
|
||||
client_max_body_size 1024M;
|
||||
client_header_timeout 610m;
|
||||
client_body_timeout 610m;
|
||||
send_timeout 610m;
|
||||
|
||||
282  contrib/nixos/modules/copyparty.nix  Normal file
@@ -0,0 +1,282 @@
|
||||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
mkKeyValue = key: value:
|
||||
if value == true then
|
||||
# sets with a true boolean value are coerced to just the key name
|
||||
key
|
||||
else if value == false then
|
||||
# or omitted completely when false
|
||||
""
|
||||
else
|
||||
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
|
||||
|
||||
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
|
||||
|
||||
mkValueString = value:
|
||||
if isList value then
|
||||
(concatStringsSep ", " (map mkValueString value))
|
||||
else if isAttrs value then
|
||||
"\n" + (mkAttrsString value)
|
||||
else
|
||||
(generators.mkValueStringDefault { } value);
|
||||
|
||||
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
|
||||
|
||||
mkSection = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${mkAttrsString attrs}
|
||||
'';
|
||||
|
||||
mkVolume = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${attrs.path}
|
||||
${mkAttrsString {
|
||||
accs = attrs.access;
|
||||
flags = attrs.flags;
|
||||
}}
|
||||
'';
|
||||
|
||||
passwordPlaceholder = name: "{{password-${name}}}";
|
||||
|
||||
accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name);
|
||||
|
||||
configStr = ''
|
||||
${mkSection "global" cfg.settings}
|
||||
${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)}
|
||||
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
||||
'';
|
||||
|
||||
name = "copyparty";
|
||||
cfg = config.services.copyparty;
|
||||
configFile = pkgs.writeText "${name}.conf" configStr;
|
||||
runtimeConfigPath = "/run/${name}/${name}.conf";
|
||||
home = "/var/lib/${name}";
|
||||
defaultShareDir = "${home}/data";
|
||||
in {
|
||||
options.services.copyparty = {
|
||||
enable = mkEnableOption "web-based file manager";
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = pkgs.copyparty;
|
||||
defaultText = "pkgs.copyparty";
|
||||
description = ''
|
||||
Package of the application to run, exposed for overriding purposes.
|
||||
'';
|
||||
};
|
||||
|
||||
openFilesLimit = mkOption {
|
||||
default = 4096;
|
||||
type = types.either types.int types.str;
|
||||
description = "Number of files to allow copyparty to open.";
|
||||
};
|
||||
|
||||
settings = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Global settings to apply.
|
||||
Directly maps to values in the [global] section of the copyparty config.
|
||||
See `${getExe cfg.package} --help` for more details.
|
||||
'';
|
||||
default = {
|
||||
i = "127.0.0.1";
|
||||
no-reload = true;
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
i = "0.0.0.0";
|
||||
no-reload = true;
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
accounts = mkOption {
|
||||
type = types.attrsOf (types.submodule ({ ... }: {
|
||||
options = {
|
||||
passwordFile = mkOption {
|
||||
type = types.str;
|
||||
description = ''
|
||||
Runtime file path to a file containing the user password.
|
||||
Must be readable by the copyparty user.
|
||||
'';
|
||||
example = "/run/keys/copyparty/ed";
|
||||
};
|
||||
};
|
||||
}));
|
||||
description = ''
|
||||
A set of copyparty accounts to create.
|
||||
'';
|
||||
default = { };
|
||||
example = literalExpression ''
|
||||
{
|
||||
ed.passwordFile = "/run/keys/copyparty/ed";
|
||||
};
|
||||
'';
|
||||
};
|
||||
|
||||
volumes = mkOption {
|
||||
type = types.attrsOf (types.submodule ({ ... }: {
|
||||
options = {
|
||||
path = mkOption {
|
||||
type = types.str;
|
||||
description = ''
|
||||
Path of a directory to share.
|
||||
'';
|
||||
};
|
||||
access = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of permissions and the users to apply them to.
|
||||
|
||||
The key must be a string containing any combination of allowed permission:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"a" (upget): can see uploader IPs, config-reload
|
||||
|
||||
For example: "rwmd"
|
||||
|
||||
The value must be one of:
|
||||
an account name, defined in `accounts`
|
||||
a list of account names
|
||||
"*", which means "any account"
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
'';
|
||||
};
|
||||
flags = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of volume flags to apply.
|
||||
See `${getExe cfg.package} --help-flags` for more details.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
|
||||
fk = 4;
|
||||
# scan for new files every 60sec
|
||||
scan = 60;
|
||||
# volflag "e2d" enables the uploads database
|
||||
e2d = true;
|
||||
# "d2t" disables multimedia parsers (in case the uploads are malicious)
|
||||
d2t = true;
|
||||
# skips hashing file contents if path matches *.iso
|
||||
nohash = "\.iso$";
|
||||
};
|
||||
'';
|
||||
default = { };
|
||||
};
|
||||
};
|
||||
}));
|
||||
description = "A set of copyparty volumes to create";
|
||||
default = {
|
||||
"/" = {
|
||||
path = defaultShareDir;
|
||||
access = { r = "*"; };
|
||||
};
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
"/" = {
|
||||
path = ${defaultShareDir};
|
||||
access = {
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
};
|
||||
};
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
systemd.services.copyparty = {
|
||||
description = "http file sharing hub";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
PYTHONUNBUFFERED = "true";
|
||||
XDG_CONFIG_HOME = "${home}/.config";
|
||||
};
|
||||
|
||||
preStart = let
|
||||
replaceSecretCommand = name: attrs:
|
||||
"${getExe pkgs.replace-secret} '${
|
||||
passwordPlaceholder name
|
||||
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
||||
in ''
|
||||
set -euo pipefail
|
||||
install -m 600 ${configFile} ${runtimeConfigPath}
|
||||
${concatStringsSep "\n"
|
||||
(mapAttrsToList replaceSecretCommand cfg.accounts)}
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
||||
|
||||
# Hardening options
|
||||
User = "copyparty";
|
||||
Group = "copyparty";
|
||||
RuntimeDirectory = name;
|
||||
RuntimeDirectoryMode = "0700";
|
||||
StateDirectory = [ name "${name}/data" "${name}/.config" ];
|
||||
StateDirectoryMode = "0700";
|
||||
WorkingDirectory = home;
|
||||
TemporaryFileSystem = "/:ro";
|
||||
BindReadOnlyPaths = [
|
||||
"/nix/store"
|
||||
"-/etc/resolv.conf"
|
||||
"-/etc/nsswitch.conf"
|
||||
"-/etc/hosts"
|
||||
"-/etc/localtime"
|
||||
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
||||
BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
||||
# Would re-mount paths ignored by temporary root
|
||||
#ProtectSystem = "strict";
|
||||
ProtectHome = true;
|
||||
PrivateTmp = true;
|
||||
PrivateDevices = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectControlGroups = true;
|
||||
RestrictSUIDSGID = true;
|
||||
PrivateMounts = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectHostname = true;
|
||||
ProtectClock = true;
|
||||
ProtectProc = "invisible";
|
||||
ProcSubset = "pid";
|
||||
RestrictNamespaces = true;
|
||||
RemoveIPC = true;
|
||||
UMask = "0077";
|
||||
LimitNOFILE = cfg.openFilesLimit;
|
||||
NoNewPrivileges = true;
|
||||
LockPersonality = true;
|
||||
RestrictRealtime = true;
|
||||
};
|
||||
};
|
||||
|
||||
users.groups.copyparty = { };
|
||||
users.users.copyparty = {
|
||||
description = "Service user for copyparty";
|
||||
group = "copyparty";
|
||||
home = home;
|
||||
isSystemUser = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
55  contrib/package/arch/PKGBUILD  Normal file
@@ -0,0 +1,55 @@
|
||||
# Maintainer: icxes <dev.null@need.moe>
|
||||
pkgname=copyparty
|
||||
pkgver="1.9.3"
|
||||
pkgrel=1
|
||||
pkgdesc="Portable file sharing hub"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("python" "lsof" "python-jinja")
|
||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"python-mutagen: music tags (alternative)"
|
||||
"python-pillow: thumbnails for images"
|
||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||
"libkeyfinder-git: detection of musical keys"
|
||||
"qm-vamp-plugins: BPM detection"
|
||||
"python-pyopenssl: ftps functionality"
|
||||
"python-argon2_cffi: hashed passwords in config"
|
||||
"python-impacket-git: smb support (bad idea)"
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}.d/init" )
|
||||
sha256sums=("87db55a57adf14b3b875c72d94b5df67560abc6dbfc104104e0c76d7f02848b6")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
pushd copyparty/web
|
||||
make -j$(nproc)
|
||||
rm Makefile
|
||||
popd
|
||||
|
||||
python3 -m build -wn
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python3 -m installer -d "$pkgdir" dist/*.whl
|
||||
|
||||
install -dm755 "${pkgdir}/etc/${pkgname}.d"
|
||||
install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||
install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
|
||||
install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
|
||||
install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
|
||||
install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||
|
||||
find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
|
||||
echo "┏━━━━━━━━━━━━━━━──-"
|
||||
echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/"
|
||||
echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:"
|
||||
echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service (standard)"
|
||||
echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)"
|
||||
echo "┗━━━━━━━━━━━━━━━──-"
|
||||
}
|
||||
7  contrib/package/arch/copyparty.conf  Normal file
@@ -0,0 +1,7 @@
|
||||
## import all *.conf files from the current folder (/etc/copyparty.d)
|
||||
% ./
|
||||
|
||||
# add additional .conf files to this folder;
|
||||
# see example config files for reference:
|
||||
# https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf
|
||||
# https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d
|
||||
32  contrib/package/arch/copyparty.service  Normal file
@@ -0,0 +1,32 @@
|
||||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# and read config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# you probably want to:
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
WorkingDirectory=/var/lib/copyparty-jail
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# user to run as + where the TLS certificate is (if any)
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
3  contrib/package/arch/index.md  Normal file
@@ -0,0 +1,3 @@
|
||||
this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured
|
||||
|
||||
please add some `*.conf` files to `/etc/copyparty.d/`
|
||||
31  contrib/package/arch/prisonparty.service  Normal file
@@ -0,0 +1,31 @@
|
||||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere
|
||||
# and read config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `1000` and `--`
|
||||
#
|
||||
# `1000 1000` = what user to run copyparty as
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=prisonparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
WorkingDirectory=/var/lib/copyparty-jail
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
|
||||
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
59  contrib/package/nix/copyparty/default.nix  Normal file
@@ -0,0 +1,59 @@
|
||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
|
||||
|
||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||
withHashedPasswords ? true,
|
||||
|
||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||
withThumbnails ? true,
|
||||
|
||||
# create thumbnails with PyVIPS; even faster, uses more memory
|
||||
# -- can be combined with Pillow to support more filetypes
|
||||
withFastThumbnails ? false,
|
||||
|
||||
# enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus
|
||||
# -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface
|
||||
# -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both
|
||||
withMediaProcessing ? true,
|
||||
|
||||
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
|
||||
withBasicAudioMetadata ? false,
|
||||
|
||||
# enable FTPS support in the FTP server
|
||||
withFTPS ? false,
|
||||
|
||||
# samba/cifs server; dangerous and buggy, enable if you really need it
|
||||
withSMB ? false,
|
||||
|
||||
}:
|
||||
|
||||
let
|
||||
pinData = lib.importJSON ./pin.json;
|
||||
pyEnv = python.withPackages (ps:
|
||||
with ps; [
|
||||
jinja2
|
||||
]
|
||||
++ lib.optional withSMB impacket
|
||||
++ lib.optional withFTPS pyopenssl
|
||||
++ lib.optional withThumbnails pillow
|
||||
++ lib.optional withFastThumbnails pyvips
|
||||
++ lib.optional withMediaProcessing ffmpeg
|
||||
++ lib.optional withBasicAudioMetadata mutagen
|
||||
++ lib.optional withHashedPasswords argon2-cffi
|
||||
);
|
||||
in stdenv.mkDerivation {
|
||||
pname = "copyparty";
|
||||
version = pinData.version;
|
||||
src = fetchurl {
|
||||
url = pinData.url;
|
||||
hash = pinData.hash;
|
||||
};
|
||||
buildInputs = [ makeWrapper ];
|
||||
dontUnpack = true;
|
||||
dontBuild = true;
|
||||
installPhase = ''
|
||||
install -Dm755 $src $out/share/copyparty-sfx.py
|
||||
makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
|
||||
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
||||
--add-flags "$out/share/copyparty-sfx.py"
|
||||
'';
|
||||
}
|
||||
5  contrib/package/nix/copyparty/pin.json  Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.9.3/copyparty-sfx.py",
|
||||
"version": "1.9.3",
|
||||
"hash": "sha256-ufT7WARaj6nKaLX/r3X/ex/hMLMh1rtG0lkZHCm4Gu4="
|
||||
}
|
||||
77  contrib/package/nix/copyparty/update.py  Executable file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Update the Nix package pin
|
||||
#
|
||||
# Usage: ./update.py [PATH]
|
||||
# When the [PATH] is not set, it will fetch the latest release from the repo.
|
||||
# With [PATH] set, it will hash the given file and generate the URL,
|
||||
# based on the version contained within the file
|
||||
|
||||
import base64
|
||||
import json
|
||||
import hashlib
|
||||
import sys
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
OUTPUT_FILE = Path("pin.json")
|
||||
TARGET_ASSET = "copyparty-sfx.py"
|
||||
HASH_TYPE = "sha256"
|
||||
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"
|
||||
DOWNLOAD_URL = lambda version: f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}"
|
||||
|
||||
|
||||
def get_formatted_hash(binary):
|
||||
hasher = hashlib.new("sha256")
|
||||
hasher.update(binary)
|
||||
asset_hash = hasher.digest()
|
||||
encoded_hash = base64.b64encode(asset_hash).decode("ascii")
|
||||
return f"{HASH_TYPE}-{encoded_hash}"
|
||||
|
||||
|
||||
def version_from_sfx(binary):
|
||||
result = re.search(b'^VER = "(.*)"$', binary, re.MULTILINE)
|
||||
if result:
|
||||
return result.groups(1)[0].decode("ascii")
|
||||
|
||||
raise ValueError("version not found in provided file")
|
||||
|
||||
|
||||
def remote_release_pin():
|
||||
import requests
|
||||
|
||||
response = requests.get(LATEST_RELEASE_URL).json()
|
||||
version = response["tag_name"].lstrip("v")
|
||||
asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET][0]
|
||||
download_url = asset_info["browser_download_url"]
|
||||
asset = requests.get(download_url)
|
||||
formatted_hash = get_formatted_hash(asset.content)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def local_release_pin(path):
|
||||
asset = path.read_bytes()
|
||||
version = version_from_sfx(asset)
|
||||
download_url = DOWNLOAD_URL(version)
|
||||
formatted_hash = get_formatted_hash(asset)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1:
|
||||
asset_path = Path(sys.argv[1])
|
||||
result = local_release_pin(asset_path)
|
||||
else:
|
||||
result = remote_release_pin()
|
||||
|
||||
print(result)
|
||||
json_result = json.dumps(result, indent=4)
|
||||
OUTPUT_FILE.write_text(json_result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
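The pin written by this script is consumed by default.nix above; a small self-contained check that a downloaded copyparty-sfx.py matches pin.json, using the same sha256-<base64> convention as get_formatted_hash():

import base64, hashlib, json
from pathlib import Path

pin = json.loads(Path("pin.json").read_text())
digest = hashlib.sha256(Path("copyparty-sfx.py").read_bytes()).digest()
sri = "sha256-" + base64.b64encode(digest).decode("ascii")
print("pin ok" if sri == pin["hash"] else "pin mismatch")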
@@ -1,4 +1,9 @@
|
||||
<!--
|
||||
NOTE: DEPRECATED; please use the javascript version instead:
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js
|
||||
|
||||
----
|
||||
|
||||
save this as .epilogue.html inside a write-only folder to declutter the UI; it makes the page look like
|
||||
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
|
||||
|
||||
@@ -11,7 +16,7 @@
|
||||
|
||||
/* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
|
||||
|
||||
#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
#ops, #tree, #path, #wfp, /* main tabs and navigators (tree/breadcrumbs) */
|
||||
|
||||
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ almost the same as minimal-up2k.html except this one...:
|
||||
var u2min = `
|
||||
<style>
|
||||
|
||||
#ops, #path, #tree, #files, #epi+div+h2,
|
||||
#ops, #path, #tree, #files, #wfp,
|
||||
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
|
||||
display: none !important;
|
||||
}
|
||||
@@ -55,5 +55,5 @@ var u2min = `
|
||||
if (!has(perms, 'read')) {
|
||||
var e2 = mknod('div');
|
||||
e2.innerHTML = u2min;
|
||||
ebi('wrap').insertBefore(e2, QS('#epi+h2'));
|
||||
ebi('wrap').insertBefore(e2, QS('#wfp'));
|
||||
}
|
||||
|
||||
208  contrib/plugins/rave.js  Normal file
@@ -0,0 +1,208 @@
|
||||
/* untz untz untz untz */
|
||||
|
||||
(function () {
|
||||
|
||||
var can, ctx, W, H, fft, buf, bars, barw, pv,
|
||||
hue = 0,
|
||||
ibeat = 0,
|
||||
beats = [9001],
|
||||
beats_url = '',
|
||||
uofs = 0,
|
||||
ops = ebi('ops'),
|
||||
raving = false,
|
||||
recalc = 0,
|
||||
cdown = 0,
|
||||
FC = 0.9,
|
||||
css = `<style>
|
||||
|
||||
#fft {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: -1;
|
||||
}
|
||||
body {
|
||||
box-shadow: inset 0 0 0 white;
|
||||
}
|
||||
#ops>a,
|
||||
#path>a {
|
||||
display: inline-block;
|
||||
}
|
||||
/*
|
||||
body.untz {
|
||||
animation: untz-body 200ms ease-out;
|
||||
}
|
||||
@keyframes untz-body {
|
||||
0% {inset 0 0 20em white}
|
||||
100% {inset 0 0 0 white}
|
||||
}
|
||||
*/
|
||||
:root, html.a, html.b, html.c, html.d, html.e {
|
||||
--row-alt: rgba(48,52,78,0.2);
|
||||
}
|
||||
#files td {
|
||||
background: none;
|
||||
}
|
||||
|
||||
</style>`;
|
||||
|
||||
QS('body').appendChild(mknod('div', null, css));
|
||||
|
||||
function rave_load() {
|
||||
console.log('rave_load');
|
||||
can = mknod('canvas', 'fft');
|
||||
QS('body').appendChild(can);
|
||||
ctx = can.getContext('2d');
|
||||
|
||||
fft = new AnalyserNode(actx, {
|
||||
"fftSize": 2048,
|
||||
"maxDecibels": 0,
|
||||
"smoothingTimeConstant": 0.7,
|
||||
});
|
||||
ibeat = 0;
|
||||
beats = [9001];
|
||||
buf = new Uint8Array(fft.frequencyBinCount);
|
||||
bars = buf.length * FC;
|
||||
afilt.filters.push(fft);
|
||||
if (!raving) {
|
||||
raving = true;
|
||||
raver();
|
||||
}
|
||||
beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
|
||||
console.log("reading beats from", beats_url);
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', beats_url, true);
|
||||
xhr.onload = readbeats;
|
||||
xhr.url = beats_url;
|
||||
xhr.send();
|
||||
}
|
||||
|
||||
function rave_unload() {
|
||||
qsr('#fft');
|
||||
can = null;
|
||||
}
|
||||
|
||||
function readbeats() {
|
||||
if (this.url != beats_url)
|
||||
return console.log('old beats??', this.url, beats_url);
|
||||
|
||||
var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g);
|
||||
if (sbeats.length < 3)
|
||||
return;
|
||||
|
||||
beats = [];
|
||||
for (var a = 0; a < sbeats.length; a++)
|
||||
beats.push(parseFloat(sbeats[a]));
|
||||
|
||||
var end = beats.slice(-2),
|
||||
t = end[1],
|
||||
d = t - end[0];
|
||||
|
||||
while (d > 0.1 && t < 1200)
|
||||
beats.push(t += d);
|
||||
}
|
||||
|
||||
function hrand() {
|
||||
return Math.random() - 0.5;
|
||||
}
|
||||
|
||||
function raver() {
|
||||
if (!can) {
|
||||
raving = false;
|
||||
return;
|
||||
}
|
||||
|
||||
requestAnimationFrame(raver);
|
||||
if (!mp || !mp.au || mp.au.paused)
|
||||
return;
|
||||
|
||||
if (--uofs >= 0) {
|
||||
document.body.style.marginLeft = hrand() * uofs + 'px';
|
||||
ebi('tree').style.marginLeft = hrand() * uofs + 'px';
|
||||
for (var a of QSA('#ops>a, #path>a, #pctl>a'))
|
||||
a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)'
|
||||
}
|
||||
|
||||
if (--recalc < 0) {
|
||||
recalc = 60;
|
||||
var tree = ebi('tree'),
|
||||
x = tree.style.display == 'none' ? 0 : tree.offsetWidth;
|
||||
|
||||
//W = can.width = window.innerWidth - x;
|
||||
//H = can.height = window.innerHeight;
|
||||
//H = ebi('widget').offsetTop;
|
||||
W = can.width = bars;
|
||||
H = can.height = 512;
|
||||
barw = 1; //parseInt(0.8 + W / bars);
|
||||
can.style.left = x + 'px';
|
||||
can.style.width = (window.innerWidth - x) + 'px';
|
||||
can.style.height = ebi('widget').offsetTop + 'px';
|
||||
}
|
||||
|
||||
//if (--cdown == 1)
|
||||
// clmod(ops, 'untz');
|
||||
|
||||
fft.getByteFrequencyData(buf);
|
||||
|
||||
var imax = 0, vmax = 0;
|
||||
for (var a = 10; a < 50; a++)
|
||||
if (vmax < buf[a]) {
|
||||
vmax = buf[a];
|
||||
imax = a;
|
||||
}
|
||||
|
||||
hue = hue * 0.93 + imax * 0.07;
|
||||
|
||||
ctx.fillStyle = 'rgba(0,0,0,0)';
|
||||
ctx.fillRect(0, 0, W, H);
|
||||
ctx.clearRect(0, 0, W, H);
|
||||
ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)';
|
||||
|
||||
var x = 0, mul = (H / 256) * 0.5;
|
||||
for (var a = 0; a < buf.length * FC; a++) {
|
||||
var v = buf[a] * mul * (1 + 0.69 * a / buf.length);
|
||||
ctx.fillRect(x, H - v, barw, v);
|
||||
x += barw;
|
||||
}
|
||||
|
||||
var t = mp.au.currentTime + 0.05;
|
||||
|
||||
if (ibeat >= beats.length || beats[ibeat] > t)
|
||||
return;
|
||||
|
||||
while (ibeat < beats.length && beats[ibeat++] < t)
|
||||
continue;
|
||||
|
||||
return untz();
|
||||
|
||||
var cv = 0;
|
||||
for (var a = 0; a < 128; a++)
|
||||
cv += buf[a];
|
||||
|
||||
if (cv - pv > 1000) {
|
||||
console.log(pv, cv, cv - pv);
|
||||
if (cdown < 0) {
|
||||
clmod(ops, 'untz', 1);
|
||||
cdown = 20;
|
||||
}
|
||||
}
|
||||
pv = cv;
|
||||
}
|
||||
|
||||
function untz() {
|
||||
console.log('untz');
|
||||
uofs = 14;
|
||||
document.body.animate([
|
||||
{ boxShadow: 'inset 0 0 1em #f0c' },
|
||||
{ boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 },
|
||||
{ boxShadow: 'inset 0 0 0 #f0c' },
|
||||
], { duration: 200, iterations: 1 });
|
||||
}
|
||||
|
||||
afilt.plugs.push({
|
||||
"en": true,
|
||||
"load": rave_load,
|
||||
"unload": rave_unload
|
||||
});
|
||||
|
||||
})();
|
||||
@@ -1,3 +1,6 @@
|
||||
# NOTE: this is now a built-in feature in copyparty
|
||||
# but you may still want this if you have specific needs
|
||||
#
|
||||
# systemd service which generates a new TLS certificate on each boot,
|
||||
# that way the one-year expiry time won't cause any issues --
|
||||
# just have everyone trust the ca.pem once every 10 years
|
||||
|
||||
@@ -2,12 +2,16 @@
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||
# cp -pv copyparty.service /etc/systemd/system/
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
|
||||
# firewall-cmd --reload
|
||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||
#
|
||||
# if it fails to start, first check this: systemctl status copyparty
|
||||
# then try starting it while viewing logs: journalctl -fan 100
|
||||
#
|
||||
# you may want to:
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
# remove the nft lines to only listen on port 3923
|
||||
@@ -18,6 +22,7 @@
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
# add '-e2dsa' to enable filesystem scanning + indexing
|
||||
# add '-e2ts' to enable metadata indexing
|
||||
# remove '--ansi' to disable colored logs
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
@@ -44,7 +49,7 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# setup forwarding from ports 80 and 443 to port 3923
|
||||
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
|
||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
||||
ExecStartPre=+nft add table ip nat
|
||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
||||
@@ -55,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# copyparty settings
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -6,12 +6,17 @@
|
||||
# 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
|
||||
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `1000` and `--`
|
||||
#
|
||||
# `1000 1000` = what user to run copyparty as
|
||||
#
|
||||
# you may want to:
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# (remember to change the '/mnt' chroot arg too)
|
||||
#
|
||||
# enable line-buffering for realtime logging (slight performance cost):
|
||||
# inside the [Service] block, add the following line:
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
@@ -19,7 +24,14 @@ Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=prisonparty
|
||||
WorkingDirectory=/usr/local/bin
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
WorkingDirectory=/var/lib/copyparty-jail
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
|
||||
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@ rem removes the 47.6 MiB filesize limit when downloading from webdav
|
||||
rem + optionally allows/enables password-auth over plaintext http
|
||||
rem + optionally helps disable wpad, removing the 10sec latency
|
||||
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
net session >nul 2>&1
|
||||
if %errorlevel% neq 0 (
|
||||
echo sorry, you must run this as administrator
|
||||
@@ -20,30 +18,26 @@ echo OK;
|
||||
echo allow webdav basic-auth over plaintext http?
|
||||
echo Y: login works, but the password will be visible in wireshark etc
|
||||
echo N: login will NOT work unless you use https and valid certificates
|
||||
set c=.
|
||||
set /p "c=(Y/N): "
|
||||
echo(
|
||||
if /i not "!c!"=="y" goto :g1
|
||||
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
|
||||
rem default is 1 (require tls)
|
||||
choice
|
||||
if %errorlevel% equ 1 (
|
||||
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
|
||||
rem default is 1 (require tls)
|
||||
)
|
||||
|
||||
:g1
|
||||
echo(
|
||||
echo OK;
|
||||
echo do you want to disable wpad?
|
||||
echo can give a HUGE speed boost depending on network settings
|
||||
set c=.
|
||||
set /p "c=(Y/N): "
|
||||
echo(
|
||||
if /i not "!c!"=="y" goto :g2
|
||||
echo(
|
||||
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
|
||||
echo please click [LAN settings] and disable [Automatically detect settings]
|
||||
echo(
|
||||
pause
|
||||
control inetcpl.cpl,,4
|
||||
choice
|
||||
if %errorlevel% equ 1 (
|
||||
echo(
|
||||
echo i'm about to open the [Connections] tab in [Internet Properties] for you;
|
||||
echo please click [LAN settings] and disable [Automatically detect settings]
|
||||
echo(
|
||||
pause
|
||||
control inetcpl.cpl,,4
|
||||
)
|
||||
|
||||
:g2
|
||||
net stop webclient
|
||||
net start webclient
|
||||
echo(
|
||||
|
||||
@@ -6,6 +6,10 @@ import platform
|
||||
import sys
|
||||
import time
|
||||
|
||||
# fmt: off
|
||||
_:tuple[int,int]=(0,0) # _____________________________________________________________________ hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-) ************************************************************************************************************************************************
|
||||
# fmt: on
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except:
|
||||
@@ -27,13 +31,20 @@ WINDOWS: Any = (
|
||||
else False
|
||||
)
|
||||
|
||||
VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393]
|
||||
VT100 = "--ansi" in sys.argv or (
|
||||
os.environ.get("NO_COLOR", "").lower() in ("", "0", "false")
|
||||
and sys.stdout.isatty()
|
||||
and "--no-ansi" not in sys.argv
|
||||
and (not WINDOWS or WINDOWS >= [10, 0, 14393])
|
||||
)
|
||||
# introduced in anniversary update
|
||||
|
||||
ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
|
||||
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
EXE = bool(getattr(sys, "frozen", False))
|
||||
|
||||
try:
|
||||
CORES = len(os.sched_getaffinity(0))
|
||||
except:
|
||||
|
||||
@@ -10,11 +10,9 @@ __url__ = "https://github.com/9001/copyparty/"
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import filecmp
|
||||
import locale
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
@@ -23,9 +21,10 @@ import traceback
|
||||
import uuid
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import expand_config_file, re_vol
|
||||
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .cfg import flagcats, onedash
|
||||
from .svchub import SvcHub
|
||||
from .util import (
|
||||
IMPLICATIONS,
|
||||
@@ -37,6 +36,7 @@ from .util import (
|
||||
ansi_re,
|
||||
min_ex,
|
||||
py_desc,
|
||||
pybin,
|
||||
termsize,
|
||||
wrap,
|
||||
)
|
||||
@@ -53,8 +53,9 @@ try:
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
printed: list[str] = []
|
||||
u = unicode
|
||||
printed: list[str] = []
|
||||
zsid = uuid.uuid4().urn[4:]
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -183,7 +184,7 @@ def init_E(E: EnvParams) -> None:
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
tf.extractall(tdn)
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
|
||||
return tdn
|
||||
|
||||
@@ -198,7 +199,7 @@ def init_E(E: EnvParams) -> None:
|
||||
E.mod = _unpack()
|
||||
|
||||
if sys.platform == "win32":
|
||||
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP")
|
||||
bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
|
||||
E.cfg = os.path.normpath(bdir + "/copyparty")
|
||||
elif sys.platform == "darwin":
|
||||
E.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
@@ -239,6 +240,37 @@ def get_srvname() -> str:
|
||||
return ret
|
||||
|
||||
|
||||
def get_fk_salt(cert_path) -> str:
|
||||
fp = os.path.join(E.cfg, "fk-salt.txt")
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
if os.path.exists(cert_path):
|
||||
zi = os.path.getmtime(cert_path)
|
||||
ret = "{}".format(zi).encode("utf-8")
|
||||
else:
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
return ret.decode("utf-8")
|
||||
|
||||
|
||||
def get_ah_salt() -> str:
|
||||
fp = os.path.join(E.cfg, "ah-salt.txt")
|
||||
try:
|
||||
with open(fp, "rb") as f:
|
||||
ret = f.read().strip()
|
||||
except:
|
||||
ret = base64.b64encode(os.urandom(18))
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret + b"\n")
|
||||
|
||||
return ret.decode("utf-8")
|
||||
|
||||
|
||||
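The two salt helpers added above share one read-or-create pattern; sketched here in isolation (the cert-mtime fallback is specific to get_fk_salt, which keeps filekeys stable when no salt file exists yet):

import base64, os

def load_or_make_salt(fp, fallback=None):
    # read the cached salt if present, otherwise persist the fallback or a fresh random one
    try:
        with open(fp, "rb") as f:
            ret = f.read().strip()
    except Exception:
        ret = fallback if fallback is not None else base64.b64encode(os.urandom(18))
        with open(fp, "wb") as f:
            f.write(ret + b"\n")
    return ret.decode("utf-8")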
def ensure_locale() -> None:
|
||||
safe = "en_US.UTF-8"
|
||||
for x in [
|
||||
@@ -258,30 +290,22 @@ def ensure_locale() -> None:
|
||||
warn(t.format(safe))
|
||||
|
||||
|
||||
def ensure_cert() -> None:
|
||||
def ensure_webdeps() -> None:
|
||||
ap = os.path.join(E.mod, "web/deps/mini-fa.woff")
|
||||
if os.path.exists(ap):
|
||||
return
|
||||
|
||||
warn(
|
||||
"""could not find webdeps;
|
||||
if you are running the sfx, or exe, or pypi package, or docker image,
|
||||
then this is a bug! Please let me know so I can fix it, thanks :-)
|
||||
https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md
|
||||
|
||||
however, if you are a dev, or running copyparty from source, and you want
|
||||
full client functionality, you will need to build or obtain the webdeps:
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
being massive memers (https://www.chromium.org/blink/webcrypto)
|
||||
|
||||
i feel awful about this and so should they
|
||||
"""
|
||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||
if not os.path.exists(cert_cfg):
|
||||
shutil.copy(cert_insec, cert_cfg)
|
||||
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
lprint(
|
||||
"\033[33musing default TLS certificate; https will be insecure."
|
||||
+ "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
# speaking of the default cert,
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
)
|
||||
|
||||
|
||||
def configure_ssl_ver(al: argparse.Namespace) -> None:
|
||||
@@ -354,27 +378,28 @@ def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||
def args_from_cfg(cfg_path: str) -> list[str]:
|
||||
lines: list[str] = []
|
||||
expand_config_file(lines, cfg_path, "")
|
||||
lines = upgrade_cfg_fmt(None, argparse.Namespace(vc=False), lines, "")
|
||||
|
||||
ret: list[str] = []
|
||||
skip = False
|
||||
skip = True
|
||||
for ln in lines:
|
||||
if not ln:
|
||||
sn = ln.split(" #")[0].strip()
|
||||
if sn.startswith("["):
|
||||
skip = True
|
||||
if sn.startswith("[global]"):
|
||||
skip = False
|
||||
continue
|
||||
|
||||
if ln.startswith("#"):
|
||||
if skip or not sn.split("#")[0].strip():
|
||||
continue
|
||||
|
||||
if not ln.startswith("-"):
|
||||
continue
|
||||
|
||||
if skip:
|
||||
continue
|
||||
|
||||
try:
|
||||
ret.extend(ln.split(" ", 1))
|
||||
except:
|
||||
ret.append(ln)
|
||||
for k, v in split_cfg_ln(sn).items():
|
||||
k = k.lstrip("-")
|
||||
if not k:
|
||||
continue
|
||||
prefix = "-" if k in onedash else "--"
|
||||
if v is True:
|
||||
ret.append(prefix + k)
|
||||
else:
|
||||
ret.append(prefix + k + "=" + v)
|
||||
|
||||
return ret
|
||||
|
||||
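To make the rewritten args_from_cfg() concrete, roughly how a config file is expected to translate after this change; split_cfg_ln and onedash are imported helpers whose exact behaviour is not shown in this excerpt, so the mapping below is an assumption based on the code above:

cfg_text = """
[global]
  i: 127.0.0.1
  no-reload

[accounts]
  ed: hunter2
"""
# expected argv fragment: ["-i=127.0.0.1", "--no-reload"]
#  - only the [global] section is read; [accounts] is skipped
#  - bare keys (value True) become plain flags, key:value pairs become flag=value
#  - "-i" assumes "i" is listed in onedash; other keys get the "--" prefix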
@@ -467,8 +492,9 @@ def get_sects():
"d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
"G" (upget): "get", but can see filekeys of their own uploads
"a" (admin): can see uploader IPs, config-reload

too many volflags to list here, see the other sections
too many volflags to list here, see --help-flags

example:\033[35m
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
@@ -495,68 +521,56 @@ def get_sects():
"""
volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub\033[0m

\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mhardlink\033[35m does dedup with hardlinks instead of symlinks
\033[36mneversymlink\033[35m disables symlink fallback; full copy instead
\033[36mcopydupes\033[35m disables dedup, always saves full copies of dupes
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
if global config defines a volflag for all volumes,
you can unset it for a specific volume with -flag
"""
).rstrip()
+ build_flags_desc(),
],
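To make the permission letters and volflag syntax above concrete, a hypothetical invocation (account names, passwords and paths are made-up examples; `main` is the entry point defined further down in this file, so calling it starts the server and blocks, the same as passing these flags on the command line, assuming copyparty is importable):

```python
from copyparty.__main__ import main

main([
    "copyparty",
    "-a", "ed:hunter2",                     # account "ed" with password "hunter2"
    "-v", ".::r:rw,ed",                     # current dir; anyone can read, ed gets rw
    "-v", "/mnt/inc:inc:w:rw,ed:c,nodupe",  # write-only dropbox with the nodupe volflag
])
```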
[
|
||||
"handlers",
|
||||
"use plugins to handle certain events",
|
||||
dedent(
|
||||
"""
|
||||
usually copyparty returns a \033[33m404\033[0m if a file does not exist, and
|
||||
\033[33m403\033[0m if a user tries to access a file they don't have access to
|
||||
|
||||
\033[0mupload rules:
|
||||
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
|
||||
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
|
||||
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
|
||||
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
|
||||
you can load a plugin which will be invoked right before this
|
||||
happens, and the plugin can choose to override this behavior
|
||||
|
||||
\033[0mupload rotation:
|
||||
(moves all uploads into the specified folder structure)
|
||||
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
|
||||
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
|
||||
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
|
||||
load the plugin using --args or volflags; for example \033[36m
|
||||
--on404 ~/partyhandlers/not404.py
|
||||
-v .::r:c,on404=~/partyhandlers/not404.py
|
||||
\033[0m
|
||||
the file must define the function \033[35mmain(cli,vn,rem)\033[0m:
|
||||
\033[35mcli\033[0m: the copyparty HttpCli instance
|
||||
\033[35mvn\033[0m: the VFS which overlaps with the requested URL
|
||||
\033[35mrem\033[0m: the remainder of the URL below the VFS mountpoint
|
||||
|
||||
\033[0mdatabase, general:
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2ts\033[35m disables metadata collection for existing files
|
||||
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2v\033[35m disables file verification, overrides -e2v*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mnoforget$\033[35m don't forget files when deleted from disk
|
||||
\033[36mdbd=[acid|swal|wal|yolo]\033[35m database speed-durability tradeoff
|
||||
\033[36mxlink$\033[35m cross-volume dupe detection / linking
|
||||
\033[36mxdev\033[35m do not descend into other filesystems
|
||||
\033[36mxvol\033[35m skip symlinks leaving the volume root
|
||||
`main` must return a string; one of the following:
|
||||
|
||||
\033[0mdatabase, audio tags:
|
||||
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
|
||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||
> \033[32m"true"\033[0m: the plugin has responded to the request,
|
||||
and the TCP connection should be kept open
|
||||
|
||||
\033[0mthumbnails:
|
||||
\033[36mdthumb\033[35m disables all thumbnails
|
||||
\033[36mdvthumb\033[35m disables video thumbnails
|
||||
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
|
||||
\033[36mdithumb\033[35m disables image thumbnails
|
||||
> \033[32m"false"\033[0m: the plugin has responded to the request,
|
||||
and the TCP connection should be terminated
|
||||
|
||||
\033[0mclient and ux:
|
||||
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
|
||||
\033[36mrobots\033[35m allows indexing by search engines (default)
|
||||
\033[36mnorobots\033[35m kindly asks search engines to leave
|
||||
> \033[32m"retry"\033[0m: the plugin has done something to resolve the 404
|
||||
situation, and copyparty should reattempt reading the file.
|
||||
if it still fails, a regular 404 will be returned
|
||||
|
||||
\033[0mothers:
|
||||
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||
which will then be required at the "g" permission
|
||||
\033[0m"""
|
||||
> \033[32m"allow"\033[0m: should ignore the insufficient permissions
|
||||
and let the client continue anyways
|
||||
|
||||
> \033[32m""\033[0m: the plugin has not handled the request;
|
||||
try the next plugin or return the usual 404 or 403
|
||||
|
||||
\033[1;35mPS!\033[0m the folder that contains the python file should ideally
|
||||
not contain many other python files, and especially nothing
|
||||
with filenames that overlap with modules used by copyparty
|
||||
"""
|
||||
),
|
||||
],
|
||||
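A bare-bones sketch of such a plugin, following the main(cli, vn, rem) contract and the return values listed above (the path check and the print are placeholders; a real handler would normally send a response through cli before returning "true" or "false"):

```python
def main(cli, vn, rem):
    # rem is the part of the URL below the volume mountpoint that 404'd / 403'd;
    # cli is the HttpCli for this request and vn the volume it landed in
    print("handler invoked for: " + rem)

    # possible verdicts (see the list above):
    #   "true" / "false"  -> we already sent a response ourselves
    #   "retry"           -> we fixed the cause of the 404; copyparty re-reads the file
    #   "allow"           -> ignore the insufficient permissions (403 case)
    return ""  # not handled here; try the next plugin or the usual 404/403
```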
[
@@ -567,11 +581,13 @@ def get_sects():
execute a command (a program or script) before or after various events;
\033[36mxbu\033[35m executes CMD before a file upload starts
\033[36mxau\033[35m executes CMD after a file upload finishes
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
\033[36mxbr\033[35m executes CMD before a file rename/move
\033[36mxar\033[35m executes CMD after a file rename/move
\033[36mxbd\033[35m executes CMD before a file delete
\033[36mxad\033[35m executes CMD after a file delete
\033[36mxm\033[35m executes CMD on message
\033[36mxban\033[35m executes CMD if someone gets banned
\033[0m
can be defined as --args or volflags; for example \033[36m
--xau notify-send
@@ -588,6 +604,7 @@ def get_sects():
\033[36mj\033[35m provides json with info as 1st arg instead of filepath
\033[36mwN\033[35m waits N sec after command has been started before continuing
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)

\033[36mkt\033[35m kills the entire process tree on timeout (default),
\033[36mkm\033[35m kills just the main process
@@ -598,6 +615,17 @@ def get_sects():
\033[36mc2\033[35m show only stdout
\033[36mc3\033[35m mute all process output
\033[0m
each hook is executed once for each event, except for \033[36mxiu\033[0m
which builds up a backlog of uploads, running the hook just once
as soon as the volume has been idle for iN seconds (5 by default)

\033[36mxiu\033[0m is also unique in that it will pass the metadata to the
executed program on STDIN instead of as argv arguments, and
it also includes the wark (file-id/hash) as a json property

\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
if the program returns 0 (true/OK) then the ban will NOT happen

except for \033[36mxm\033[0m, only one hook / one action can run at a time,
so it's recommended to use the \033[36mf\033[0m flag unless you really need
to wait for the hook to finish before continuing (without \033[36mf\033[0m
@@ -646,9 +674,9 @@ def get_sects():

\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what

\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes

\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
\033[32mwal\033[0m = another 21x faster on HDDs yet 90% as safe; same pitfall as \033[33mswal\033[0m except more likely

\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash

@@ -656,9 +684,86 @@ def get_sects():
"""
),
],
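As an example of the flags above, a hypothetical xau hook: without the j flag the uploaded file's path arrives as the first argument (with j it would be a JSON blob instead, and xiu delivers its JSON on stdin); the notify-send call is just a sample action:

```python
#!/usr/bin/env python3
import subprocess
import sys


def main() -> int:
    path = sys.argv[1]  # filesystem path of the file that just finished uploading
    subprocess.run(["notify-send", "upload finished", path], check=False)
    return 0  # the exit code only matters for hooks like xban (0 cancels the ban)


if __name__ == "__main__":
    sys.exit(main())
```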
[
"pwhash",
"password hashing",
dedent(
"""
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed

passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (passwords which do not start with '+') and then terminate afterwards

\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:

\033[36m--ah-alg argon2\033[0m # which is the same as:
\033[36m--ah-alg argon2,3,256,4,19\033[0m
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)

\033[36m--ah-alg scrypt\033[0m # which is the same as:
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads

\033[36m--ah-alg sha2\033[0m # which is the same as:
\033[36m--ah-alg sha2,424242\033[0m
use sha2-512 with 424242 iterations

recommended: \033[32m--ah-alg argon2\033[0m
(takes about 0.4 sec and 256M RAM to process a new password)

argon2 needs python-package argon2-cffi,
scrypt needs openssl,
sha2 is always available
"""
),
],
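For orientation, the default argon2,3,256,4,19 settings line up with the argon2-cffi parameters roughly like so (illustrative only; the salt handling and the stored hash format are copyparty's own and may differ from PasswordHasher's output):

```python
from argon2 import PasswordHasher

ph = PasswordHasher(
    time_cost=3,             # "3"   -> timecost 3
    memory_cost=256 * 1024,  # "256" -> 256 MiB (argon2-cffi counts KiB)
    parallelism=4,           # "4"   -> 4 threads
)
print(ph.hash("hunter2"))    # argon2id, version 19 (0x13) is the library default
```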
[
"zm",
"mDNS debugging",
dedent(
"""
the mDNS protocol is multicast-based, which means there are thousands
of fun and interesting ways for it to break unexpectedly

things to check if it does not work at all:

* is there a firewall blocking port 5353 on either the server or client?
(for example, clients may be able to send queries to copyparty,
but the replies could get lost)

* is multicast accidentally disabled on either the server or client?
(look for mDNS log messages saying "new client on [...]")

* the router/switch must be multicast and igmp capable

things to check if it works for a while but then it doesn't:

* is there a firewall blocking port 5353 on either the server or client?
(copyparty may be unable to see the queries from the clients, but the
clients may still be able to see the initial unsolicited announce,
so it works for about 2 minutes after startup until TTL expires)

* does the client have multiple IPs on its interface, and some of the
IPs are in subnets which the copyparty server is not a member of?

for both of the above intermittent issues, try --zm-spam 30
(not spec-compliant but nothing will mind)
"""
),
],
]


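A stdlib-only way to probe the first failure mode listed above (queries never reaching the server), assuming you run it on the copyparty host and then browse for services from a client; if nothing prints, port 5353 / multicast is being filtered before copyparty ever sees it:

```python
import socket
import struct

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # macOS may also need SO_REUSEPORT
s.bind(("", 5353))
mreq = struct.pack("4s4s", socket.inet_aton("224.0.0.251"), socket.inet_aton("0.0.0.0"))
s.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

while True:
    data, addr = s.recvfrom(4096)
    print("mDNS packet, {} bytes from {}".format(len(data), addr[0]))
```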
def build_flags_desc():
ret = ""
for grp, flags in flagcats.items():
ret += "\n\n\033[0m" + grp
for k, v in flags.items():
v = v.replace("\n", "\n ")
ret += "\n \033[36m{}\033[35m {}".format(k, v)

return ret + "\033[0m"

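build_flags_desc relies on a module-level flagcats mapping shaped like {category: {flag: description}} (defined elsewhere in copyparty, not shown in this hunk); a self-contained stand-in of the same loop, minus the ANSI colors:

```python
flagcats = {
    "upload rules:": {
        "nodupe": "rejects existing files (instead of symlinking them)",
        "nosub": "forces all uploads into the top folder of the vfs",
    },
}

desc = ""
for grp, flags in flagcats.items():
    desc += "\n\n" + grp
    for k, v in flags.items():
        desc += "\n  {:8s} {}".format(k, v.replace("\n", "\n  "))

print(desc.lstrip("\n"))
```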
# fmt: off
|
||||
|
||||
|
||||
@@ -696,6 +801,7 @@ def add_upload(ap):
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
@@ -704,10 +810,12 @@ def add_upload(ap):
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
|
||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m0\033[0m] = off and warn if enabled, [\033[32m1\033[0m] = off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
@@ -723,21 +831,44 @@ def add_network(ap):
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||
ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
|
||||
ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
|
||||
|
||||
|
||||
def add_tls(ap):
|
||||
def add_tls(ap, cert_path):
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
ap2.add_argument("--cert", metavar="PATH", type=u, default=cert_path, help="path to TLS certificate")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [\033[32mhelp\033[0m] shows available versions; default is what your python version considers safe")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [\033[32mhelp\033[0m] shows available ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
|
||||
|
||||
def add_cert(ap, cert_path):
|
||||
cert_dir = os.path.dirname(cert_path)
|
||||
ap2 = ap.add_argument_group('TLS certificate generator options')
|
||||
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
||||
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
|
||||
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
|
||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
|
||||
ap2.add_argument("--crt-cn", metavar="TXT", type=u, default="partyco", help="CA/server-cert common-name")
|
||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf options")
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
@@ -749,7 +880,7 @@ def add_zeroconf(ap):
|
||||
|
||||
|
||||
def add_zc_mdns(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
|
||||
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
@@ -763,8 +894,9 @@ def add_zc_mdns(ap):
|
||||
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
|
||||
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
|
||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server")
|
||||
|
||||
|
||||
def add_zc_ssdp(ap):
|
||||
@@ -774,7 +906,7 @@ def add_zc_ssdp(ap):
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=uuid.uuid4().urn[4:], help="USN (device identifier) to announce")
|
||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
||||
|
||||
|
||||
def add_ftp(ap):
|
||||
@@ -782,6 +914,7 @@ def add_ftp(ap):
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
@@ -789,9 +922,11 @@ def add_ftp(ap):
|
||||
|
||||
def add_webdav(ap):
|
||||
ap2 = ap.add_argument_group('WebDAV options')
|
||||
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||
ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
|
||||
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
||||
ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")
|
||||
ap2.add_argument("--dav-auth", action="store_true", help="force auth for all folders (required by davfs2 when only some folders are world-readable) (volflag=davauth)")
|
||||
|
||||
|
||||
def add_smb(ap):
|
||||
@@ -807,15 +942,33 @@ def add_smb(ap):
|
||||
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
||||
|
||||
|
||||
def add_handlers(ap):
|
||||
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
|
||||
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing PY file")
|
||||
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing PY file")
|
||||
ap2.add_argument("--hot-handlers", action="store_true", help="reload handlers on each request -- expensive but convenient when hacking on stuff")
|
||||
|
||||
|
||||
def add_hooks(ap):
|
||||
ap2 = ap.add_argument_group('hooks (see --help-hooks)')
|
||||
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404/403/url)")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
|
||||
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
|
||||
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
|
||||
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
|
||||
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
|
||||
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
|
||||
|
||||
|
||||
def add_yolo(ap):
|
||||
@@ -831,21 +984,23 @@ def add_optouts(ap):
|
||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show --name in <title>")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
|
||||
|
||||
|
||||
def add_safety(ap, fk_salt):
|
||||
def add_safety(ap):
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 --turbo=-1 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
@@ -856,24 +1011,42 @@ def add_safety(ap, fk_salt):
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
|
||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (422 is server fuzzing, invalid POSTs and so)")
|
||||
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (decent replacement for --ban-404 if that can't be used)")
|
||||
ap2.add_argument("--sus-urls", metavar="REGEX", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--nonsus-urls", metavar="REGEX", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
|
||||
|
||||
|
||||
def add_salt(ap, fk_salt, ah_salt):
|
||||
ap2 = ap.add_argument_group('salting options')
|
||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
|
||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
|
||||
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
||||
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
||||
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||
|
||||
|
||||
def add_shutdown(ap):
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example [\033[32midx\033[0m] will do volume indexing + metadata analysis")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
||||
|
||||
|
||||
def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-tdec", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
@@ -892,10 +1065,10 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image (volflag=nocrop)")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
@@ -904,46 +1077,46 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
||||
|
||||
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs) (volflag=hist)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
||||
|
||||
|
||||
def add_db_metadata(ap):
|
||||
@@ -959,7 +1132,7 @@ def add_db_metadata(ap):
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/ffprobe parsers")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash,up_ip,.up_at")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
@@ -967,26 +1140,33 @@ def add_db_metadata(ap):
|
||||
|
||||
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms modals popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms modals popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with -nb)")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
||||
|
||||
|
||||
def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||
@@ -1012,10 +1192,10 @@ def run_argparse(
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
)

try:
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
except:
fk_salt = "hunter2"
cert_path = os.path.join(E.cfg, "cert.pem")

fk_salt = get_fk_salt(cert_path)
ah_salt = get_ah_salt()

hcores = min(CORES, 4)  # optimal on py3.11 @ r5-4500U

@@ -1025,7 +1205,8 @@ def run_argparse(

add_general(ap, nc, srvname)
add_network(ap)
add_tls(ap)
add_tls(ap, cert_path)
add_cert(ap, cert_path)
add_qr(ap, tty)
add_zeroconf(ap)
add_zc_mdns(ap)
@@ -1038,11 +1219,14 @@ def run_argparse(
add_ftp(ap)
add_webdav(ap)
add_smb(ap)
add_safety(ap, fk_salt)
add_safety(ap)
add_salt(ap, fk_salt, ah_salt)
add_optouts(ap)
add_shutdown(ap)
add_yolo(ap)
add_handlers(ap)
add_hooks(ap)
add_stats(ap)
add_ui(ap, retry)
add_admin(ap)
add_logging(ap)
@@ -1105,9 +1289,12 @@ def main(argv: Optional[list[str]] = None) -> None:
showlic()
sys.exit(0)

if EXE:
print("pybin: {}\n".format(pybin), end="")

ensure_locale()
if HAVE_SSL:
ensure_cert()

ensure_webdeps()

for k, v in zip(argv[1:], argv[2:]):
if k == "-c" and os.path.isfile(v):
@@ -1120,16 +1307,22 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v)
argv.extend(supp)

deprecated: list[tuple[str, str]] = []
deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
for dk, nk in deprecated:
try:
idx = argv.index(dk)
except:
idx = -1
ov = ""
for n, k in enumerate(argv):
if k == dk or k.startswith(dk + "="):
idx = n
if "=" in k:
ov = "=" + k.split("=", 1)[1]

if idx < 0:
continue

msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
lprint(msg.format(dk, nk))
argv[idx] = nk
argv[idx] = nk + ov
time.sleep(2)

da = len(argv) == 1
@@ -1137,7 +1330,8 @@ def main(argv: Optional[list[str]] = None) -> None:
if da:
argv.extend(["--qr"])
if ANYWIN or not os.geteuid():
argv.extend(["-p80,443,3923", "--ign-ebind"])
# win10 allows symlinks if admin; can be unexpected
argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
except:
pass

@@ -1159,6 +1353,7 @@ def main(argv: Optional[list[str]] = None) -> None:
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
try:
al = run_argparse(argv, fmtr, retry, nc)
dal = run_argparse([], fmtr, retry, nc)
break
except SystemExit:
raise
@@ -1168,17 +1363,23 @@ def main(argv: Optional[list[str]] = None) -> None:

try:
assert al  # type: ignore
assert dal  # type: ignore
al.E = E  # __init__ is not shared when oxidized
except:
sys.exit(1)

if WINDOWS and not al.keep_qem:
if al.ansi:
al.no_ansi = False
elif not al.no_ansi:
al.ansi = VT100

if WINDOWS and not al.keep_qem and not al.ah_cli:
try:
disable_quickedit()
except:
lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")

if not VT100:
if al.ansi:
al.wintitle = ""

nstrs: list[str] = []
@@ -1197,11 +1398,9 @@ def main(argv: Optional[list[str]] = None) -> None:
if re.match("c[^,]", opt):
mod = True
na.append("c," + opt[1:])
elif re.sub("^[rwmdgG]*", "", opt) and "," not in opt:
elif re.sub("^[rwmdgGa]*", "", opt) and "," not in opt:
mod = True
perm = opt[0]
if perm == "a":
perm = "rw"
na.append(perm + "," + opt[1:])
else:
na.append(opt)
@@ -1257,6 +1456,7 @@ def main(argv: Optional[list[str]] = None) -> None:
configure_ssl_ciphers(al)
else:
warn("ssl module does not exist; cannot enable https")
al.http_only = True

if PY2 and WINDOWS and al.e2d:
warn(
@@ -1273,7 +1473,7 @@ def main(argv: Optional[list[str]] = None) -> None:

# signal.signal(signal.SIGINT, sighandler)

SvcHub(al, argv, "".join(printed)).run()
SvcHub(al, dal, argv, "".join(printed)).run()


if __name__ == "__main__":

@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 6, 2)
CODENAME = "cors k"
BUILD_DT = (2023, 1, 29)
VERSION = (1, 9, 4)
CODENAME = "prometheable"
BUILD_DT = (2023, 9, 2)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
File diff suppressed because it is too large
@@ -9,7 +9,7 @@ import queue

from .__init__ import CORES, TYPE_CHECKING
from .broker_mpw import MpWorker
from .broker_util import try_exec
from .broker_util import ExceptionalQueue, try_exec
from .util import Daemon, mp

if TYPE_CHECKING:
@@ -107,6 +107,19 @@ class BrokerMp(object):
if retq_id:
proc.q_pend.put((retq_id, "retq", rv))

def ask(self, dest: str, *args: Any) -> ExceptionalQueue:

# new non-ipc invoking managed service in hub
obj = self.hub
for node in dest.split("."):
obj = getattr(obj, node)

rv = try_exec(True, obj, *args)

retq = ExceptionalQueue(1)
retq.put(rv)
return retq

def say(self, dest: str, *args: Any) -> None:
"""
send message to non-hub component in other process,
copyparty/cert.py (new file, 226 lines)
@@ -0,0 +1,226 @@
|
||||
import calendar
|
||||
import errno
|
||||
import filecmp
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
|
||||
from .util import Netdev, runcmd
|
||||
|
||||
HAVE_CFSSL = True
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from .util import RootLogger
|
||||
|
||||
|
||||
def ensure_cert(log: "RootLogger", args) -> None:
|
||||
"""
|
||||
the default cert (and the entire TLS support) is only here to enable the
|
||||
crypto.subtle javascript API, which is necessary due to the webkit guys
|
||||
being massive memers (https://www.chromium.org/blink/webcrypto)
|
||||
|
||||
i feel awful about this and so should they
|
||||
"""
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
cert_appdata = os.path.join(args.E.cfg, "cert.pem")
|
||||
if not os.path.isfile(args.cert):
|
||||
if cert_appdata != args.cert:
|
||||
raise Exception("certificate file does not exist: " + args.cert)
|
||||
|
||||
shutil.copy(cert_insec, args.cert)
|
||||
|
||||
with open(args.cert, "rb") as f:
|
||||
buf = f.read()
|
||||
o1 = buf.find(b" PRIVATE KEY-")
|
||||
o2 = buf.find(b" CERTIFICATE-")
|
||||
m = "unsupported certificate format: "
|
||||
if o1 < 0:
|
||||
raise Exception(m + "no private key inside pem")
|
||||
if o2 < 0:
|
||||
raise Exception(m + "no server certificate inside pem")
|
||||
if o1 > o2:
|
||||
raise Exception(m + "private key must appear before server certificate")
|
||||
|
||||
try:
|
||||
if filecmp.cmp(args.cert, cert_insec):
|
||||
t = "using default TLS certificate; https will be insecure:\033[36m {}"
|
||||
log("cert", t.format(args.cert), 3)
|
||||
except:
|
||||
pass
|
||||
|
||||
# speaking of the default cert,
|
||||
# printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|
||||
|
||||
|
||||
def _read_crt(args, fn):
|
||||
try:
|
||||
if not os.path.exists(os.path.join(args.crt_dir, fn)):
|
||||
return 0, {}
|
||||
|
||||
acmd = ["cfssl-certinfo", "-cert", fn]
|
||||
rc, so, se = runcmd(acmd, cwd=args.crt_dir)
|
||||
if rc:
|
||||
return 0, {}
|
||||
|
||||
inf = json.loads(so)
|
||||
zs = inf["not_after"]
|
||||
expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
|
||||
return expiry, inf
|
||||
except OSError as ex:
|
||||
if ex.errno == errno.ENOENT:
|
||||
raise
|
||||
return 0, {}
|
||||
except:
|
||||
return 0, {}
|
||||
|
||||
|
||||
def _gen_ca(log: "RootLogger", args):
|
||||
expiry = _read_crt(args, "ca.pem")[0]
|
||||
if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
|
||||
return
|
||||
|
||||
backdate = "{}m".format(int(args.crt_back * 60))
|
||||
expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
|
||||
cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
|
||||
algo, ksz = args.crt_alg.split("-")
|
||||
req = {
|
||||
"CN": cn,
|
||||
"CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
|
||||
"key": {"algo": algo, "size": int(ksz)},
|
||||
"names": [{"O": cn}],
|
||||
}
|
||||
sin = json.dumps(req).encode("utf-8")
|
||||
log("cert", "creating new ca ...", 6)
|
||||
|
||||
cmd = "cfssl gencert -initca -"
|
||||
rc, so, se = runcmd(cmd.split(), 30, sin=sin)
|
||||
if rc:
|
||||
raise Exception("failed to create ca-cert: {}, {}".format(rc, se), 3)
|
||||
|
||||
cmd = "cfssljson -bare ca"
|
||||
sin = so.encode("utf-8")
|
||||
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to translate ca-cert: {}, {}".format(rc, se), 3)
|
||||
|
||||
bname = os.path.join(args.crt_dir, "ca")
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
log("cert", "new ca OK", 2)
|
||||
|
||||
|
||||
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
names = args.crt_ns.split(",") if args.crt_ns else []
|
||||
if not args.crt_exact:
|
||||
for n in names[:]:
|
||||
names.append("*.{}".format(n))
|
||||
if not args.crt_noip:
|
||||
for ip in netdevs.keys():
|
||||
names.append(ip.split("/")[0])
|
||||
if args.crt_nolo:
|
||||
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
|
||||
if not args.crt_nohn:
|
||||
names.append(args.name)
|
||||
names.append(args.name + ".local")
|
||||
if not names:
|
||||
names = ["127.0.0.1"]
|
||||
if "127.0.0.1" in names or "::1" in names:
|
||||
names.append("localhost")
|
||||
names = list({x: 1 for x in names}.keys())
|
||||
|
||||
try:
|
||||
expiry, inf = _read_crt(args, "srv.pem")
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
for n in names:
|
||||
if n not in inf["sans"]:
|
||||
raise Exception("does not have {}".format(n))
|
||||
if expired:
|
||||
raise Exception("old server-cert has expired")
|
||||
if not filecmp.cmp(args.cert, cert_insec):
|
||||
return
|
||||
except Exception as ex:
|
||||
log("cert", "will create new server-cert; {}".format(ex))
|
||||
|
||||
log("cert", "creating server-cert ...", 6)
|
||||
|
||||
backdate = "{}m".format(int(args.crt_back * 60))
|
||||
expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
|
||||
cfg = {
|
||||
"signing": {
|
||||
"default": {
|
||||
"backdate": backdate,
|
||||
"expiry": expiry,
|
||||
"usages": ["signing", "key encipherment", "server auth"],
|
||||
}
|
||||
}
|
||||
}
|
||||
with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
|
||||
f.write(json.dumps(cfg).encode("utf-8"))
|
||||
|
||||
cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
|
||||
algo, ksz = args.crt_alg.split("-")
|
||||
req = {
|
||||
"key": {"algo": algo, "size": int(ksz)},
|
||||
"names": [{"O": cn}],
|
||||
}
|
||||
sin = json.dumps(req).encode("utf-8")
|
||||
|
||||
cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
|
||||
acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
|
||||
rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to create cert: {}, {}".format(rc, se))
|
||||
|
||||
cmd = "cfssljson -bare srv"
|
||||
sin = so.encode("utf-8")
|
||||
rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
|
||||
if rc:
|
||||
raise Exception("failed to translate cert: {}, {}".format(rc, se))
|
||||
|
||||
bname = os.path.join(args.crt_dir, "srv")
|
||||
try:
|
||||
os.unlink(bname + ".key")
|
||||
except:
|
||||
pass
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
|
||||
ca = f.read()
|
||||
|
||||
with open(bname + ".key", "rb") as f:
|
||||
skey = f.read()
|
||||
|
||||
with open(bname + ".pem", "rb") as f:
|
||||
scrt = f.read()
|
||||
|
||||
with open(args.cert, "wb") as f:
|
||||
f.write(skey + scrt + ca)
|
||||
|
||||
log("cert", "new server-cert OK", 2)
|
||||
|
||||
|
||||
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
global HAVE_CFSSL
|
||||
|
||||
if args.http_only:
|
||||
return
|
||||
|
||||
if args.no_crt or not HAVE_CFSSL:
|
||||
ensure_cert(log, args)
|
||||
return
|
||||
|
||||
try:
|
||||
_gen_ca(log, args)
|
||||
_gen_srv(log, args, netdevs)
|
||||
except Exception as ex:
|
||||
HAVE_CFSSL = False
|
||||
log("cert", "could not create TLS certificates: {}".format(ex), 3)
|
||||
if getattr(ex, "errno", 0) == errno.ENOENT:
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
|
||||
log("cert", t, 6)
|
||||
|
||||
ensure_cert(log, args)
|
||||
copyparty/cfg.py (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
|
||||
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
|
||||
onedash = set(zs.split())
|
||||
|
||||
|
||||
def vf_bmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple bools"""
|
||||
ret = {
|
||||
"never_symlink": "neversymlink",
|
||||
"no_dedup": "copydupes",
|
||||
"no_dupe": "nodupe",
|
||||
"no_forget": "noforget",
|
||||
"th_no_crop": "nocrop",
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
}
|
||||
for k in (
|
||||
"dotsrch",
|
||||
"e2t",
|
||||
"e2ts",
|
||||
"e2tsr",
|
||||
"e2v",
|
||||
"e2vu",
|
||||
"e2vp",
|
||||
"grid",
|
||||
"hardlink",
|
||||
"magic",
|
||||
"no_sb_md",
|
||||
"no_sb_lg",
|
||||
"rand",
|
||||
"xdev",
|
||||
"xlink",
|
||||
"xvol",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
|
||||
def vf_vmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple values"""
|
||||
ret = {"th_convt": "convt", "th_size": "thsize"}
|
||||
for k in ("dbd", "lg_sbf", "md_sbf", "nrand", "unlist"):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
|
||||
def vf_cmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: complex/lists"""
|
||||
ret = {}
|
||||
for k in ("html_head", "mte", "mth"):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
|
||||
permdescs = {
|
||||
"r": "read; list folder contents, download files",
|
||||
"w": 'write; upload files; need "r" to see the uploads',
|
||||
"m": 'move; move files and folders; need "w" at destination',
|
||||
"d": "delete; permanently delete files and folders",
|
||||
"g": "get; download files, but cannot see folder contents",
|
||||
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
||||
}
|
||||
|
||||
|
||||
flagcats = {
|
||||
"uploads, general": {
|
||||
"nodupe": "rejects existing files (instead of symlinking them)",
|
||||
"hardlink": "does dedup with hardlinks instead of symlinks",
|
||||
"neversymlink": "disables symlink fallback; full copy instead",
|
||||
"copydupes": "disables dedup, always saves full copies of dupes",
|
||||
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
|
||||
"nosub": "forces all uploads into the top folder of the vfs",
|
||||
"magic": "enables filetype detection for nameless uploads",
|
||||
"gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
|
||||
"pk": "forces server-side compression, optional arg: xz,9",
|
||||
},
|
||||
"upload rules": {
|
||||
"maxn=250,600": "max 250 uploads over 15min",
|
||||
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
|
||||
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
|
||||
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||
"rand": "force randomized filenames, 9 chars long by default",
|
||||
"nrand=N": "randomized filenames are N chars long",
|
||||
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||
"df=1g": "ensure 1 GiB free disk space",
|
||||
},
|
||||
"upload rotation\n(moves all uploads into the specified folder structure)": {
|
||||
"rotn=100,3": "3 levels of subfolders with 100 entries in each",
|
||||
"rotf=%Y-%m/%d-%H": "date-formatted organizing",
|
||||
"lifetime=3600": "uploads are deleted after 1 hour",
|
||||
},
|
||||
"database, general": {
|
||||
"e2d": "enable database; makes files searchable + enables upload dedup",
|
||||
"e2ds": "scan writable folders for new files on startup; also sets -e2d",
|
||||
"e2dsa": "scans all folders for new files on startup; also sets -e2d",
|
||||
"e2t": "enable multimedia indexing; makes it possible to search for tags",
|
||||
"e2ts": "scan existing files for tags on startup; also sets -e2t",
|
||||
"e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
|
||||
"d2ts": "disables metadata collection for existing files",
|
||||
"d2ds": "disables onboot indexing, overrides -e2ds*",
|
||||
"d2t": "disables metadata collection, overrides -e2t*",
|
||||
"d2v": "disables file verification, overrides -e2v*",
|
||||
"d2d": "disables all database stuff, overrides -e2*",
|
||||
"hist=/tmp/cdb": "puts thumbnails and indexes at that location",
|
||||
"scan=60": "scan for new files every 60sec, same as --re-maxage",
|
||||
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
||||
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
||||
"noforget": "don't forget files when deleted from disk",
|
||||
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||
"xlink": "cross-volume dupe detection / linking",
|
||||
"xdev": "do not descend into other filesystems",
|
||||
"xvol": "do not follow symlinks leaving the volume root",
|
||||
"dotsrch": "show dotfiles in search results",
|
||||
"nodotsrch": "hide dotfiles in search results (default)",
|
||||
},
|
||||
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
|
||||
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
|
||||
"mtp=ahash,vhash=media-hash.py": "collects two tags at once",
|
||||
},
|
||||
"thumbnails": {
|
||||
"dthumb": "disables all thumbnails",
|
||||
"dvthumb": "disables video thumbnails",
|
||||
"dathumb": "disables audio thumbnails (spectrograms)",
|
||||
"dithumb": "disables image thumbnails",
|
||||
"thsize": "thumbnail res; WxH",
|
||||
"nocrop": "disable center-cropping",
|
||||
"convt": "conversion timeout in seconds",
|
||||
},
|
||||
"handlers\n(better explained in --help-handlers)": {
|
||||
"on404=PY": "handle 404s by executing PY file",
|
||||
"on403=PY": "handle 403s by executing PY file",
|
||||
},
|
||||
"event hooks\n(better explained in --help-hooks)": {
|
||||
"xbu=CMD": "execute CMD before a file upload starts",
|
||||
"xau=CMD": "execute CMD after a file upload finishes",
|
||||
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
|
||||
"xbr=CMD": "execute CMD before a file rename/move",
|
||||
"xar=CMD": "execute CMD after a file rename/move",
|
||||
"xbd=CMD": "execute CMD before a file delete",
|
||||
"xad=CMD": "execute CMD after a file delete",
|
||||
"xm=CMD": "execute CMD on message",
|
||||
"xban=CMD": "execute CMD if someone gets banned",
|
||||
},
|
||||
"client and ux": {
|
||||
"grid": "show grid/thumbnails by default",
|
||||
"unlist": "dont list files matching REGEX",
|
||||
"html_head=TXT": "includes TXT in the <head>",
|
||||
"robots": "allows indexing by search engines (default)",
|
||||
"norobots": "kindly asks search engines to leave",
|
||||
"no_sb_md": "disable js sandbox for markdown files",
|
||||
"no_sb_lg": "disable js sandbox for prologue/epilogue",
|
||||
"sb_md": "enable js sandbox for markdown files (default)",
|
||||
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||
"nohtml": "return html and markdown as text/html",
|
||||
},
|
||||
"others": {
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
|
||||
"davauth": "ask webdav clients to login for all folders",
|
||||
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
|
||||
@@ -2,29 +2,40 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||
|
||||
try:
|
||||
import asynchat
|
||||
except:
|
||||
sys.path.append(os.path.join(E.mod, "vend"))
|
||||
|
||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import PY2, TYPE_CHECKING, E
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import Daemon, Pebkac, exclude_dotfiles, fsenc, ipnorm
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
from .util import (
|
||||
Daemon,
|
||||
Pebkac,
|
||||
exclude_dotfiles,
|
||||
fsenc,
|
||||
ipnorm,
|
||||
pybin,
|
||||
relchk,
|
||||
runhook,
|
||||
sanitize_fn,
|
||||
vjoin,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -34,6 +45,12 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional
|
||||
|
||||
|
||||
class FSE(FilesystemError):
|
||||
def __init__(self, msg: str, severity: int = 0) -> None:
|
||||
super(FilesystemError, self).__init__(msg)
|
||||
self.severity = severity
|
||||
|
||||
|
||||
class FtpAuth(DummyAuthorizer):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
super(FtpAuth, self).__init__()
|
||||
@@ -43,6 +60,7 @@ class FtpAuth(DummyAuthorizer):
|
||||
self, username: str, password: str, handler: Any
|
||||
) -> None:
|
||||
handler.username = "{}:{}".format(username, password)
|
||||
handler.uname = "*"
|
||||
|
||||
ip = handler.addr[0]
|
||||
if ip.startswith("::ffff:"):
|
||||
@@ -59,10 +77,13 @@ class FtpAuth(DummyAuthorizer):
|
||||
raise AuthenticationFailed("banned")
|
||||
|
||||
asrv = self.hub.asrv
|
||||
if username == "anonymous":
|
||||
uname = "*"
|
||||
else:
|
||||
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*"
|
||||
uname = "*"
|
||||
if username != "anonymous":
|
||||
for zs in (password, username):
|
||||
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||
if zs:
|
||||
uname = zs
|
||||
break
|
||||
|
||||
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
||||
g = self.hub.gpwd
|
||||
@@ -74,14 +95,14 @@ class FtpAuth(DummyAuthorizer):
|
||||
|
||||
raise AuthenticationFailed("Authentication failed.")
|
||||
|
||||
handler.username = uname
|
||||
handler.uname = handler.username = uname
|
||||
|
||||
def get_home_dir(self, username: str) -> str:
|
||||
return "/"
|
||||
|
||||
def has_user(self, username: str) -> bool:
|
||||
asrv = self.hub.asrv
|
||||
return username in asrv.acct
|
||||
return username in asrv.acct or username in asrv.iacct
|
||||
|
||||
def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool:
|
||||
return True # handled at filesystem layer
|
||||
@@ -100,17 +121,18 @@ class FtpFs(AbstractedFS):
|
||||
def __init__(
|
||||
self, root: str, cmd_channel: Any
|
||||
) -> None: # pylint: disable=super-init-not-called
|
||||
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.h = cmd_channel # type: FTPHandler
|
||||
self.cmd_channel = cmd_channel # type: FTPHandler
|
||||
self.hub: "SvcHub" = cmd_channel.hub
|
||||
self.args = cmd_channel.args
|
||||
|
||||
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
|
||||
self.uname = cmd_channel.uname
|
||||
|
||||
self.cwd = "/" # pyftpdlib convention of leading slash
|
||||
self.root = "/var/lib/empty"
|
||||
|
||||
self.can_read = self.can_write = self.can_move = False
|
||||
self.can_delete = self.can_get = self.can_upget = False
|
||||
self.can_admin = False
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
@@ -122,16 +144,36 @@ class FtpFs(AbstractedFS):
|
||||
w: bool = False,
|
||||
m: bool = False,
|
||||
d: bool = False,
|
||||
) -> str:
|
||||
) -> tuple[str, VFS, str]:
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
vpath = vpath.replace("\\", "/").strip("/")
|
||||
rd, fn = os.path.split(vpath)
|
||||
if ANYWIN and relchk(rd):
|
||||
logging.warning("malicious vpath: %s", vpath)
|
||||
t = "Unsupported characters in [{}]"
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
||||
vpath = vjoin(rd, fn)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||
if not vfs.realpath:
|
||||
raise FilesystemError("no filesystem mounted at this path")
|
||||
t = "No filesystem mounted at [{}]"
|
||||
raise FSE(t.format(vpath))
|
||||
|
||||
return os.path.join(vfs.realpath, rem)
|
||||
if "xdev" in vfs.flags or "xvol" in vfs.flags:
|
||||
ap = vfs.canonical(rem)
|
||||
avfs = vfs.chk_ap(ap)
|
||||
t = "Permission denied in [{}]"
|
||||
if not avfs:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
|
||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
return os.path.join(vfs.realpath, rem), vfs, rem
|
||||
except Pebkac as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def rv2a(
|
||||
self,
|
||||
@@ -140,7 +182,7 @@ class FtpFs(AbstractedFS):
|
||||
w: bool = False,
|
||||
m: bool = False,
|
||||
d: bool = False,
|
||||
) -> str:
|
||||
) -> tuple[str, VFS, str]:
|
||||
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
||||
|
||||
def ftp2fs(self, ftppath: str) -> str:
|
||||
@@ -154,7 +196,7 @@ class FtpFs(AbstractedFS):
|
||||
def validpath(self, path: str) -> bool:
|
||||
if "/.hist/" in path:
|
||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||
raise FilesystemError("access to this file is forbidden")
|
||||
raise FSE("Access to this file is forbidden", 1)
|
||||
|
||||
return True
|
||||
|
||||
@@ -162,7 +204,7 @@ class FtpFs(AbstractedFS):
|
||||
r = "r" in mode
|
||||
w = "w" in mode or "a" in mode or "+" in mode
|
||||
|
||||
ap = self.rv2a(filename, r, w)
|
||||
ap = self.rv2a(filename, r, w)[0]
|
||||
if w:
|
||||
try:
|
||||
st = bos.stat(ap)
|
||||
@@ -171,7 +213,7 @@ class FtpFs(AbstractedFS):
|
||||
td = 0
|
||||
|
||||
if td < -1 or td > self.args.ftp_wt:
|
||||
raise FilesystemError("cannot open existing file for writing")
|
||||
raise FSE("Cannot open existing file for writing")
|
||||
|
||||
self.validpath(ap)
|
||||
return open(fsenc(ap), mode)
|
||||
@@ -180,9 +222,17 @@ class FtpFs(AbstractedFS):
|
||||
nwd = join(self.cwd, path)
|
||||
vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False)
|
||||
ap = vfs.canonical(rem)
|
||||
if not bos.path.isdir(ap):
|
||||
try:
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise Exception()
|
||||
except:
|
||||
# returning 550 is library-default and suitable
|
||||
raise FilesystemError("Failed to change directory")
|
||||
raise FSE("No such file or directory")
|
||||
|
||||
avfs = vfs.chk_ap(ap, st)
|
||||
if not avfs:
|
||||
raise FSE("Permission denied", 1)
|
||||
|
||||
self.cwd = nwd
|
||||
(
|
||||
@@ -192,16 +242,19 @@ class FtpFs(AbstractedFS):
|
||||
self.can_delete,
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
||||
self.can_admin,
|
||||
) = avfs.can_access("", self.h.uname)
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, w=True)
|
||||
bos.mkdir(ap)
|
||||
ap = self.rv2a(path, w=True)[0]
|
||||
bos.makedirs(ap) # filezilla expects this
|
||||
|
||||
def listdir(self, path: str) -> list[str]:
|
||||
vpath = join(self.cwd, path).lstrip("/")
|
||||
vpath = join(self.cwd, path)
|
||||
try:
|
||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
|
||||
ap, vfs, rem = self.v2a(vpath, True, False)
|
||||
if not bos.path.isdir(ap):
|
||||
raise FSE("No such file or directory", 1)
|
||||
|
||||
fsroot, vfs_ls1, vfs_virt = vfs.ls(
|
||||
rem,
|
||||
@@ -217,8 +270,12 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
vfs_ls.sort()
|
||||
return vfs_ls
|
||||
except:
|
||||
if vpath:
|
||||
except Exception as ex:
|
||||
# panic on malicious names
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
if vpath.strip("/"):
|
||||
# display write-only folders as empty
|
||||
return []
|
||||
|
||||
@@ -227,43 +284,49 @@ class FtpFs(AbstractedFS):
|
||||
return list(sorted(list(r.keys())))
|
||||
|
||||
def rmdir(self, path: str) -> None:
|
||||
ap = self.rv2a(path, d=True)
|
||||
bos.rmdir(ap)
|
||||
ap = self.rv2a(path, d=True)[0]
|
||||
try:
|
||||
bos.rmdir(ap)
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
def remove(self, path: str) -> None:
|
||||
if self.args.no_del:
|
||||
raise FilesystemError("the delete feature is disabled in server config")
|
||||
raise FSE("The delete feature is disabled in server config")
|
||||
|
||||
vp = join(self.cwd, path).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp], [])
|
||||
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False)
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def rename(self, src: str, dst: str) -> None:
|
||||
if not self.can_move:
|
||||
raise FilesystemError("not allowed for user " + self.h.username)
|
||||
raise FSE("Not allowed for user " + self.h.uname)
|
||||
|
||||
if self.args.no_mv:
|
||||
t = "the rename/move feature is disabled in server config"
|
||||
raise FilesystemError(t)
|
||||
raise FSE("The rename/move feature is disabled in server config")
|
||||
|
||||
svp = join(self.cwd, src).lstrip("/")
|
||||
dvp = join(self.cwd, dst).lstrip("/")
|
||||
try:
|
||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||
except Exception as ex:
|
||||
raise FilesystemError(str(ex))
|
||||
raise FSE(str(ex))
|
||||
|
||||
def chmod(self, path: str, mode: str) -> None:
|
||||
pass
|
||||
|
||||
def stat(self, path: str) -> os.stat_result:
|
||||
try:
|
||||
ap = self.rv2a(path, r=True)
|
||||
ap = self.rv2a(path, r=True)[0]
|
||||
return bos.stat(ap)
|
||||
except:
|
||||
ap = self.rv2a(path)
|
||||
except FSE as ex:
|
||||
if ex.severity:
|
||||
raise
|
||||
|
||||
ap = self.rv2a(path)[0]
|
||||
st = bos.stat(ap)
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise
|
||||
@@ -271,44 +334,50 @@ class FtpFs(AbstractedFS):
|
||||
return st
|
||||
|
||||
def utime(self, path: str, timeval: float) -> None:
|
||||
ap = self.rv2a(path, w=True)
|
||||
ap = self.rv2a(path, w=True)[0]
|
||||
return bos.utime(ap, (timeval, timeval))
|
||||
|
||||
def lstat(self, path: str) -> os.stat_result:
|
||||
ap = self.rv2a(path)
|
||||
ap = self.rv2a(path)[0]
|
||||
return bos.stat(ap)
|
||||
|
||||
def isfile(self, path: str) -> bool:
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISREG(st.st_mode)
|
||||
except:
|
||||
except Exception as ex:
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
return False # expected for mojibake in ftp_SIZE()
|
||||
|
||||
def islink(self, path: str) -> bool:
|
||||
ap = self.rv2a(path)
|
||||
ap = self.rv2a(path)[0]
|
||||
return bos.path.islink(ap)
|
||||
|
||||
def isdir(self, path: str) -> bool:
|
||||
try:
|
||||
st = self.stat(path)
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
except:
|
||||
except Exception as ex:
|
||||
if getattr(ex, "severity", 0):
|
||||
raise
|
||||
|
||||
return True
|
||||
|
||||
def getsize(self, path: str) -> int:
|
||||
ap = self.rv2a(path)
|
||||
ap = self.rv2a(path)[0]
|
||||
return bos.path.getsize(ap)
|
||||
|
||||
def getmtime(self, path: str) -> float:
|
||||
ap = self.rv2a(path)
|
||||
ap = self.rv2a(path)[0]
|
||||
return bos.path.getmtime(ap)
|
||||
|
||||
def realpath(self, path: str) -> str:
|
||||
return path
|
||||
|
||||
def lexists(self, path: str) -> bool:
|
||||
ap = self.rv2a(path)
|
||||
ap = self.rv2a(path)[0]
|
||||
return bos.path.lexists(ap)
|
||||
|
||||
def get_user_by_uid(self, uid: int) -> str:
|
||||
@@ -322,16 +391,21 @@ class FtpHandler(FTPHandler):
|
||||
abstracted_fs = FtpFs
|
||||
hub: "SvcHub"
|
||||
args: argparse.Namespace
|
||||
uname: str
|
||||
|
||||
def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None:
|
||||
self.hub: "SvcHub" = FtpHandler.hub
|
||||
self.args: argparse.Namespace = FtpHandler.args
|
||||
self.uname = "*"
|
||||
|
||||
if PY2:
|
||||
FTPHandler.__init__(self, conn, server, ioloop)
|
||||
else:
|
||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||
|
||||
cip = self.remote_ip
|
||||
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
||||
|
||||
# abspath->vpath mapping to resolve log_transfer paths
|
||||
self.vfs_map: dict[str, str] = {}
|
||||
|
||||
@@ -341,8 +415,24 @@ class FtpHandler(FTPHandler):
|
||||
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
||||
# Optional[str]
|
||||
vp = join(self.fs.cwd, file).lstrip("/")
|
||||
ap = self.fs.v2a(vp)
|
||||
ap, vfs, rem = self.fs.v2a(vp, w=True)
|
||||
self.vfs_map[ap] = vp
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
None,
|
||||
xbu,
|
||||
ap,
|
||||
vfs.canonical(rem),
|
||||
"",
|
||||
self.uname,
|
||||
0,
|
||||
0,
|
||||
self.cli_ip,
|
||||
0,
|
||||
"",
|
||||
):
|
||||
raise FSE("Upload blocked by xbu server config")
|
||||
|
||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
||||
@@ -363,15 +453,17 @@ class FtpHandler(FTPHandler):
|
||||
# print("xfer_end: {} => {}".format(ap, vp))
|
||||
if vp:
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
vfs.vpath,
|
||||
vfs.flags,
|
||||
rem,
|
||||
fn,
|
||||
self.remote_ip,
|
||||
self.cli_ip,
|
||||
time.time(),
|
||||
self.uname,
|
||||
)
|
||||
|
||||
return FTPHandler.log_transfer(
|
||||
@@ -402,10 +494,10 @@ class Ftpd(object):
|
||||
h1 = SftpHandler
|
||||
except:
|
||||
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
||||
print(t.format(sys.executable))
|
||||
print(t.format(pybin))
|
||||
sys.exit(1)
|
||||
|
||||
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
|
||||
h1.certfile = self.args.cert
|
||||
h1.tls_control_required = True
|
||||
h1.tls_data_required = True
|
||||
|
||||
@@ -413,9 +505,9 @@ class Ftpd(object):
|
||||
|
||||
for h_lp in hs:
|
||||
h2, lp = h_lp
|
||||
h2.hub = hub
|
||||
h2.args = hub.args
|
||||
h2.authorizer = FtpAuth(hub)
|
||||
FtpHandler.hub = h2.hub = hub
|
||||
FtpHandler.args = h2.args = hub.args
|
||||
FtpHandler.authorizer = h2.authorizer = FtpAuth(hub)
|
||||
|
||||
if self.args.ftp_pr:
|
||||
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
|
||||
@@ -435,10 +527,21 @@ class Ftpd(object):
|
||||
lgr = logging.getLogger("pyftpdlib")
|
||||
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
|
||||
|
||||
ips = self.args.i
|
||||
if "::" in ips:
|
||||
ips.append("0.0.0.0")
|
||||
|
||||
if self.args.ftp4:
|
||||
ips = [x for x in ips if ":" not in x]
|
||||
|
||||
ioloop = IOLoop()
|
||||
for ip in self.args.i:
|
||||
for ip in ips:
|
||||
for h, lp in hs:
|
||||
FTPServer((ip, int(lp)), h, ioloop)
|
||||
try:
|
||||
FTPServer((ip, int(lp)), h, ioloop)
|
||||
except:
|
||||
if ip != "0.0.0.0" or "::" not in ips:
|
||||
raise
|
||||
|
||||
Daemon(ioloop.loop, "ftp")
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -54,7 +54,6 @@ class HttpConn(object):
self.args: argparse.Namespace = hsrv.args  # mypy404
self.E: EnvParams = self.args.E
self.asrv: AuthSrv = hsrv.asrv  # mypy404
self.cert_path = hsrv.cert_path
self.u2fh: Util.FHC = hsrv.u2fh  # mypy404
self.iphash: HMaccas = hsrv.broker.iphash
self.bans: dict[str, int] = hsrv.bans
@@ -103,17 +102,18 @@ class HttpConn(object):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
self.log_func(self.log_src, msg, c)

def get_u2idx(self) -> U2idx:
# one u2idx per tcp connection;
def get_u2idx(self) -> Optional[U2idx]:
# grab from a pool of u2idx instances;
# sqlite3 fully parallelizes under python threads
# but avoid running out of FDs by creating too many
if not self.u2idx:
self.u2idx = U2idx(self)
self.u2idx = self.hsrv.get_u2idx(str(self.addr))

return self.u2idx

def _detect_https(self) -> bool:
method = None
if self.cert_path:
if True:
try:
method = self.s.recv(4, socket.MSG_PEEK)
except socket.timeout:
@@ -147,7 +147,7 @@ class HttpConn(object):
self.sr = None
if self.args.https_only:
is_https = True
elif self.args.http_only or not HAVE_SSL:
elif self.args.http_only:
is_https = False
else:
# raise Exception("asdf")
@@ -161,7 +161,7 @@ class HttpConn(object):
self.log_src = self.log_src.replace("[36m", "[35m")
try:
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ctx.load_cert_chain(self.cert_path)
ctx.load_cert_chain(self.args.cert)
if self.args.ssl_ver:
ctx.options &= ~self.args.ssl_flags_en
ctx.options |= self.args.ssl_flags_de
@@ -215,3 +215,7 @@ class HttpConn(object):
self.cli = HttpCli(self)
if not self.cli.run():
return

if self.u2idx:
self.hsrv.put_u2idx(str(self.addr), self.u2idx)
self.u2idx = None
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
@@ -11,9 +12,19 @@ import time
|
||||
|
||||
import queue
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams
|
||||
|
||||
try:
|
||||
MNFE = ModuleNotFoundError
|
||||
except:
|
||||
MNFE = ImportError
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
except MNFE:
|
||||
if EXE:
|
||||
raise
|
||||
|
||||
print(
|
||||
"""\033[1;31m
|
||||
you do not have jinja2 installed,\033[33m
|
||||
@@ -23,14 +34,30 @@ except ImportError:
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
sys.executable
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
except SyntaxError:
|
||||
if EXE:
|
||||
raise
|
||||
|
||||
print(
|
||||
"""\033[1;31m
|
||||
your jinja2 version is incompatible with your python version;\033[33m
|
||||
please try to replace it with an older version:\033[0m
|
||||
* {} -m pip install --user jinja2==2.11.3
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
sys.executable
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, EnvParams
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .metrics import Metrics
|
||||
from .u2idx import U2idx
|
||||
from .util import (
|
||||
E_SCK,
|
||||
FHC,
|
||||
@@ -39,6 +66,7 @@ from .util import (
|
||||
Magician,
|
||||
Netdev,
|
||||
NetMap,
|
||||
absreal,
|
||||
ipnorm,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
@@ -72,12 +100,16 @@ class HttpSrv(object):
|
||||
# redefine in case of multiprocessing
|
||||
socket.setdefaulttimeout(120)
|
||||
|
||||
self.t0 = time.time()
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
self.magician = Magician()
|
||||
self.nm = NetMap([], {})
|
||||
self.ssdp: Optional["SSDPr"] = None
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422, False)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.aclose: dict[str, int] = {}
|
||||
|
||||
@@ -95,6 +127,7 @@ class HttpSrv(object):
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.metrics = Metrics(self)
|
||||
self.srvs: list[socket.socket] = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients: set[HttpConn] = set() # laggy
|
||||
@@ -102,6 +135,9 @@ class HttpSrv(object):
|
||||
self.cb_ts = 0.0
|
||||
self.cb_v = ""
|
||||
|
||||
self.u2idx_free: dict[str, U2idx] = {}
|
||||
self.u2idx_n = 0
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web"))
|
||||
jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"]
|
||||
@@ -109,6 +145,11 @@ class HttpSrv(object):
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
self.statics: set[str] = set()
|
||||
self._build_statics()
|
||||
|
||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||
|
||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||
if not self.args.no_dav:
|
||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||
@@ -119,12 +160,6 @@ class HttpSrv(object):
|
||||
|
||||
self.ssdp = SSDPr(broker)
|
||||
|
||||
cert_path = os.path.join(self.E.cfg, "cert.pem")
|
||||
if bos.path.exists(cert_path):
|
||||
self.cert_path = cert_path
|
||||
else:
|
||||
self.cert_path = ""
|
||||
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
|
||||
@@ -145,6 +180,14 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def _build_statics(self) -> None:
|
||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
||||
for fn in df:
|
||||
ap = absreal(os.path.join(dp, fn))
|
||||
self.statics.add(ap)
|
||||
if ap.endswith(".gz") or ap.endswith(".br"):
|
||||
self.statics.add(ap[:-3])
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
ips = set()
|
||||
for ip, _ in self.bound:
|
||||
@@ -436,6 +479,9 @@ class HttpSrv(object):
|
||||
self.clients.remove(cli)
|
||||
self.ncli -= 1
|
||||
|
||||
if cli.u2idx:
|
||||
self.put_u2idx(str(addr), cli.u2idx)
|
||||
|
||||
def cachebuster(self) -> str:
|
||||
if time.time() - self.cb_ts < 1:
|
||||
return self.cb_v
|
||||
@@ -457,3 +503,31 @@ class HttpSrv(object):
|
||||
self.cb_v = v.decode("ascii")[-4:]
|
||||
self.cb_ts = time.time()
|
||||
return self.cb_v
|
||||
|
||||
def get_u2idx(self, ident: str) -> Optional[U2idx]:
|
||||
utab = self.u2idx_free
|
||||
for _ in range(100): # 5/0.05 = 5sec
|
||||
with self.mutex:
|
||||
if utab:
|
||||
if ident in utab:
|
||||
return utab.pop(ident)
|
||||
|
||||
return utab.pop(list(utab.keys())[0])
|
||||
|
||||
if self.u2idx_n < CORES:
|
||||
self.u2idx_n += 1
|
||||
return U2idx(self)
|
||||
|
||||
time.sleep(0.05)
|
||||
# not using conditional waits, on a hunch that
|
||||
# average performance will be faster like this
|
||||
# since most servers won't be fully saturated
|
||||
|
||||
return None
|
||||
|
||||
def put_u2idx(self, ident: str, u2idx: U2idx) -> None:
|
||||
with self.mutex:
|
||||
while ident in self.u2idx_free:
|
||||
ident += "a"
|
||||
|
||||
self.u2idx_free[ident] = u2idx
|
||||
|
||||
@@ -17,7 +17,9 @@ class Ico(object):
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break"""

zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
bext = ext.encode("ascii", "replace")
ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4]
if PY2:
zb = [ord(x) for x in zb]

@@ -33,7 +35,7 @@ class Ico(object):
h = int(100 / (float(sw) / float(sh)))
w = 100

if chrome and as_thumb:
if chrome:
# cannot handle more than ~2000 unique SVGs
if HAVE_PIL:
# svg: 3s, cache: 6s, this: 8s
@@ -43,8 +45,19 @@ class Ico(object):
w = 64
img = Image.new("RGB", (w, h), "#" + c[:6])
pb = ImageDraw.Draw(img)
tw, th = pb.textsize(ext)
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
try:
_, _, tw, th = pb.textbbox((0, 0), ext)
except:
tw, th = pb.textsize(ext)

tw += len(ext)
cw = tw // len(ext)
x = ((w - tw) // 2) - (cw * 2) // 3
fill = "#" + c[6:]
for ch in ext:
pb.text((x, (h - th) // 2), " %s " % (ch,), fill=fill)
x += cw

img = img.resize((w * 3, h * 3), Image.NEAREST)

buf = BytesIO()
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

import errno
import random
import select
import socket
@@ -11,6 +12,7 @@ from ipaddress import IPv4Network, IPv6Network
from .__init__ import TYPE_CHECKING
from .__init__ import unicode as U
from .multicast import MC_Sck, MCast
from .stolen.dnslib import AAAA
from .stolen.dnslib import CLASS as DC
from .stolen.dnslib import (
NSEC,
@@ -20,7 +22,6 @@ from .stolen.dnslib import (
SRV,
TXT,
A,
AAAA,
DNSHeader,
DNSQuestion,
DNSRecord,
@@ -277,12 +278,26 @@ class MDNS(MCast):
zf = time.time() + 2
self.probing = zf  # cant unicast so give everyone an extra sec
self.unsolicited = [zf, zf + 1, zf + 3, zf + 7]  # rfc-8.3

try:
self.run2()
except OSError as ex:
if ex.errno != errno.EBADF:
raise

self.log("stopping due to {}".format(ex), "90")

self.log("stopped", 2)

def run2(self) -> None:
last_hop = time.time()
ihop = self.args.mc_hop
while self.running:
timeout = (
0.02 + random.random() * 0.07
if self.probing or self.q or self.defend or self.unsolicited
if self.probing or self.q or self.defend
else max(0.05, self.unsolicited[0] - time.time())
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)
@@ -314,8 +329,6 @@ class MDNS(MCast):
self.log(t.format(self.hn[:-1]), 2)
self.probing = 0

self.log("stopped", 2)

def stop(self, panic=False) -> None:
self.running = False
for srv in self.srv.values():
@@ -502,6 +515,10 @@ class MDNS(MCast):
for srv in self.srv.values():
tx.add(srv)

if not self.unsolicited and self.args.zm_spam:
zf = time.time() + self.args.zm_spam + random.random() * 0.07
self.unsolicited.append(zf)

for srv, deadline in list(self.defend.items()):
if now < deadline:
continue
copyparty/metrics.py (new file, 165 lines)
@@ -0,0 +1,165 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .util import Pebkac, get_df, unhumanize
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpcli import HttpCli
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class Metrics(object):
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.hsrv = hsrv
|
||||
|
||||
def tx(self, cli: "HttpCli") -> bool:
|
||||
if not cli.avol:
|
||||
raise Pebkac(403, "not allowed for user " + cli.uname)
|
||||
|
||||
args = cli.args
|
||||
if not args.stats:
|
||||
raise Pebkac(403, "the stats feature is not enabled in server config")
|
||||
|
||||
conn = cli.conn
|
||||
vfs = conn.asrv.vfs
|
||||
allvols = list(sorted(vfs.all_vols.items()))
|
||||
|
||||
idx = conn.get_u2idx()
|
||||
if not idx or not hasattr(idx, "p_end"):
|
||||
idx = None
|
||||
|
||||
ret: list[str] = []
|
||||
|
||||
def addc(k: str, unit: str, v: str, desc: str) -> None:
|
||||
if unit:
|
||||
k += "_" + unit
|
||||
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
|
||||
else:
|
||||
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
|
||||
|
||||
def addh(k: str, typ: str, desc: str) -> None:
|
||||
zs = "# TYPE %s %s\n# HELP %s %s"
|
||||
ret.append(zs % (k, typ, k, desc))
|
||||
|
||||
def addbh(k: str, desc: str) -> None:
|
||||
zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
|
||||
ret.append(zs % (k, k, k, desc))
|
||||
|
||||
def addv(k: str, v: str) -> None:
|
||||
ret.append("%s %s" % (k, v))
|
||||
|
||||
v = "{:.3f}".format(time.time() - self.hsrv.t0)
|
||||
addc("cpp_uptime", "seconds", v, "time since last server restart")
|
||||
|
||||
v = str(len(conn.bans or []))
|
||||
addc("cpp_bans", "", v, "number of banned IPs")
|
||||
|
||||
if not args.nos_hdd:
|
||||
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
||||
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
||||
for vpath, vol in allvols:
|
||||
free, total = get_df(vol.realpath)
|
||||
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
|
||||
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
|
||||
|
||||
if idx and not args.nos_vol:
|
||||
addbh("cpp_vol_bytes", "num bytes of data in volume")
|
||||
addh("cpp_vol_files", "gauge", "num files in volume")
|
||||
addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
|
||||
addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
|
||||
volsizes = []
|
||||
try:
|
||||
ptops = [x.realpath for _, x in allvols]
|
||||
x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
|
||||
volsizes = x.get()
|
||||
except Exception as ex:
|
||||
cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)
|
||||
|
||||
for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
|
||||
addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))
|
||||
|
||||
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
|
||||
|
||||
zi = unhumanize(vol.flags.get("vmaxb") or "0")
|
||||
if zi:
|
||||
v = str(zi - nbytes)
|
||||
addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)
|
||||
|
||||
zi = unhumanize(vol.flags.get("vmaxn") or "0")
|
||||
if zi:
|
||||
v = str(zi - nfiles)
|
||||
addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)
|
||||
|
||||
if volsizes:
|
||||
addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_vol_files{vol="total"}', str(tnfiles))
|
||||
|
||||
if idx and not args.nos_dup:
|
||||
addbh("cpp_dupe_bytes", "num dupe bytes in volume")
|
||||
addh("cpp_dupe_files", "gauge", "num dupe files in volume")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
for vpath, vol in allvols:
|
||||
cur = idx.get_cur(vol.realpath)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
nbytes = 0
|
||||
nfiles = 0
|
||||
q = "select sz, count(*)-1 c from up group by w having c"
|
||||
for sz, c in cur.execute(q):
|
||||
nbytes += sz * c
|
||||
nfiles += c
|
||||
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
|
||||
addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))
|
||||
|
||||
addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_dupe_files{vol="total"}', str(tnfiles))
|
||||
|
||||
if not args.nos_unf:
|
||||
addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
|
||||
addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
try:
|
||||
x = self.hsrv.broker.ask("up2k.get_unfinished")
|
||||
xs = x.get()
|
||||
xj = json.loads(xs)
|
||||
for ptop, (nbytes, nfiles) in xj.items():
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
|
||||
if not vol:
|
||||
t = "tx_stats get_unfinished: could not map {}"
|
||||
cli.log(t.format(ptop), 3)
|
||||
continue
|
||||
|
||||
addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
|
||||
addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))
|
||||
|
||||
addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_unf_files{vol="total"}', str(tnfiles))
|
||||
|
||||
except Exception as ex:
|
||||
cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)
|
||||
|
||||
ret.append("# EOF")
|
||||
|
||||
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
|
||||
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
|
||||
return True
|
||||
@@ -8,9 +8,19 @@ import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
|
||||
from .__init__ import PY2, WINDOWS, E, unicode
|
||||
from .__init__ import EXE, PY2, WINDOWS, E, unicode
|
||||
from .bos import bos
|
||||
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
REKOBO_LKEY,
|
||||
fsenc,
|
||||
min_ex,
|
||||
pybin,
|
||||
retchk,
|
||||
runcmd,
|
||||
sfsenc,
|
||||
uncyg,
|
||||
)
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Union
|
||||
@@ -259,7 +269,9 @@ class MTag(object):
|
||||
self.args = args
|
||||
self.usable = True
|
||||
self.prefer_mt = not args.no_mtag_ff
|
||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
||||
self.backend = (
|
||||
"ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen"
|
||||
)
|
||||
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
||||
mappings = args.mtm
|
||||
or_ffprobe = " or FFprobe"
|
||||
@@ -285,9 +297,14 @@ class MTag(object):
|
||||
self.log(msg, c=3)
|
||||
|
||||
if not self.usable:
|
||||
if EXE:
|
||||
t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: "
|
||||
self.log(t + FFMPEG_URL)
|
||||
return
|
||||
|
||||
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||
pybin = os.path.basename(sys.executable)
|
||||
self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
|
||||
pyname = os.path.basename(pybin)
|
||||
self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1)
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
@@ -456,7 +473,10 @@ class MTag(object):
|
||||
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
|
||||
if not md.info.length and not md.info.codec:
|
||||
raise Exception()
|
||||
except:
|
||||
except Exception as ex:
|
||||
if self.args.mtag_v:
|
||||
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
|
||||
|
||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||
|
||||
sz = bos.path.getsize(abspath)
|
||||
@@ -519,12 +539,15 @@ class MTag(object):
|
||||
|
||||
env = os.environ.copy()
|
||||
try:
|
||||
if EXE:
|
||||
raise Exception()
|
||||
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(zsl))
|
||||
env["PYTHONPATH"] = pypath
|
||||
except:
|
||||
if not E.ox:
|
||||
if not E.ox and not EXE:
|
||||
raise
|
||||
|
||||
ret: dict[str, Any] = {}
|
||||
@@ -532,7 +555,7 @@ class MTag(object):
|
||||
try:
|
||||
cmd = [parser.bin, abspath]
|
||||
if parser.bin.endswith(".py"):
|
||||
cmd = [sys.executable] + cmd
|
||||
cmd = [pybin] + cmd
|
||||
|
||||
args = {
|
||||
"env": env,
|
||||
@@ -551,7 +574,7 @@ class MTag(object):
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
bcmd = [fsenc(x) for x in cmd]
|
||||
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||
v = v.strip()
|
||||
|
||||
@@ -15,7 +15,7 @@ from ipaddress import (
|
||||
)
|
||||
|
||||
from .__init__ import MACOS, TYPE_CHECKING
|
||||
from .util import Netdev, find_prefix, min_ex, spack
|
||||
from .util import Daemon, Netdev, find_prefix, min_ex, spack
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -228,6 +228,7 @@ class MCast(object):
|
||||
for srv in self.srv.values():
|
||||
assert srv.ip in self.sips
|
||||
|
||||
Daemon(self.hopper, "mc-hop")
|
||||
return bound
|
||||
|
||||
def setup_socket(self, srv: MC_Sck) -> None:
|
||||
@@ -299,33 +300,57 @@ class MCast(object):
|
||||
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
|
||||
self.log(t, 3)
|
||||
|
||||
self.hop(srv)
|
||||
if self.hop(srv, False):
|
||||
self.log("igmp was already joined?? chilling for a sec", 3)
|
||||
time.sleep(1.2)
|
||||
|
||||
self.hop(srv, True)
|
||||
self.b4.sort(reverse=True)
|
||||
self.b6.sort(reverse=True)
|
||||
|
||||
def hop(self, srv: MC_Sck) -> None:
|
||||
def hop(self, srv: MC_Sck, on: bool) -> bool:
|
||||
"""rejoin to keepalive on routers/switches without igmp-snooping"""
|
||||
sck = srv.sck
|
||||
req = srv.mreq
|
||||
if ":" in srv.ip:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
|
||||
# linux does leaves/joins twice with 0.2~1.05s spacing
|
||||
time.sleep(1.2)
|
||||
except:
|
||||
pass
|
||||
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
|
||||
if not on:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
|
||||
else:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
|
||||
time.sleep(1.2)
|
||||
except:
|
||||
pass
|
||||
if not on:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
# t = "joining {} from ip {} idx {} with mreq {}"
|
||||
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
|
||||
|
||||
# t = "joining {} from ip {} idx {} with mreq {}"
|
||||
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
|
||||
return True
|
||||
|
||||
def hopper(self):
|
||||
while self.args.mc_hop and self.running:
|
||||
time.sleep(self.args.mc_hop)
|
||||
if not self.running:
|
||||
return
|
||||
|
||||
for srv in self.srv.values():
|
||||
self.hop(srv, False)
|
||||
|
||||
# linux does leaves/joins twice with 0.2~1.05s spacing
|
||||
time.sleep(1.2)
|
||||
if not self.running:
|
||||
return
|
||||
|
||||
for srv in self.srv.values():
|
||||
self.hop(srv, True)
|
||||
|
||||
def map_client(self, cip: str) -> Optional[MC_Sck]:
|
||||
try:
|
||||
|
||||
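The hop() rework above splits the old one-shot rejoin into an explicit leave/join toggle, and the new hopper() thread calls it on a timer so that announcements survive switches and routers without IGMP snooping. Below is a minimal standalone sketch of that leave-wait-rejoin cadence for IPv4; the group, port and interval are made-up example values, and a bare socket stands in for copyparty's MC_Sck wrapper.

# standalone sketch of the hop/hopper rejoin cadence above; the group,
# port and interval are hypothetical examples, not copyparty defaults
import socket
import struct
import time

GRP, PORT, INTERVAL = "239.255.255.250", 1900, 300  # example values

sck = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sck.bind(("", PORT))
mreq = struct.pack("4s4s", socket.inet_aton(GRP), socket.inet_aton("0.0.0.0"))
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

while True:
    time.sleep(INTERVAL)
    # leave, wait a moment, rejoin -- nudges igmp-unaware switches
    sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, mreq)
    time.sleep(1.2)  # linux spaces its own leave/join pairs by ~0.2-1.05s
    sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)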
145  copyparty/pwhash.py  Normal file
@@ -0,0 +1,145 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import hashlib
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .__init__ import unicode
|
||||
|
||||
|
||||
class PWHash(object):
|
||||
def __init__(self, args: argparse.Namespace):
|
||||
self.args = args
|
||||
|
||||
try:
|
||||
alg, ac = args.ah_alg.split(",")
|
||||
except:
|
||||
alg = args.ah_alg
|
||||
ac = {}
|
||||
|
||||
if alg == "none":
|
||||
alg = ""
|
||||
|
||||
self.alg = alg
|
||||
self.ac = ac
|
||||
if not alg:
|
||||
self.on = False
|
||||
self.hash = unicode
|
||||
return
|
||||
|
||||
self.on = True
|
||||
self.salt = args.ah_salt.encode("utf-8")
|
||||
self.cache: dict[str, str] = {}
|
||||
self.mutex = threading.Lock()
|
||||
self.hash = self._cache_hash
|
||||
|
||||
if alg == "sha2":
|
||||
self._hash = self._gen_sha2
|
||||
elif alg == "scrypt":
|
||||
self._hash = self._gen_scrypt
|
||||
elif alg == "argon2":
|
||||
self._hash = self._gen_argon2
|
||||
else:
|
||||
t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
|
||||
raise Exception(t.format(alg))
|
||||
|
||||
def _cache_hash(self, plain: str) -> str:
|
||||
with self.mutex:
|
||||
try:
|
||||
return self.cache[plain]
|
||||
except:
|
||||
pass
|
||||
|
||||
if not plain:
|
||||
return ""
|
||||
|
||||
if len(plain) > 255:
|
||||
raise Exception("password too long")
|
||||
|
||||
if len(self.cache) > 9000:
|
||||
self.cache = {}
|
||||
|
||||
ret = self._hash(plain)
|
||||
self.cache[plain] = ret
|
||||
return ret
|
||||
|
||||
def _gen_sha2(self, plain: str) -> str:
|
||||
its = int(self.ac[0]) if self.ac else 424242
|
||||
bplain = plain.encode("utf-8")
|
||||
ret = b"\n"
|
||||
for _ in range(its):
|
||||
ret = hashlib.sha512(self.salt + bplain + ret).digest()
|
||||
|
||||
return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")
|
||||
|
||||
def _gen_scrypt(self, plain: str) -> str:
|
||||
cost = 2 << 13
|
||||
its = 2
|
||||
blksz = 8
|
||||
para = 4
|
||||
try:
|
||||
cost = 2 << int(self.ac[0])
|
||||
its = int(self.ac[1])
|
||||
blksz = int(self.ac[2])
|
||||
para = int(self.ac[3])
|
||||
except:
|
||||
pass
|
||||
|
||||
ret = plain.encode("utf-8")
|
||||
for _ in range(its):
|
||||
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
|
||||
|
||||
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
|
||||
|
||||
def _gen_argon2(self, plain: str) -> str:
|
||||
from argon2.low_level import Type as ArgonType
|
||||
from argon2.low_level import hash_secret
|
||||
|
||||
time_cost = 3
|
||||
mem_cost = 256
|
||||
parallelism = 4
|
||||
version = 19
|
||||
try:
|
||||
time_cost = int(self.ac[0])
|
||||
mem_cost = int(self.ac[1])
|
||||
parallelism = int(self.ac[2])
|
||||
version = int(self.ac[3])
|
||||
except:
|
||||
pass
|
||||
|
||||
bplain = plain.encode("utf-8")
|
||||
|
||||
bret = hash_secret(
|
||||
secret=bplain,
|
||||
salt=self.salt,
|
||||
time_cost=time_cost,
|
||||
memory_cost=mem_cost * 1024,
|
||||
parallelism=parallelism,
|
||||
hash_len=24,
|
||||
type=ArgonType.ID,
|
||||
version=version,
|
||||
)
|
||||
ret = bret.split(b"$")[-1].decode("utf-8")
|
||||
return "+" + ret.replace("/", "_").replace("+", "-")
|
||||
|
||||
def stdin(self) -> None:
|
||||
while True:
|
||||
ln = sys.stdin.readline().strip()
|
||||
if not ln:
|
||||
break
|
||||
print(self.hash(ln))
|
||||
|
||||
def cli(self) -> None:
|
||||
import getpass
|
||||
|
||||
while True:
|
||||
p1 = getpass.getpass("password> ")
|
||||
p2 = getpass.getpass("again or just hit ENTER> ")
|
||||
if p2 and p1 != p2:
|
||||
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
|
||||
continue
|
||||
print(self.hash(p1))
|
||||
print()
|
||||
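pwhash.py above is a new module that hashes account passwords with a selectable KDF (sha2, scrypt or argon2) and caches the results. A rough usage sketch, assuming the module is importable as copyparty.pwhash; the salt is a placeholder, and with a bare "sha2" spec the class falls back to its built-in iteration count.

# rough usage sketch of the PWHash class added above;
# "example-salt" is a placeholder, not a recommended value
import argparse
from copyparty.pwhash import PWHash  # import path assumed from the diff

args = argparse.Namespace(ah_alg="sha2", ah_salt="example-salt")
pwh = PWHash(args)
print(pwh.hash("hunter2"))  # "+..." urlsafe-base64 of 24 bytes of sha512
print(pwh.hash("hunter2"))  # identical; second call is served from the cache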
0  copyparty/res/__init__.py  Normal file
@@ -9,13 +9,13 @@ import sys
|
||||
import time
|
||||
from types import SimpleNamespace
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .__init__ import ANYWIN, EXE, TYPE_CHECKING
|
||||
from .authsrv import LEELOO_DALLAS, VFS
|
||||
from .bos import bos
|
||||
from .util import Daemon, min_ex
|
||||
from .util import Daemon, min_ex, pybin, runhook
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any
|
||||
from typing import Any, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -42,8 +42,12 @@ class SMB(object):
|
||||
from impacket import smbserver
|
||||
from impacket.ntlm import compute_lmhash, compute_nthash
|
||||
except ImportError:
|
||||
if EXE:
|
||||
print("copyparty.exe cannot do SMB")
|
||||
sys.exit(1)
|
||||
|
||||
m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
|
||||
print(m.format(min_ex(), sys.executable))
|
||||
print(m.format(min_ex(), pybin))
|
||||
sys.exit(1)
|
||||
|
||||
# patch vfs into smbserver.os
|
||||
@@ -109,6 +113,9 @@ class SMB(object):
|
||||
self.stop = srv.stop
|
||||
self.log("smb", "listening @ {}:{}".format(ip, port))
|
||||
|
||||
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log("smb", msg, c)
|
||||
|
||||
def start(self) -> None:
|
||||
Daemon(self.srv.start)
|
||||
|
||||
@@ -165,8 +172,15 @@ class SMB(object):
|
||||
yeet("blocked write (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("open", vpath, *a)
|
||||
if wr and not vfs.axs.uwrite:
|
||||
yeet("blocked write (no-write-acc): " + vpath)
|
||||
if wr:
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked write (no-write-acc): " + vpath)
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
|
||||
):
|
||||
yeet("blocked by xbu server config: " + vpath)
|
||||
|
||||
ret = bos.open(ap, flags, *a, mode=chmod, **ka)
|
||||
if wr:
|
||||
@@ -194,11 +208,13 @@ class SMB(object):
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
vfs.vpath,
|
||||
vfs.flags,
|
||||
rem,
|
||||
fn,
|
||||
"1.7.6.2",
|
||||
time.time(),
|
||||
"",
|
||||
)
|
||||
|
||||
def _rename(self, vp1: str, vp2: str) -> None:
|
||||
@@ -245,7 +261,7 @@ class SMB(object):
|
||||
yeet("blocked delete (no-del-acc): " + vpath)
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [])
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
|
||||
|
||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||
if not self.args.smbw:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import errno
|
||||
import re
|
||||
import select
|
||||
import socket
|
||||
@@ -8,7 +9,7 @@ from email.utils import formatdate
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .multicast import MC_Sck, MCast
|
||||
from .util import CachedSet, min_ex, html_escape
|
||||
from .util import CachedSet, html_escape, min_ex
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .broker_util import BrokerCli
|
||||
@@ -80,7 +81,7 @@ class SSDPr(object):
|
||||
ubase = "{}://{}:{}".format(proto, sip, sport)
|
||||
zsl = self.args.zsl
|
||||
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
|
||||
name = "{} @ {}".format(self.args.doctitle, self.args.name)
|
||||
name = self.args.doctitle
|
||||
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
||||
hc.reply(zs.encode("utf-8", "replace"))
|
||||
return False # close connection
|
||||
@@ -129,6 +130,17 @@ class SSDPd(MCast):
|
||||
srv.hport = hp
|
||||
|
||||
self.log("listening")
|
||||
try:
|
||||
self.run2()
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.EBADF:
|
||||
raise
|
||||
|
||||
self.log("stopping due to {}".format(ex), "90")
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def run2(self) -> None:
|
||||
while self.running:
|
||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
@@ -148,8 +160,6 @@ class SSDPd(MCast):
|
||||
)
|
||||
self.log(t, 6)
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def stop(self) -> None:
|
||||
self.running = False
|
||||
for srv in self.srv.values():
|
||||
|
||||
@@ -44,6 +44,7 @@ class StreamTar(StreamArc):
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
cmp: str = "",
|
||||
**kwargs: Any
|
||||
):
|
||||
super(StreamTar, self).__init__(log, fgen)
|
||||
@@ -53,14 +54,36 @@ class StreamTar(StreamArc):
|
||||
self.qfile = QFile()
|
||||
self.errf: dict[str, Any] = {}
|
||||
|
||||
try:
|
||||
cmp, lv = cmp.replace(":", ",").split(",")
|
||||
lv = int(lv)
|
||||
except:
|
||||
lv = None
|
||||
|
||||
# python 3.8 changed to PAX_FORMAT as default,
|
||||
# waste of space and don't care about the new features
|
||||
fmt = tarfile.GNU_FORMAT
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
|
||||
|
||||
arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
|
||||
if cmp == "gz":
|
||||
fun = tarfile.TarFile.gzopen
|
||||
arg["compresslevel"] = lv if lv is not None else 3
|
||||
elif cmp == "bz2":
|
||||
fun = tarfile.TarFile.bz2open
|
||||
arg["compresslevel"] = lv if lv is not None else 2
|
||||
elif cmp == "xz":
|
||||
fun = tarfile.TarFile.xzopen
|
||||
arg["preset"] = lv if lv is not None else 1
|
||||
else:
|
||||
fun = tarfile.open
|
||||
arg["mode"] = "w|"
|
||||
|
||||
self.tar = fun(**arg)
|
||||
|
||||
Daemon(self._gen, "star-gen")
|
||||
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
buf = b""
|
||||
try:
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
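The hunk above teaches the tar streamer to pick a compressing opener and level from a spec such as "gz:9" (gzip, bzip2 or xz via the corresponding tarfile classmethods), defaulting to a plain streaming tar. A condensed sketch of the same dispatch which writes to an ordinary file instead of copyparty's QFile pipe; the paths and the "gz:9" spec are example values.

# condensed sketch of the opener dispatch above; writes to a plain file
# instead of the QFile pipe, and "gz:9" is just an example spec
import tarfile
import tempfile

def open_tar(path, spec=""):
    try:
        cmp, lv = spec.replace(":", ",").split(",")
        lv = int(lv)
    except Exception:
        cmp, lv = spec, None

    fmt = tarfile.GNU_FORMAT  # smaller headers than the py3.8+ PAX default
    arg = {"name": path, "mode": "w", "format": fmt}
    if cmp == "gz":
        fun = tarfile.TarFile.gzopen
        arg["compresslevel"] = lv if lv is not None else 3
    elif cmp == "bz2":
        fun = tarfile.TarFile.bz2open
        arg["compresslevel"] = lv if lv is not None else 2
    elif cmp == "xz":
        fun = tarfile.TarFile.xzopen
        arg["preset"] = lv if lv is not None else 1
    else:
        fun = tarfile.open
    return fun(**arg)

# example: gzip at level 9; any existing file works as payload
src = tempfile.NamedTemporaryFile(delete=False)
src.write(b"hello")
src.close()
with open_tar(src.name + ".tar.gz", "gz:9") as tf:
    tf.add(src.name, arcname="hello.txt")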
@@ -72,6 +95,12 @@ class StreamTar(StreamArc):
|
||||
|
||||
yield None
|
||||
finally:
|
||||
while buf:
|
||||
try:
|
||||
buf = self.qfile.q.get()
|
||||
except:
|
||||
pass
|
||||
|
||||
if self.errf:
|
||||
bos.unlink(self.errf["ap"])
|
||||
|
||||
@@ -101,6 +130,9 @@ class StreamTar(StreamArc):
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
if self.stopped:
|
||||
break
|
||||
|
||||
try:
|
||||
self.ser(f)
|
||||
except:
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import CORES
|
||||
from .bos import bos
|
||||
from .th_cli import ThumbCli
|
||||
from .util import vjoin
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Generator, Optional
|
||||
@@ -21,10 +25,78 @@ class StreamArc(object):
|
||||
):
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
self.stopped = False
|
||||
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
raise Exception("override me")
|
||||
|
||||
def stop(self) -> None:
|
||||
self.stopped = True
|
||||
|
||||
|
||||
def gfilter(
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
thumbcli: ThumbCli,
|
||||
uname: str,
|
||||
vtop: str,
|
||||
fmt: str,
|
||||
) -> Generator[dict[str, Any], None, None]:
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
pend = []
|
||||
with ThreadPoolExecutor(max_workers=CORES) as tp:
|
||||
try:
|
||||
for f in fgen:
|
||||
task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
|
||||
pend.append((task, f))
|
||||
if pend[0][0].done() or len(pend) > CORES * 4:
|
||||
task, f = pend.pop(0)
|
||||
try:
|
||||
f = task.result(600)
|
||||
except:
|
||||
pass
|
||||
yield f
|
||||
|
||||
for task, f in pend:
|
||||
try:
|
||||
f = task.result(600)
|
||||
except:
|
||||
pass
|
||||
yield f
|
||||
except Exception as ex:
|
||||
thumbcli.log("gfilter flushing ({})".format(ex))
|
||||
for task, f in pend:
|
||||
try:
|
||||
task.result(600)
|
||||
except:
|
||||
pass
|
||||
thumbcli.log("gfilter flushed")
|
||||
|
||||
|
||||
def enthumb(
|
||||
thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
|
||||
) -> dict[str, Any]:
|
||||
rem = f["vp"]
|
||||
ext = rem.rsplit(".", 1)[-1].lower()
|
||||
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
|
||||
raise Exception()
|
||||
|
||||
vp = vjoin(vtop, rem.split("/", 1)[1])
|
||||
vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
|
||||
dbv, vrem = vn.get_dbv(rem)
|
||||
thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
|
||||
if not thp:
|
||||
raise Exception()
|
||||
|
||||
ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
|
||||
sz = bos.path.getsize(thp)
|
||||
st: os.stat_result = f["st"]
|
||||
ts = st.st_mtime
|
||||
f["ap"] = thp
|
||||
f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
|
||||
f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
|
||||
return f
|
||||
|
||||
|
||||
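gfilter() above fans the file generator out to a thread pool (each entry is converted by enthumb) while still yielding entries in their original order, keeping roughly CORES * 4 jobs in flight at most. Here is a minimal order-preserving pool with the same shape; slow_work() is a stand-in for enthumb() and the numbers are arbitrary.

# minimal order-preserving pool, same shape as gfilter above;
# slow_work() is a stand-in for enthumb()
from concurrent.futures import ThreadPoolExecutor
import time

CORES = 4

def slow_work(item):
    time.sleep(0.1)
    return item * 10

def pipeline(items):
    pend = []
    with ThreadPoolExecutor(max_workers=CORES) as tp:
        for it in items:
            pend.append((tp.submit(slow_work, it), it))
            # drain from the front once the oldest job is done
            # or the backlog grows past CORES * 4
            if pend[0][0].done() or len(pend) > CORES * 4:
                task, it = pend.pop(0)
                try:
                    it = task.result(600)
                except Exception:
                    pass  # keep the original item on failure
                yield it
        for task, it in pend:
            try:
                it = task.result(600)
            except Exception:
                pass
            yield it

print(list(pipeline(range(8))))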
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
||||
report = ["copyparty failed to add the following files to the archive:", ""]
|
||||
|
||||
@@ -28,13 +28,15 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .authsrv import BAD_CFG, AuthSrv
|
||||
from .cert import ensure_cert
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
VERSIONS,
|
||||
Daemon,
|
||||
Garda,
|
||||
@@ -44,6 +46,7 @@ from .util import (
|
||||
ansi_re,
|
||||
min_ex,
|
||||
mp,
|
||||
pybin,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
)
|
||||
@@ -67,10 +70,18 @@ class SvcHub(object):
|
||||
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
|
||||
"""
|
||||
|
||||
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
args: argparse.Namespace,
|
||||
dargs: argparse.Namespace,
|
||||
argv: list[str],
|
||||
printed: str,
|
||||
) -> None:
|
||||
self.args = args
|
||||
self.dargs = dargs
|
||||
self.argv = argv
|
||||
self.E: EnvParams = args.E
|
||||
self.no_ansi = args.no_ansi
|
||||
self.logf: Optional[typing.TextIO] = None
|
||||
self.logf_base_fn = ""
|
||||
self.stop_req = False
|
||||
@@ -89,11 +100,6 @@ class SvcHub(object):
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.no_dav = True
|
||||
@@ -110,6 +116,7 @@ class SvcHub(object):
|
||||
args.hardlink = True
|
||||
args.vague_403 = True
|
||||
args.ban_404 = "50,60,1440"
|
||||
args.turbo = -1
|
||||
args.nih = True
|
||||
|
||||
if args.s:
|
||||
@@ -119,6 +126,20 @@ class SvcHub(object):
|
||||
args.no_robots = True
|
||||
args.force_js = True
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
|
||||
self.log_div = 10 ** (6 - args.log_tdec)
|
||||
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
|
||||
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
self._setup_logfile(printed)
|
||||
@@ -140,22 +161,34 @@ class SvcHub(object):
|
||||
self.log("root", t.format(args.j))
|
||||
|
||||
if not args.no_fpool and args.j != 1:
|
||||
t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", t, c=3)
|
||||
t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
|
||||
self.log("root", t.format(args.j), c=3)
|
||||
args.no_fpool = True
|
||||
|
||||
bri = "zy"[args.theme % 2 :][:1]
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if args.nih:
|
||||
args.vname = ""
|
||||
args.doctitle = args.doctitle.replace(" @ --name", "")
|
||||
else:
|
||||
args.vname = args.name
|
||||
args.doctitle = args.doctitle.replace("--name", args.vname)
|
||||
args.bname = args.bname.replace("--name", args.vname) or args.vname
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
# initiate all services to manage
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
|
||||
|
||||
if args.cgen:
|
||||
self.asrv.cgen()
|
||||
|
||||
if args.exit == "cfg":
|
||||
sys.exit(0)
|
||||
|
||||
if args.ls:
|
||||
self.asrv.dbg_ls()
|
||||
|
||||
@@ -164,9 +197,6 @@ class SvcHub(object):
|
||||
|
||||
self.log("root", "max clients: {}".format(self.args.nc))
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception("bad config")
|
||||
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
@@ -180,6 +210,7 @@ class SvcHub(object):
|
||||
|
||||
self.args.th_dec = list(decs.keys())
|
||||
self.thumbsrv = None
|
||||
want_ff = False
|
||||
if not args.no_thumb:
|
||||
t = ", ".join(self.args.th_dec) or "(None available)"
|
||||
self.log("thumb", "decoder preference: {}".format(t))
|
||||
@@ -191,8 +222,12 @@ class SvcHub(object):
|
||||
if self.args.th_dec:
|
||||
self.thumbsrv = ThumbSrv(self)
|
||||
else:
|
||||
want_ff = True
|
||||
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
|
||||
msg = msg.format(" " * 37, os.path.basename(sys.executable))
|
||||
msg = msg.format(" " * 37, os.path.basename(pybin))
|
||||
if EXE:
|
||||
msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails"
|
||||
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
if not args.no_acode and args.no_thumb:
|
||||
@@ -204,6 +239,10 @@ class SvcHub(object):
|
||||
msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
|
||||
self.log("thumb", msg, c=6)
|
||||
args.no_acode = True
|
||||
want_ff = True
|
||||
|
||||
if want_ff and ANYWIN:
|
||||
self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
|
||||
|
||||
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
|
||||
|
||||
@@ -216,7 +255,8 @@ class SvcHub(object):
|
||||
if args.ftp or args.ftps:
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
self.ftpd: Optional[Ftpd] = None
|
||||
Daemon(self.start_ftpd, "start_ftpd")
|
||||
zms += "f" if args.ftp else "F"
|
||||
|
||||
if args.smb:
|
||||
@@ -246,6 +286,28 @@ class SvcHub(object):
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
if self.ftpd:
|
||||
return
|
||||
|
||||
self.restart_ftpd()
|
||||
|
||||
def restart_ftpd(self) -> None:
|
||||
if not hasattr(self, "ftpd"):
|
||||
return
|
||||
|
||||
from .ftpd import Ftpd
|
||||
|
||||
if self.ftpd:
|
||||
return # todo
|
||||
|
||||
if not os.path.exists(self.args.cert):
|
||||
ensure_cert(self.log, self.args)
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
self.log("root", "started FTPd")
|
||||
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
@@ -323,6 +385,36 @@ class SvcHub(object):
|
||||
al.RS = R + "/" if R else ""
|
||||
al.SRS = "/" + R + "/" if R else "/"
|
||||
|
||||
if al.rsp_jtr:
|
||||
al.rsp_slp = 0.000001
|
||||
|
||||
al.th_covers = set(al.th_covers.split(","))
|
||||
|
||||
for k in "c".split(" "):
|
||||
vl = getattr(al, k)
|
||||
if not vl:
|
||||
continue
|
||||
|
||||
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
|
||||
setattr(al, k, vl)
|
||||
|
||||
for k in "lo hist ssl_log".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if vs and vs.startswith("~"):
|
||||
setattr(al, k, os.path.expanduser(vs))
|
||||
|
||||
for k in "sus_urls nonsus_urls".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if not vs or vs == "no":
|
||||
setattr(al, k, None)
|
||||
else:
|
||||
setattr(al, k, re.compile(vs))
|
||||
|
||||
if not al.sus_urls:
|
||||
al.ban_url = "no"
|
||||
elif al.ban_url == "no":
|
||||
al.sus_urls = None
|
||||
|
||||
return True
|
||||
|
||||
def _setlimits(self) -> None:
|
||||
@@ -377,6 +469,7 @@ class SvcHub(object):
|
||||
|
||||
def _setup_logfile(self, printed: str) -> None:
|
||||
base_fn = fn = sel_fn = self._logname()
|
||||
do_xz = fn.lower().endswith(".xz")
|
||||
if fn != self.args.lo:
|
||||
ctr = 0
|
||||
# yup this is a race; if started sufficiently concurrently, two
|
||||
@@ -388,7 +481,7 @@ class SvcHub(object):
|
||||
fn = sel_fn
|
||||
|
||||
try:
|
||||
if fn.lower().endswith(".xz"):
|
||||
if do_xz:
|
||||
import lzma
|
||||
|
||||
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
|
||||
@@ -399,7 +492,7 @@ class SvcHub(object):
|
||||
|
||||
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
|
||||
|
||||
argv = [sys.executable] + self.argv
|
||||
argv = [pybin] + self.argv
|
||||
if hasattr(shlex, "quote"):
|
||||
argv = [shlex.quote(x) for x in argv]
|
||||
else:
|
||||
@@ -606,8 +699,14 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg))
|
||||
zd = datetime.utcnow()
|
||||
ts = self.log_dfmt % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
@@ -634,26 +733,36 @@ class SvcHub(object):
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
self._set_next_day()
|
||||
if self.logf:
|
||||
self.logf.write(zs)
|
||||
|
||||
fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}\n"
|
||||
fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
|
||||
if self.no_ansi:
|
||||
fmt = "%s %-21s %s\n"
|
||||
if "\033" in msg:
|
||||
msg = ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}\033[0m".format(c, msg)
|
||||
msg = "\033[3%sm%s\033[0m" % (c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[{}m{}\033[0m".format(c, msg)
|
||||
msg = "\033[%sm%s\033[0m" % (c, msg)
|
||||
else:
|
||||
msg = "{}{}\033[0m".format(c, msg)
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
|
||||
msg = fmt.format(ts, src, msg)
|
||||
zd = datetime.utcfromtimestamp(now)
|
||||
ts = self.log_efmt % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
print(msg, end="")
|
||||
except UnicodeEncodeError:
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import calendar
|
||||
import time
|
||||
import stat
|
||||
import time
|
||||
import zlib
|
||||
|
||||
from .bos import bos
|
||||
@@ -221,6 +221,7 @@ class StreamZip(StreamArc):
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
**kwargs: Any
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
|
||||
|
||||
@@ -7,7 +7,8 @@ import socket
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||
from .cert import gencert
|
||||
from .stolen.qrcodegen import QrCode
|
||||
from .util import (
|
||||
E_ACCESS,
|
||||
@@ -295,6 +296,8 @@ class TcpSrv(object):
|
||||
def _distribute_netdevs(self):
|
||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
self.hub.restart_ftpd()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
@@ -322,7 +325,7 @@ class TcpSrv(object):
|
||||
if k not in netdevs:
|
||||
removed = "{} = {}".format(k, v)
|
||||
|
||||
t = "network change detected:\n added {}\nremoved {}"
|
||||
t = "network change detected:\n added {}\033[0;33m\nremoved {}"
|
||||
self.log("tcpsrv", t.format(added, removed), 3)
|
||||
self.netdevs = netdevs
|
||||
self._distribute_netdevs()
|
||||
@@ -501,7 +504,7 @@ class TcpSrv(object):
|
||||
zoom = 1
|
||||
|
||||
qr = qrc.render(zoom, pad)
|
||||
if not VT100:
|
||||
if self.args.no_ansi:
|
||||
return "{}\n{}".format(txt, qr)
|
||||
|
||||
halfc = "\033[40;48;5;{0}m{1}\033[47;48;5;{2}m"
|
||||
|
||||
@@ -108,6 +108,7 @@ class ThumbCli(object):
|
||||
if st.st_size:
|
||||
ret = tpath = tp
|
||||
fmt = ret.rsplit(".")[1]
|
||||
break
|
||||
else:
|
||||
abort = True
|
||||
except:
|
||||
|
||||
@@ -12,14 +12,17 @@ import time
|
||||
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
BytesIO,
|
||||
Cooldown,
|
||||
Daemon,
|
||||
Pebkac,
|
||||
afsenc,
|
||||
fsenc,
|
||||
min_ex,
|
||||
runcmd,
|
||||
@@ -82,14 +85,14 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
# base64 = 64 = 4096
|
||||
rd, fn = vsplit(rem)
|
||||
if rd:
|
||||
h = hashlib.sha512(fsenc(rd)).digest()
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
else:
|
||||
rd = "top"
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(fsenc(fn)).digest()
|
||||
h = hashlib.sha512(afsenc(fn)).digest()
|
||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
|
||||
if fmt in ("opus", "caf"):
|
||||
@@ -108,8 +111,6 @@ class ThumbSrv(object):
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
res = hub.args.th_size.split("x")
|
||||
self.res = tuple([int(x) for x in res])
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
@@ -117,7 +118,7 @@ class ThumbSrv(object):
|
||||
self.stopping = False
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
|
||||
self.q: Queue[Optional[tuple[str, str, VFS]]] = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
|
||||
|
||||
@@ -133,6 +134,8 @@ class ThumbSrv(object):
|
||||
msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
|
||||
msg += ", ".join(missing)
|
||||
self.log(msg, c=3)
|
||||
if ANYWIN and self.args.no_acode:
|
||||
self.log("download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
|
||||
|
||||
if self.args.th_clean:
|
||||
Daemon(self.cleaner, "thumb.cln")
|
||||
@@ -180,6 +183,10 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def getres(self, vn: VFS) -> tuple[int, int]:
|
||||
w, h = vn.flags["thsize"].split("x")
|
||||
return int(w), int(h)
|
||||
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
@@ -196,18 +203,24 @@ class ThumbSrv(object):
|
||||
self.log("wait {}".format(tpath))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
bos.makedirs(thdir)
|
||||
bos.makedirs(os.path.join(thdir, "w"))
|
||||
|
||||
inf_path = os.path.join(thdir, "dir.txt")
|
||||
if not bos.path.exists(inf_path):
|
||||
with open(inf_path, "wb") as f:
|
||||
f.write(fsenc(os.path.dirname(abspath)))
|
||||
f.write(afsenc(os.path.dirname(abspath)))
|
||||
|
||||
self.busy[tpath] = [cond]
|
||||
do_conv = True
|
||||
|
||||
if do_conv:
|
||||
self.q.put((abspath, tpath))
|
||||
allvols = list(self.asrv.vfs.all_vols.values())
|
||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||
if not vn:
|
||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
||||
vn = self.asrv.vfs.all_aps[0][1]
|
||||
|
||||
self.q.put((abspath, tpath, vn))
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
@@ -244,7 +257,7 @@ class ThumbSrv(object):
|
||||
if not task:
|
||||
break
|
||||
|
||||
abspath, tpath = task
|
||||
abspath, tpath, vn = task
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
png_ok = False
|
||||
funs = []
|
||||
@@ -268,9 +281,16 @@ class ThumbSrv(object):
|
||||
if not png_ok and tpath.endswith(".png"):
|
||||
raise Pebkac(400, "png only allowed for waveforms")
|
||||
|
||||
tdir, tfn = os.path.split(tpath)
|
||||
ttpath = os.path.join(tdir, "w", tfn)
|
||||
try:
|
||||
bos.unlink(ttpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
for fun in funs:
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
fun(abspath, ttpath, vn)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
@@ -279,15 +299,20 @@ class ThumbSrv(object):
|
||||
self.log(msg, c)
|
||||
if getattr(ex, "returncode", 0) != 321:
|
||||
if fun == funs[-1]:
|
||||
with open(tpath, "wb") as _:
|
||||
with open(ttpath, "wb") as _:
|
||||
pass
|
||||
else:
|
||||
# ffmpeg may spawn empty files on windows
|
||||
try:
|
||||
os.unlink(tpath)
|
||||
os.unlink(ttpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
bos.rename(ttpath, tpath)
|
||||
except:
|
||||
pass
|
||||
|
||||
with self.mutex:
|
||||
subs = self.busy[tpath]
|
||||
del self.busy[tpath]
|
||||
@@ -299,9 +324,10 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
|
||||
def fancy_pillow(self, im: "Image.Image", vn: VFS) -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
r = max(*self.res) * 2
|
||||
res = self.getres(vn)
|
||||
r = max(*res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
try:
|
||||
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
|
||||
@@ -315,23 +341,23 @@ class ThumbSrv(object):
|
||||
if rot in rots:
|
||||
im = im.transpose(rots[rot])
|
||||
|
||||
if self.args.th_no_crop:
|
||||
im.thumbnail(self.res, resample=Image.LANCZOS)
|
||||
if "nocrop" in vn.flags:
|
||||
im.thumbnail(res, resample=Image.LANCZOS)
|
||||
else:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
dw, dh = res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath: str, tpath: str) -> None:
|
||||
def conv_pil(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im = self.fancy_pillow(im)
|
||||
im = self.fancy_pillow(im, vn)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "90")
|
||||
im.thumbnail(self.res)
|
||||
im.thumbnail(self.getres(vn))
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
args = {"quality": 40}
|
||||
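The thumbnailer hunks above replace the global --th-no-crop switch and the fixed self.res with per-volume flags (nocrop, plus thsize read through getres). The crop-versus-fit decision itself is plain Pillow; a reduced sketch of just that part, with a hypothetical input path and target size.

# reduced sketch of the crop/no-crop decision above; the input path and
# target size are hypothetical examples
from PIL import Image, ImageOps

def shrink(src, dst, size=(256, 256), nocrop=False):
    with Image.open(src) as im:
        if nocrop:
            # keep aspect ratio; may be smaller than `size` in one dimension
            im.thumbnail(size, resample=Image.LANCZOS)
        else:
            # center-crop to exactly `size` (capped at the source dimensions)
            iw, ih = im.size
            res = (min(iw, size[0]), min(ih, size[1]))
            im = ImageOps.fit(im, res, method=Image.LANCZOS)
        im.convert("RGB").save(dst, quality=40)

shrink("photo.jpg", "photo.thumb.jpg")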
@@ -354,12 +380,12 @@ class ThumbSrv(object):
|
||||
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_vips(self, abspath: str, tpath: str) -> None:
|
||||
def conv_vips(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
crops = ["centre", "none"]
|
||||
if self.args.th_no_crop:
|
||||
if "nocrop" in vn.flags:
|
||||
crops = ["none"]
|
||||
|
||||
w, h = self.res
|
||||
w, h = self.getres(vn)
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
@@ -373,8 +399,8 @@ class ThumbSrv(object):
|
||||
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -386,12 +412,13 @@ class ThumbSrv(object):
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
if "nocrop" in vn.flags:
|
||||
scale += "decrease,setsar=1:1"
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
bscale = scale.format(*list(self.res)).encode("utf-8")
|
||||
res = self.getres(vn)
|
||||
bscale = scale.format(*list(res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -423,11 +450,11 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def _run_ff(self, cmd: list[bytes]) -> None:
|
||||
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
|
||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"])
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -470,8 +497,8 @@ class ThumbSrv(object):
|
||||
self.log(t + txt, c=c)
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def conv_waves(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_waves(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -496,10 +523,10 @@ class ThumbSrv(object):
|
||||
# fmt: on
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_spec(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -539,23 +566,34 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str) -> None:
|
||||
def conv_opus(self, abspath: str, tpath: str, vn: VFS) -> None:
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
try:
|
||||
dur = ret[".dur"][1]
|
||||
except:
|
||||
dur = 0
|
||||
|
||||
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
|
||||
want_caf = tpath.endswith(".caf")
|
||||
tmp_opus = tpath
|
||||
if want_caf:
|
||||
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
|
||||
tmp_opus = tpath + ".opus"
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
except:
|
||||
pass
|
||||
|
||||
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
|
||||
caf_src = abspath if src_opus else tmp_opus
|
||||
|
||||
if not want_caf or not src_opus:
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -570,9 +608,34 @@ class ThumbSrv(object):
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
if want_caf:
|
||||
# iOS fails to play some "insufficiently complex" files
|
||||
# (average file shorter than 8 seconds), so of course we
|
||||
# fix that by mixing in some inaudible pink noise :^)
|
||||
# 6.3 sec seems like the cutoff so lets do 7, and
|
||||
# 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
|
||||
if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-v", b"error",
|
||||
b"-hide_banner",
|
||||
b"-i", fsenc(abspath),
|
||||
b"-filter_complex", b"anoisesrc=a=0.001:d=7:c=pink,asplit[l][r]; [l][r]amerge[s]; [0:a:0][s]amix",
|
||||
b"-map_metadata", b"-1",
|
||||
b"-ac", b"2",
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
b"-f", b"caf",
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
elif want_caf:
|
||||
# simple remux should be safe
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -587,7 +650,13 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
if tmp_opus != tpath:
|
||||
try:
|
||||
bos.unlink(tmp_opus)
|
||||
except:
|
||||
pass
|
||||
|
||||
def poke(self, tdir: str) -> None:
|
||||
if not self.poke_cd.poke(tdir):
|
||||
|
||||
@@ -34,14 +34,14 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpconn import HttpConn
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class U2idx(object):
|
||||
def __init__(self, conn: "HttpConn") -> None:
|
||||
self.log_func = conn.log_func
|
||||
self.asrv = conn.asrv
|
||||
self.args = conn.args
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.log_func = hsrv.log
|
||||
self.asrv = hsrv.asrv
|
||||
self.args = hsrv.args
|
||||
self.timeout = self.args.srch_time
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -51,7 +51,7 @@ class U2idx(object):
|
||||
self.active_id = ""
|
||||
self.active_cur: Optional["sqlite3.Cursor"] = None
|
||||
self.cur: dict[str, "sqlite3.Cursor"] = {}
|
||||
self.mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
self.mem_cur = sqlite3.connect(":memory:", check_same_thread=False).cursor()
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = 0.0
|
||||
@@ -69,7 +69,7 @@ class U2idx(object):
|
||||
|
||||
fsize = body["size"]
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
|
||||
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
@@ -101,7 +101,8 @@ class U2idx(object):
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
cur = sqlite3.connect(uri, 2, uri=True).cursor()
|
||||
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("up")').fetchone()
|
||||
self.log("ro: {}".format(db_path))
|
||||
except:
|
||||
@@ -112,7 +113,7 @@ class U2idx(object):
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2).cursor()
|
||||
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
@@ -120,10 +121,10 @@ class U2idx(object):
|
||||
|
||||
def search(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return [], []
|
||||
return [], [], False
|
||||
|
||||
q = ""
|
||||
v: Union[str, int] = ""
|
||||
@@ -275,7 +276,7 @@ class U2idx(object):
|
||||
have_up: bool,
|
||||
have_mt: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str]]:
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
done_flag: list[bool] = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
@@ -293,6 +294,7 @@ class U2idx(object):
|
||||
self.log("qs: {!r} {!r}".format(uq, uv))
|
||||
|
||||
ret = []
|
||||
seen_rps: set[str] = set()
|
||||
lim = min(lim, int(self.args.srch_hits))
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
@@ -311,16 +313,21 @@ class U2idx(object):
|
||||
|
||||
sret = []
|
||||
fk = flags.get("fk")
|
||||
dots = flags.get("dotsrch")
|
||||
c = cur.execute(uq, tuple(vuv))
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
lim -= 1
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
||||
if not dots and "/." in ("/" + rp):
|
||||
continue
|
||||
|
||||
if rp in seen_rps:
|
||||
continue
|
||||
|
||||
if not fk:
|
||||
suf = ""
|
||||
else:
|
||||
@@ -337,8 +344,12 @@ class U2idx(object):
|
||||
)[:fk]
|
||||
)
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
lim -= 1
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
seen_rps.add(rp)
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
w = hit["w"]
|
||||
@@ -357,17 +368,9 @@ class U2idx(object):
|
||||
done_flag.append(True)
|
||||
self.active_id = ""
|
||||
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y
|
||||
for x, y in zip(ret[:-1], ret[1:])
|
||||
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
|
||||
]
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
return ret, list(taglist.keys()), lim < 0
|
||||
|
||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||
for _ in range(self.timeout):
|
||||
|
||||
File diff suppressed because it is too large
@@ -14,6 +14,7 @@ import os
|
||||
import platform
|
||||
import re
|
||||
import select
|
||||
import shutil
|
||||
import signal
|
||||
import socket
|
||||
import stat
|
||||
@@ -30,7 +31,7 @@ from email.utils import formatdate
|
||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
|
||||
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
|
||||
from .__version__ import S_BUILD_DT, S_VERSION
|
||||
from .stolen import surrogateescape
|
||||
|
||||
@@ -143,6 +144,8 @@ SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
|
||||
|
||||
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
|
||||
|
||||
FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
201: "Created",
|
||||
@@ -168,6 +171,7 @@ HTTPCODE = {
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
503: "Service Unavailable",
|
||||
999: "MissingNo",
|
||||
}
|
||||
|
||||
|
||||
@@ -290,6 +294,19 @@ REKOBO_KEY = {
|
||||
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
|
||||
|
||||
pybin = sys.executable or ""
|
||||
if EXE:
|
||||
pybin = ""
|
||||
for zsg in "python3 python".split():
|
||||
try:
|
||||
zsg = shutil.which(zsg)
|
||||
if zsg:
|
||||
pybin = zsg
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
def py_desc() -> str:
|
||||
interp = platform.python_implementation()
|
||||
py_ver = ".".join([str(x) for x in sys.version_info])
|
||||
@@ -521,7 +538,7 @@ class _Unrecv(object):
|
||||
self.log = log
|
||||
self.buf: bytes = b""
|
||||
|
||||
def recv(self, nbytes: int) -> bytes:
|
||||
def recv(self, nbytes: int, spins: int = 1) -> bytes:
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
@@ -532,6 +549,10 @@ class _Unrecv(object):
|
||||
ret = self.s.recv(nbytes)
|
||||
break
|
||||
except socket.timeout:
|
||||
spins -= 1
|
||||
if spins <= 0:
|
||||
ret = b""
|
||||
break
|
||||
continue
|
||||
except:
|
||||
ret = b""
|
||||
@@ -574,7 +595,7 @@ class _LUnrecv(object):
|
||||
self.log = log
|
||||
self.buf = b""
|
||||
|
||||
def recv(self, nbytes: int) -> bytes:
|
||||
def recv(self, nbytes: int, spins: int) -> bytes:
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
@@ -593,7 +614,7 @@ class _LUnrecv(object):
|
||||
def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
|
||||
"""read an exact number of bytes"""
|
||||
try:
|
||||
ret = self.recv(nbytes)
|
||||
ret = self.recv(nbytes, 1)
|
||||
err = False
|
||||
except:
|
||||
ret = b""
|
||||
@@ -601,7 +622,7 @@ class _LUnrecv(object):
|
||||
|
||||
while not err and len(ret) < nbytes:
|
||||
try:
|
||||
ret += self.recv(nbytes - len(ret))
|
||||
ret += self.recv(nbytes - len(ret), 1)
|
||||
except OSError:
|
||||
err = True
|
||||
|
||||
@@ -652,6 +673,7 @@ class FHC(object):
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.cache: dict[str, FHC.CE] = {}
|
||||
self.aps: set[str] = set()
|
||||
|
||||
def close(self, path: str) -> None:
|
||||
try:
|
||||
@@ -663,6 +685,7 @@ class FHC(object):
|
||||
fh.close()
|
||||
|
||||
del self.cache[path]
|
||||
self.aps.remove(path)
|
||||
|
||||
def clean(self) -> None:
|
||||
if not self.cache:
|
||||
@@ -683,6 +706,7 @@ class FHC(object):
|
||||
return self.cache[path].fhs.pop()
|
||||
|
||||
def put(self, path: str, fh: typing.BinaryIO) -> None:
|
||||
self.aps.add(path)
|
||||
try:
|
||||
ce = self.cache[path]
|
||||
ce.fhs.append(fh)
|
||||
@@ -902,7 +926,8 @@ class Magician(object):
|
||||
class Garda(object):
|
||||
"""ban clients for repeated offenses"""
|
||||
|
||||
def __init__(self, cfg: str) -> None:
|
||||
def __init__(self, cfg: str, uniq: bool = True) -> None:
|
||||
self.uniq = uniq
|
||||
try:
|
||||
a, b, c = cfg.strip().split(",")
|
||||
self.lim = int(a)
|
||||
@@ -948,7 +973,7 @@ class Garda(object):
|
||||
# assume /64 clients; drop 4 groups
|
||||
ip = IPv6Address(ip).exploded[:-20]
|
||||
|
||||
if prev:
|
||||
if prev and self.uniq:
|
||||
if self.prev.get(ip) == prev:
|
||||
return 0, ip
|
||||
|
||||
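Garda (above) bans a client once it racks up a configured number of offenses inside a time window, and the new uniq flag lets identical repeat offenses count only once. A small sliding-window counter showing the core idea, without copyparty's IPv6 masking or the uniq handling; the "3,1,60" config string is an example in the same lim,minutes,ban-minutes shape.

# sliding-window offense counter, the core idea behind Garda above;
# the config string format mirrors the diff, the values are examples
import time

class MiniGarda:
    def __init__(self, cfg="5,60,1440"):
        lim, mins, ban_mins = [int(x) for x in cfg.split(",")]
        self.lim = lim            # offenses allowed...
        self.win = mins * 60      # ...within this many seconds
        self.ban = ban_mins * 60  # ban duration in seconds
        self.ct = {}              # ip -> list of offense timestamps

    def bonk(self, ip):
        now = time.time()
        hits = [t for t in self.ct.get(ip, []) if now - t < self.win]
        hits.append(now)
        self.ct[ip] = hits
        if len(hits) >= self.lim:
            return now + self.ban  # banned until this timestamp
        return 0

g = MiniGarda("3,1,60")
for _ in range(3):
    until = g.bonk("203.0.113.7")
print("banned until", until)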
@@ -1153,20 +1178,12 @@ def ren_open(
|
||||
fun = kwargs.pop("fun", open)
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
overwrite = kwargs.pop("overwrite", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with fun(fname, *args, **kwargs) as f:
|
||||
yield {"orz": (f, fname)}
|
||||
return
|
||||
|
||||
if overwrite:
|
||||
assert fdir
|
||||
fpath = os.path.join(fdir, fname)
|
||||
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||
yield {"orz": (f, fname)}
|
||||
return
|
||||
|
||||
if suffix:
|
||||
ext = fname.split(".")[-1]
|
||||
if len(ext) < 7:
|
||||
@@ -1212,12 +1229,15 @@ def ren_open(
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
|
||||
if ex.errno == errno.EINVAL and not asciified:
|
||||
# EPERM: android13
|
||||
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
|
||||
asciified = True
|
||||
bname, fname = [
|
||||
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
|
||||
for zs in [bname, fname]
|
||||
]
|
||||
zsl = []
|
||||
for zs in (bname, fname):
|
||||
zs = zs.encode("ascii", "replace").decode("ascii")
|
||||
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
|
||||
zsl.append(zs)
|
||||
bname, fname = zsl
|
||||
continue
|
||||
|
||||
# ENOTSUP: zfs on ubuntu 20.04
|
||||
@@ -1281,7 +1301,7 @@ class MultipartParser(object):
|
||||
rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
|
||||
(only the fallback non-js uploader relies on these filenames)
|
||||
"""
|
||||
for ln in read_header(self.sr):
|
||||
for ln in read_header(self.sr, 2, 2592000):
|
||||
self.log(ln)
|
||||
|
||||
m = self.re_ctype.match(ln)
|
||||
@@ -1428,7 +1448,7 @@ class MultipartParser(object):
|
||||
for buf in iterable:
|
||||
ret += buf
|
||||
if len(ret) > max_len:
|
||||
raise Pebkac(400, "field length is too long")
|
||||
raise Pebkac(422, "field length is too long")
|
||||
|
||||
return ret
|
||||
|
||||
@@ -1481,15 +1501,15 @@ def get_boundary(headers: dict[str, str]) -> str:
|
||||
return m.group(2)
|
||||
|
||||
|
||||
def read_header(sr: Unrecv) -> list[str]:
|
||||
def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
||||
t0 = time.time()
|
||||
ret = b""
|
||||
while True:
|
||||
if time.time() - t0 > 120:
|
||||
if time.time() - t0 >= t_tot:
|
||||
return []
|
||||
|
||||
try:
|
||||
ret += sr.recv(1024)
|
||||
ret += sr.recv(1024, t_idle // 2)
|
||||
except:
|
||||
if not ret:
|
||||
return []
|
||||
@@ -1513,10 +1533,32 @@ def read_header(sr: Unrecv) -> list[str]:
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
|
||||
|
||||
|
||||
def rand_name(fdir: str, fn: str, rnd: int) -> str:
|
||||
ok = False
|
||||
try:
|
||||
ext = "." + fn.rsplit(".", 1)[1]
|
||||
except:
|
||||
ext = ""
|
||||
|
||||
for extra in range(16):
|
||||
for _ in range(16):
|
||||
if ok:
|
||||
break
|
||||
|
||||
nc = rnd + extra
|
||||
nb = int((6 + 6 * nc) / 8)
|
||||
zb = os.urandom(nb)
|
||||
zb = base64.urlsafe_b64encode(zb)
|
||||
fn = zb[:nc].decode("utf-8") + ext
|
||||
ok = not os.path.exists(fsenc(os.path.join(fdir, fn)))
|
||||
|
||||
return fn
|
||||
|
||||
|
||||
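rand_name() above picks a random urlsafe-base64 filename of the requested length, keeping the original extension, and retries with a progressively longer name whenever the candidate already exists in fdir. A quick usage sketch, assuming the function is importable from copyparty.util as the hunk suggests.

# quick usage sketch; the import path is assumed from the diff context
import os
import tempfile
from copyparty.util import rand_name

fdir = tempfile.mkdtemp()
fn = rand_name(fdir, "upload.jpg", 8)  # 8 random urlsafe-base64 chars + ".jpg"
print(fn)                              # e.g. "Xk3_9qZa.jpg"
with open(os.path.join(fdir, fn), "wb") as f:
    f.write(b"example")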
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
return base64.urlsafe_b64encode(
|
||||
hashlib.sha512(
|
||||
"{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
|
||||
("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
|
||||
).digest()
|
||||
).decode("ascii")
|
||||
|
||||
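gen_filekey() derives a per-file key by sha512-hashing the salt together with the path, size and inode, then urlsafe-base64 encoding the digest (the hunk above only switches the string building to %-formatting). An equivalent one-off computation for illustration, with made-up inputs.

# equivalent one-off computation with made-up inputs
import base64
import hashlib

salt, fspath, fsize, inode = "examplesalt", "/data/music/a.flac", 31337, 42
key = base64.urlsafe_b64encode(
    hashlib.sha512(
        ("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
    ).digest()
).decode("ascii")
print(key[:16])  # callers typically truncate to the volume's fk length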
@@ -1556,7 +1598,7 @@ def gen_filekey_dbg(
|
||||
|
||||
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
||||
v = v.replace(";", "")
|
||||
v = v.replace("%", "%25").replace(";", "%3B")
|
||||
if dur:
|
||||
exp = formatdate(time.time() + dur, usegmt=True)
|
||||
else:
|
||||
@@ -1589,7 +1631,12 @@ def unhumanize(sz: str) -> int:
|
||||
pass
|
||||
|
||||
mc = sz[-1:].lower()
|
||||
mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1)
|
||||
mi = {
|
||||
"k": 1024,
|
||||
"m": 1024 * 1024,
|
||||
"g": 1024 * 1024 * 1024,
|
||||
"t": 1024 * 1024 * 1024 * 1024,
|
||||
}.get(mc, 1)
|
||||
return int(float(sz[:-1]) * mi)
|
||||
|
||||
|
||||
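unhumanize() now also understands a "t" suffix, so sizes like "2t" parse to tebibytes alongside the existing k/m/g multipliers. A few example conversions, assuming the function as shown above is importable from copyparty.util.

# example conversions through unhumanize(); import path assumed
from copyparty.util import unhumanize

print(unhumanize("64k"))   # 65536
print(unhumanize("1.5g"))  # 1610612736
print(unhumanize("2t"))    # 2199023255552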
@@ -1625,7 +1672,7 @@ def uncyg(path: str) -> str:
|
||||
if len(path) > 2 and path[2] != "/":
|
||||
return path
|
||||
|
||||
return "{}:\\{}".format(path[1], path[3:])
|
||||
return "%s:\\%s" % (path[1], path[3:])
|
||||
|
||||
|
||||
def undot(path: str) -> str:
|
||||
@@ -1668,7 +1715,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
|
||||
|
||||
bad = ["con", "prn", "aux", "nul"]
|
||||
for n in range(1, 10):
|
||||
bad += "com{0} lpt{0}".format(n).split(" ")
|
||||
bad += ("com%s lpt%s" % (n, n)).split(" ")
|
||||
|
||||
if fn.lower().split(".")[0] in bad:
|
||||
fn = "_" + fn
|
||||
@@ -1690,7 +1737,7 @@ def relchk(rp: str) -> str:
|
||||
|
||||
def absreal(fpath: str) -> str:
|
||||
try:
|
||||
return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
|
||||
return fsdec(os.path.abspath(os.path.realpath(afsenc(fpath))))
|
||||
except:
|
||||
if not WINDOWS:
|
||||
raise
|
||||
@@ -1810,6 +1857,32 @@ def _w8enc3(txt: str) -> bytes:
|
||||
return txt.encode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
def _msdec(txt: bytes) -> str:
|
||||
ret = txt.decode(FS_ENCODING, "surrogateescape")
|
||||
return ret[4:] if ret.startswith("\\\\?\\") else ret
|
||||
|
||||
|
||||
def _msaenc(txt: str) -> bytes:
|
||||
return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape")
|
||||
|
||||
|
||||
def _uncify(txt: str) -> str:
|
||||
txt = txt.replace("/", "\\")
|
||||
if ":" not in txt and not txt.startswith("\\\\"):
|
||||
txt = absreal(txt)
|
||||
|
||||
return txt if txt.startswith("\\\\") else "\\\\?\\" + txt
|
||||
|
||||
|
||||
def _msenc(txt: str) -> bytes:
|
||||
txt = txt.replace("/", "\\")
|
||||
if ":" not in txt and not txt.startswith("\\\\"):
|
||||
txt = absreal(txt)
|
||||
|
||||
ret = txt.encode(FS_ENCODING, "surrogateescape")
|
||||
return ret if ret.startswith(b"\\\\") else b"\\\\?\\" + ret
|
||||
|
||||
|
||||
w8dec = _w8dec3 if not PY2 else _w8dec2
|
||||
w8enc = _w8enc3 if not PY2 else _w8enc2
|
||||
|
||||
@@ -1824,9 +1897,16 @@ def w8b64enc(txt: str) -> str:
|
||||
return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
|
||||
|
||||
|
||||
if not PY2 or not WINDOWS:
|
||||
fsenc = w8enc
|
||||
if not PY2 and WINDOWS:
|
||||
sfsenc = w8enc
|
||||
afsenc = _msaenc
|
||||
fsenc = _msenc
|
||||
fsdec = _msdec
|
||||
uncify = _uncify
|
||||
elif not PY2 or not WINDOWS:
|
||||
fsenc = afsenc = sfsenc = w8enc
|
||||
fsdec = w8dec
|
||||
uncify = str
|
||||
else:
|
||||
# moonrunes become \x3f with bytestrings,
|
||||
# losing mojibake support is worth
|
||||
@@ -1836,8 +1916,9 @@ else:
|
||||
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
||||
return txt
|
||||
|
||||
fsenc = _not_actually_mbcs_enc
|
||||
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
||||
fsdec = _not_actually_mbcs_dec
|
||||
uncify = str
|
||||
|
||||
|
||||
def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
|
||||
@@ -1948,6 +2029,8 @@ def shut_socket(log: "NamedLogger", sck: socket.socket, timeout: int = 3) -> Non
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
except:
|
||||
pass
|
||||
except Exception as ex:
|
||||
log("shut({}): {}".format(fd, ex), "90")
|
||||
finally:
|
||||
td = time.time() - t0
|
||||
if td >= 1:
|
||||
@@ -2199,7 +2282,7 @@ def rmdirs(
|
||||
dirs = [os.path.join(top, x) for x in dirs]
|
||||
ok = []
|
||||
ng = []
|
||||
for d in dirs[::-1]:
|
||||
for d in reversed(dirs):
|
||||
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||
ok += a
|
||||
ng += b
|
||||
@@ -2214,18 +2297,21 @@
    return ok, ng


def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:
    """rmdir on self, then all parents"""
    if top == stop:
        return [], [top]

    try:
        os.rmdir(fsenc(top))
    except:
        return [], [top]

    par = os.path.dirname(top)
    if not par:
    if not par or par == stop:
        return [top], []

    ok, ng = rmdirs_up(par)
    ok, ng = rmdirs_up(par, stop)
    return [top] + ok, ng
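`rmdirs_up` gained a `stop` argument so the upward pruning of empty directories can no longer climb past the volume root. A self-contained sketch of the same recursion; `prune_up` and the example paths are illustrative:

```python
import os

def prune_up(top: str, stop: str) -> tuple[list[str], list[str]]:
    # rmdir `top` if it is empty, then walk towards the root,
    # refusing to delete (or climb past) the `stop` directory
    if top == stop:
        return [], [top]
    try:
        os.rmdir(top)
    except OSError:
        return [], [top]

    parent = os.path.dirname(top)
    if not parent or parent == stop:
        return [top], []

    ok, ng = prune_up(parent, stop)
    return [top] + ok, ng

# example: clean up /srv/files/a/b/c and its empty parents,
# but never touch /srv/files itself
# ok, ng = prune_up("/srv/files/a/b/c", "/srv/files")
```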
@@ -2246,7 +2332,7 @@ def unescape_cookie(orig: str) -> str:
                ret += chr(int(esc[1:], 16))
            except:
                ret += esc
            esc = ""
            esc = ""

        else:
            ret += ch
@@ -2346,7 +2432,7 @@ def killtree(root: int) -> None:


def runcmd(
    argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
    argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
) -> tuple[int, str, str]:
    kill = ka.pop("kill", "t")  # [t]ree [m]ain [n]one
    capture = ka.pop("capture", 3)  # 0=none 1=stdout 2=stderr 3=both
@@ -2399,7 +2485,7 @@ def chkcmd(argv: Union[list[bytes], list[str]], **ka: Any) -> tuple[str, str]:
    return sout, serr


def mchkcmd(argv: Union[list[bytes], list[str]], timeout: int = 10) -> None:
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: float = 10) -> None:
    if PY2:
        with open(os.devnull, "wb") as f:
            rv = sp.call(argv, stdout=f, stderr=f)
@@ -2459,23 +2545,14 @@ def retchk(
        raise Exception(t)


def _runhook(
    log: "NamedLogger",
    cmd: str,
    ap: str,
    vp: str,
    host: str,
    uname: str,
    ip: str,
    at: float,
    sz: int,
    txt: str,
) -> bool:
def _parsehook(
    log: Optional["NamedLogger"], cmd: str
) -> tuple[bool, bool, bool, float, dict[str, Any], str]:
    chk = False
    fork = False
    jtxt = False
    wait = 0
    tout = 0
    wait = 0.0
    tout = 0.0
    kill = "t"
    cap = 0
    ocmd = cmd
@@ -2495,34 +2572,111 @@ def _runhook(
            cap = int(arg[1:])  # 0=none 1=stdout 2=stderr 3=both
        elif arg.startswith("k"):
            kill = arg[1:]  # [t]ree [m]ain [n]one
        elif arg.startswith("i"):
            pass
        else:
            t = "hook: invalid flag {} in {}"
            log(t.format(arg, ocmd))
            (log or print)(t.format(arg, ocmd))

    env = os.environ.copy()
    # try:
    pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
    zsl = [str(pypath)] + [str(x) for x in sys.path if x]
    pypath = str(os.pathsep.join(zsl))
    env["PYTHONPATH"] = pypath
    # except: if not E.ox: raise
    try:
        if EXE:
            raise Exception()

    ka = {
        pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
        zsl = [str(pypath)] + [str(x) for x in sys.path if x]
        pypath = str(os.pathsep.join(zsl))
        env["PYTHONPATH"] = pypath
    except:
        if not EXE:
            raise

    sp_ka = {
        "env": env,
        "timeout": tout,
        "kill": kill,
        "capture": cap,
    }

    if cmd.startswith("~"):
        cmd = os.path.expanduser(cmd)

    return chk, fork, jtxt, wait, sp_ka, cmd
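`_parsehook` peels a comma-separated flag prefix off the configured hook command (the `c`/`k`/`i` branches above are part of that vocabulary) and packs the subprocess options into `sp_ka` for `runcmd`. A rough standalone illustration of that parsing pattern; the exact flag alphabet and the example hook string are assumptions, not taken from this diff:

```python
def parse_hook_flags(spec: str) -> tuple[dict, str]:
    # simplified stand-in: leading comma-separated single-letter flags,
    # followed by the command itself as the last element
    opts = {"timeout": 0.0, "kill": "t", "capture": 0}
    parts = spec.split(",")
    for arg in parts[:-1]:
        if arg.startswith("t"):
            opts["timeout"] = float(arg[1:])
        elif arg.startswith("c") and arg[1:]:
            opts["capture"] = int(arg[1:])  # 0=none 1=stdout 2=stderr 3=both
        elif arg.startswith("k"):
            opts["kill"] = arg[1:] or "t"  # [t]ree [m]ain [n]one
    return opts, parts[-1]

print(parse_hook_flags("t30,c3,kt,bin/hooks/notify.py"))
```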
def runihook(
    log: Optional["NamedLogger"],
    cmd: str,
    vol: "VFS",
    ups: list[tuple[str, int, int, str, str, str, int]],
) -> bool:
    ocmd = cmd
    chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
    bcmd = [sfsenc(cmd)]
    if cmd.endswith(".py"):
        bcmd = [sfsenc(pybin)] + bcmd

    vps = [vjoin(*list(s3dec(x[3], x[4]))) for x in ups]
    aps = [djoin(vol.realpath, x) for x in vps]
    if jtxt:
        # 0w 1mt 2sz 3rd 4fn 5ip 6at
        ja = [
            {
                "ap": uncify(ap),  # utf8 for json
                "vp": vp,
                "wark": x[0][:16],
                "mt": x[1],
                "sz": x[2],
                "ip": x[5],
                "at": x[6],
            }
            for x, vp, ap in zip(ups, vps, aps)
        ]
        sp_ka["sin"] = json.dumps(ja).encode("utf-8", "replace")
    else:
        sp_ka["sin"] = b"\n".join(fsenc(x) for x in aps)

    t0 = time.time()
    if fork:
        Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
    else:
        rc, v, err = runcmd(bcmd, **sp_ka)  # type: ignore
        if chk and rc:
            retchk(rc, bcmd, err, log, 5)
            return False

    wait -= time.time() - t0
    if wait > 0:
        time.sleep(wait)

    return True
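When the json flag is set, the hook started by `runihook` receives the whole upload batch as a JSON array on its stdin (the `sp_ka["sin"]` payload built above). A hook script could consume it roughly like this; only the key names come from the code, everything else is an assumed example:

```python
#!/usr/bin/env python3
# sketch of an index-hook reading the batch that runihook pipes in
import json
import sys

for up in json.load(sys.stdin):
    # keys built above: ap, vp, wark, mt, sz, ip, at
    print("%s uploaded %s (%d bytes) at %s -> %s"
          % (up["ip"], up["vp"], up["sz"], up["at"], up["ap"]))
```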
def _runhook(
    log: Optional["NamedLogger"],
    cmd: str,
    ap: str,
    vp: str,
    host: str,
    uname: str,
    mt: float,
    sz: int,
    ip: str,
    at: float,
    txt: str,
) -> bool:
    ocmd = cmd
    chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
    if jtxt:
        ja = {
            "ap": ap,
            "vp": vp,
            "mt": mt,
            "sz": sz,
            "ip": ip,
            "at": at or time.time(),
            "host": host,
            "user": uname,
            "at": at or time.time(),
            "sz": sz,
            "txt": txt,
        }
        arg = json.dumps(ja)
@@ -2531,15 +2685,15 @@ def _runhook(

    acmd = [cmd, arg]
    if cmd.endswith(".py"):
        acmd = [sys.executable] + acmd
        acmd = [pybin] + acmd

    bcmd = [fsenc(x) for x in acmd]
    bcmd = [fsenc(x) if x == ap else sfsenc(x) for x in acmd]

    t0 = time.time()
    if fork:
        Daemon(runcmd, ocmd, [acmd], ka=ka)
        Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
    else:
        rc, v, err = runcmd(bcmd, **ka)  # type: ignore
        rc, v, err = runcmd(bcmd, **sp_ka)  # type: ignore
        if chk and rc:
            retchk(rc, bcmd, err, log, 5)
            return False
@@ -2552,24 +2706,25 @@


def runhook(
    log: "NamedLogger",
    log: Optional["NamedLogger"],
    cmds: list[str],
    ap: str,
    vp: str,
    host: str,
    uname: str,
    mt: float,
    sz: int,
    ip: str,
    at: float,
    sz: int,
    txt: str,
) -> bool:
    vp = vp.replace("\\", "/")
    for cmd in cmds:
        try:
            if not _runhook(log, cmd, ap, vp, host, uname, ip, at, sz, txt):
            if not _runhook(log, cmd, ap, vp, host, uname, mt, sz, ip, at, txt):
                return False
        except Exception as ex:
            log("hook: {}".format(ex))
            (log or print)("hook: {}".format(ex))
            if ",c," in "," + cmd:
                return False
            break
@@ -2577,6 +2732,34 @@ def runhook(
    return True
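The `",c," in "," + cmd` test above is what decides whether a hook that raised an exception aborts the whole chain: only hooks whose comma-separated flag prefix contains `c` are treated as critical. A tiny sketch of the same membership check; the example hook strings are made up:

```python
def hook_is_critical(cmd: str) -> bool:
    # same test as in runhook: a "c" element anywhere in the
    # comma-separated prefix marks the hook as chain-aborting
    return ",c," in "," + cmd

print(hook_is_critical("c,j,t5,bin/hooks/reject.py"))  # True
print(hook_is_critical("j,bin/hooks/log.py"))          # False
```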
def loadpy(ap: str, hot: bool) -> Any:
    """
    a nice can of worms capable of causing all sorts of bugs
    depending on what other inconveniently named files happen
    to be in the same folder
    """
    if ap.startswith("~"):
        ap = os.path.expanduser(ap)

    mdir, mfile = os.path.split(absreal(ap))
    mname = mfile.rsplit(".", 1)[0]
    sys.path.insert(0, mdir)

    if PY2:
        mod = __import__(mname)
        if hot:
            reload(mod)
    else:
        import importlib

        mod = importlib.import_module(mname)
        if hot:
            importlib.reload(mod)

    sys.path.remove(mdir)
    return mod


def gzip_orig_sz(fn: str) -> int:
    with open(fsenc(fn), "rb") as f:
        f.seek(-4, 2)
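`gzip_orig_sz` relies on the gzip trailer: per RFC 1952 the last 4 bytes of a gzip stream are the uncompressed size modulo 2^32, little-endian, so seeking to -4 from the end and unpacking one `<I` is enough. A standalone version of the trick (the function name here is illustrative):

```python
import struct

def gzip_uncompressed_size(path: str) -> int:
    # ISIZE: last 4 bytes of a gzip file, uncompressed length mod 2**32
    with open(path, "rb") as f:
        f.seek(-4, 2)  # 2 == os.SEEK_END
        return struct.unpack("<I", f.read(4))[0]
```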
@@ -6,7 +6,7 @@ pk: $(addsuffix .gz, $(wildcard *.js *.css))
un: $(addsuffix .un, $(wildcard *.gz))

%.gz: %
	pigz -11 -J 34 -I 5730 $<
	pigz -11 -J 34 -I 573 $<

%.un: %
	pigz -d $<
1 copyparty/web/a/u2c.py (symbolic link)
@@ -0,0 +1 @@
../../../bin/u2c.py
@@ -1 +0,0 @@
../../../bin/up2k.py
@@ -27,8 +27,8 @@ window.baguetteBox = (function () {
|
||||
isOverlayVisible = false,
|
||||
touch = {}, // start-pos
|
||||
touchFlag = false, // busy
|
||||
re_i = /.+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||
re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
|
||||
re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||
re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
|
||||
anims = ['slideIn', 'fadeIn', 'none'],
|
||||
data = {}, // all galleries
|
||||
imagesElements = [],
|
||||
@@ -127,7 +127,7 @@ window.baguetteBox = (function () {
|
||||
var gallery = [];
|
||||
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
|
||||
var imageElementClickHandler = function (e) {
|
||||
if (ctrl(e))
|
||||
if (ctrl(e) || e && e.shiftKey)
|
||||
return true;
|
||||
|
||||
e.preventDefault ? e.preventDefault() : e.returnValue = false;
|
||||
@@ -310,7 +310,7 @@ window.baguetteBox = (function () {
|
||||
options = {};
|
||||
setOptions(o);
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -356,7 +356,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
setVmode();
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function findfile() {
|
||||
@@ -376,7 +376,12 @@ window.baguetteBox = (function () {
|
||||
else
|
||||
(vid() || ebi('bbox-overlay')).requestFullscreen();
|
||||
}
|
||||
catch (ex) { alert(ex); }
|
||||
catch (ex) {
|
||||
if (IPHONE)
|
||||
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
|
||||
else
|
||||
alert(ex);
|
||||
}
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
@@ -580,6 +585,7 @@ window.baguetteBox = (function () {
|
||||
function hideOverlay(e) {
|
||||
ev(e);
|
||||
playvid(false);
|
||||
removeFromCache('#files');
|
||||
if (options.noScrollbars) {
|
||||
document.documentElement.style.overflowY = 'auto';
|
||||
document.body.style.overflowY = 'auto';
|
||||
@@ -812,10 +818,16 @@ window.baguetteBox = (function () {
|
||||
}
|
||||
|
||||
function vid() {
|
||||
if (currentIndex >= imagesElements.length)
|
||||
return;
|
||||
|
||||
return imagesElements[currentIndex].querySelector('video');
|
||||
}
|
||||
|
||||
function vidimg() {
|
||||
if (currentIndex >= imagesElements.length)
|
||||
return;
|
||||
|
||||
return imagesElements[currentIndex].querySelector('img, video');
|
||||
}
|
||||
|
||||
@@ -961,7 +973,7 @@ window.baguetteBox = (function () {
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
|
||||
if (Date.now() - ctime <= 500)
|
||||
if (Date.now() - ctime <= 500 && !IPHONE)
|
||||
tglfull();
|
||||
|
||||
ctime = Date.now();
|
||||
|
||||
@@ -55,6 +55,7 @@
|
||||
--u2-sbtn-b1: #999;
|
||||
--u2-txt-bg: var(--bg-u5);
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||
--u2-tab-1-fg: #fd7;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
@@ -93,6 +94,7 @@
|
||||
--g-fsel-bg: #d39;
|
||||
--g-fsel-b1: #f4a;
|
||||
--g-fsel-ts: #804;
|
||||
--g-dfg: var(--srv-3);
|
||||
--g-fg: var(--a-hil);
|
||||
--g-bg: var(--bg-u2);
|
||||
--g-b1: var(--bg-u4);
|
||||
@@ -269,6 +271,7 @@ html.bz {
|
||||
--btn-1h-fg: #000;
|
||||
--txt-sh: a;
|
||||
|
||||
--u2-tab-b1: var(--bg-u5);
|
||||
--u2-tab-1-fg: var(--fg-max);
|
||||
--u2-tab-1-bg: var(--bg);
|
||||
|
||||
@@ -327,6 +330,8 @@ html.c {
|
||||
}
|
||||
html.cz {
|
||||
--bgg: var(--bg-u2);
|
||||
--srv-3: #fff;
|
||||
--u2-tab-b1: var(--bg-d3);
|
||||
}
|
||||
html.cy {
|
||||
--fg: #fff;
|
||||
@@ -354,6 +359,7 @@ html.cy {
|
||||
--chk-fg: #fd0;
|
||||
|
||||
--srv-1: #f00;
|
||||
--srv-3: #fff;
|
||||
--op-aa-bg: #fff;
|
||||
|
||||
--u2-b1-bg: #f00;
|
||||
@@ -408,10 +414,11 @@ html.dz {
|
||||
--op-aa-bg: var(--bg-d2);
|
||||
--op-a-sh: rgba(0,0,0,0.5);
|
||||
|
||||
--u2-btn-b1: #999;
|
||||
--u2-sbtn-b1: #999;
|
||||
--u2-btn-b1: var(--fg-weak);
|
||||
--u2-sbtn-b1: var(--fg-weak);
|
||||
--u2-txt-bg: var(--bg-u5);
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: var(--fg-weak);
|
||||
--u2-tab-1-fg: #fff;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
@@ -420,6 +427,12 @@ html.dz {
|
||||
--u2-b-fg: #fff;
|
||||
--u2-b1-bg: #3a3;
|
||||
--u2-b2-bg: #3a3;
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-b1: var(--bg-u5);
|
||||
--u2-o-h-bg: var(--fg-weak);
|
||||
--u2-o-1-bg: var(--fg-weak);
|
||||
--u2-o-1-b1: var(--a);
|
||||
--u2-o-1h-bg: var(--a);
|
||||
--u2-inf-bg: #07a;
|
||||
--u2-inf-b1: #0be;
|
||||
--u2-ok-bg: #380;
|
||||
@@ -718,14 +731,13 @@ html.y #files thead th {
|
||||
margin: 0;
|
||||
padding: .3em .5em;
|
||||
background: var(--bg);
|
||||
max-width: var(--file-td-w);
|
||||
word-wrap: break-word;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr:nth-child(2n) td {
|
||||
background: var(--row-alt);
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files td+td {
|
||||
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
|
||||
}
|
||||
@@ -793,22 +805,31 @@ html.y #path a:hover {
|
||||
}
|
||||
.logue {
|
||||
padding: .2em 0;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
}
|
||||
.logue.hidden,
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
#doc>iframe,
|
||||
.logue>iframe {
|
||||
background: var(--bgg);
|
||||
border-radius: .3em;
|
||||
border: 1px solid var(--bgg);
|
||||
border-width: 0 .3em 0 .3em;
|
||||
border-radius: .5em;
|
||||
visibility: hidden;
|
||||
border: none;
|
||||
margin: 0 -.3em;
|
||||
width: 100%;
|
||||
height: 0;
|
||||
}
|
||||
#doc>iframe.focus,
|
||||
.logue>iframe.focus {
|
||||
box-shadow: 0 0 .1em .1em var(--a);
|
||||
}
|
||||
#pro.logue>iframe {
|
||||
height: 100vh;
|
||||
}
|
||||
#pro.logue {
|
||||
margin-bottom: .8em;
|
||||
}
|
||||
@@ -833,12 +854,13 @@ html.y #path a:hover {
|
||||
.mdo {
|
||||
max-width: 52em;
|
||||
}
|
||||
.mdo.sb {
|
||||
max-width: unset;
|
||||
.mdo.sb,
|
||||
#epi.logue.mdo>iframe {
|
||||
max-width: 54em;
|
||||
}
|
||||
.mdo,
|
||||
.mdo * {
|
||||
line-height: 1.4em;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#srv_info,
|
||||
#srv_info2,
|
||||
@@ -956,6 +978,9 @@ html.y #path a:hover {
|
||||
#ggrid>a.dir:before {
|
||||
content: '📂';
|
||||
}
|
||||
#ggrid>a.dir>span {
|
||||
color: var(--g-dfg);
|
||||
}
|
||||
#ggrid>a.au:before {
|
||||
content: '💾';
|
||||
}
|
||||
@@ -1002,6 +1027,9 @@ html.np_open #ggrid>a.au:before {
|
||||
background: var(--g-sel-bg);
|
||||
border-color: var(--g-sel-b1);
|
||||
}
|
||||
#ggrid>a.sel>span {
|
||||
color: var(--g-sel-fg);
|
||||
}
|
||||
#ggrid>a.sel,
|
||||
#ggrid>a[tt].sel {
|
||||
border-top: 1px solid var(--g-fsel-b1);
|
||||
@@ -1055,6 +1083,9 @@ html.np_open #ggrid>a.au:before {
|
||||
background: var(--bg-d3);
|
||||
box-shadow: -.2em .2em 0 var(--f-sel-sh), -.2em -.2em 0 var(--f-sel-sh);
|
||||
}
|
||||
#player {
|
||||
display: none;
|
||||
}
|
||||
#widget {
|
||||
position: fixed;
|
||||
font-size: 1.4em;
|
||||
@@ -1137,10 +1168,10 @@ html.y #widget.open {
|
||||
background: #fff;
|
||||
background: var(--bg-u3);
|
||||
}
|
||||
#wfm, #wzip, #wnp {
|
||||
#wfs, #wfm, #wzip, #wnp {
|
||||
display: none;
|
||||
}
|
||||
#wzip, #wnp {
|
||||
#wfs, #wzip, #wnp {
|
||||
margin-right: .2em;
|
||||
padding-right: .2em;
|
||||
border: 1px solid var(--bg-u5);
|
||||
@@ -1152,6 +1183,7 @@ html.y #widget.open {
|
||||
padding-left: .2em;
|
||||
border-left-width: .1em;
|
||||
}
|
||||
#wfs.act,
|
||||
#wfm.act {
|
||||
display: inline-block;
|
||||
}
|
||||
@@ -1175,6 +1207,13 @@ html.y #widget.open {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
#wfs {
|
||||
font-size: .36em;
|
||||
text-align: right;
|
||||
line-height: 1.3em;
|
||||
padding: 0 .3em 0 0;
|
||||
border-width: 0 .25em 0 0;
|
||||
}
|
||||
#wfm span,
|
||||
#wnp span {
|
||||
font-size: .6em;
|
||||
@@ -1190,7 +1229,8 @@ html.y #widget.open {
|
||||
#wfm a.hide {
|
||||
display: none;
|
||||
}
|
||||
#files tbody tr.fcut td {
|
||||
#files tbody tr.fcut td,
|
||||
#ggrid>a.fcut {
|
||||
animation: fcut .5s ease-out;
|
||||
}
|
||||
@keyframes fcut {
|
||||
@@ -1313,6 +1353,10 @@ html.y #ops svg circle {
|
||||
padding: .3em .6em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#noie {
|
||||
color: #b60;
|
||||
margin: 0 0 0 .5em;
|
||||
}
|
||||
.opbox {
|
||||
padding: .5em;
|
||||
border-radius: 0 .3em .3em 0;
|
||||
@@ -1576,6 +1620,12 @@ html.cz .tgl.btn.on {
|
||||
list-style: none;
|
||||
border-top: 1px solid var(--bg-u5);
|
||||
}
|
||||
#tree li.offline>a:first-child:before {
|
||||
content: '❌';
|
||||
position: absolute;
|
||||
margin-left: -.25em;
|
||||
z-index: 3;
|
||||
}
|
||||
#tree ul a.sel {
|
||||
background: #000;
|
||||
background: var(--bg-d3);
|
||||
@@ -1717,6 +1767,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
display: none;
|
||||
}
|
||||
.ghead {
|
||||
background: var(--bg-u2);
|
||||
border-radius: .3em;
|
||||
padding: .2em .5em;
|
||||
line-height: 2.3em;
|
||||
@@ -2430,7 +2481,7 @@ html.y #bbox-overlay figcaption a {
|
||||
width: 21em;
|
||||
}
|
||||
#u2cards {
|
||||
padding: 1em 1em .3em 1em;
|
||||
padding: 1em 1em .42em 1em;
|
||||
margin: 0 auto;
|
||||
white-space: nowrap;
|
||||
text-align: center;
|
||||
@@ -2455,7 +2506,8 @@ html.y #bbox-overlay figcaption a {
|
||||
#u2cards a {
|
||||
padding: .2em 1em;
|
||||
background: var(--u2-tab-bg);
|
||||
border: 1px solid rgba(128,128,128,0.8);
|
||||
border: 1px solid #999;
|
||||
border-color: var(--u2-tab-b1);
|
||||
border-width: 0 0 1px 0;
|
||||
}
|
||||
#u2cards a:first-child {
|
||||
@@ -2913,6 +2965,7 @@ html.b #treepar {
|
||||
html.b #wrap {
|
||||
margin-top: 2em;
|
||||
}
|
||||
html.by .ghead,
|
||||
html.bz .ghead {
|
||||
background: var(--bg);
|
||||
padding: .2em 0;
|
||||
|
||||
@@ -29,17 +29,17 @@
|
||||
|
||||
<div id="op_player" class="opview opbox opwide"></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="op_bup" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
<input type="file" name="f" multiple /><br />
|
||||
<input type="submit" value="start upload">
|
||||
</form>
|
||||
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
|
||||
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<div id="op_mkdir" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
📂<input type="text" name="name" class="i" placeholder="awesome mix vol.1">
|
||||
@@ -55,7 +55,7 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox act">
|
||||
<div id="op_msg" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
📟<input type="text" name="msg" class="i" placeholder="lorem ipsum dolor sit amet">
|
||||
<input type="submit" value="send msg to srv log">
|
||||
@@ -121,7 +121,7 @@
|
||||
|
||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||
|
||||
<h2><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
@@ -135,11 +135,14 @@
|
||||
|
||||
<script>
|
||||
var SR = {{ r|tojson }},
|
||||
TS = "{{ ts }}",
|
||||
acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
dgrid = {{ dgrid|tojson }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
s_name = "{{ s_name }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
@@ -154,6 +157,8 @@
|
||||
sb_lg = "{{ sb_lg }}",
|
||||
lifetime = {{ lifetime }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
idxh = {{ idxh }},
|
||||
frand = {{ frand|tojson }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
</head>
|
||||
|
||||
@@ -212,6 +212,8 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
try {
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
if (!have_emp)
|
||||
md_html = DOMPurify.sanitize(md_html);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
@@ -231,11 +233,11 @@ function convert_markdown(md_text, dest_dom) {
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
var href = nodes[a].getAttribute('href');
|
||||
var txt = nodes[a].textContent;
|
||||
var txt = nodes[a].innerHTML;
|
||||
|
||||
if (!txt)
|
||||
nodes[a].textContent = href;
|
||||
else if (href !== txt)
|
||||
else if (href !== txt && !nodes[a].className)
|
||||
nodes[a].className = 'vis';
|
||||
}
|
||||
|
||||
|
||||
@@ -278,6 +278,7 @@ function Modpoll() {
|
||||
return;
|
||||
|
||||
var new_md = this.responseText,
|
||||
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
|
||||
server_ref = server_md.replace(/\r/g, ''),
|
||||
server_now = new_md.replace(/\r/g, '');
|
||||
|
||||
@@ -285,6 +286,7 @@ function Modpoll() {
|
||||
if (r.initial && server_ref != server_now)
|
||||
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
|
||||
dom_src.value = server_md = new_md;
|
||||
last_modified = new_mt;
|
||||
draw_md();
|
||||
}, null);
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
|
||||
@@ -36,6 +36,11 @@ a {
|
||||
td a {
|
||||
margin: 0;
|
||||
}
|
||||
#w {
|
||||
color: #fff;
|
||||
background: #940;
|
||||
border-color: #b70;
|
||||
}
|
||||
.af,
|
||||
.logout {
|
||||
float: right;
|
||||
@@ -175,15 +180,19 @@ html.z a.g {
|
||||
border-color: #af4;
|
||||
box-shadow: 0 .3em 1em #7d0;
|
||||
}
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
border: 1px solid #c2c;
|
||||
border-width: 1px 0 0 0;
|
||||
input {
|
||||
color: #a50;
|
||||
background: #fff;
|
||||
border: 1px solid #a50;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
margin: 0 .5em 0 0;
|
||||
}
|
||||
html.z input {
|
||||
color: #fff;
|
||||
background: #626;
|
||||
border-color: #c2c;
|
||||
}
|
||||
html.z .num {
|
||||
border-color: #777;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
@@ -89,17 +89,20 @@
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
<ul>
|
||||
<div>
|
||||
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
{% if ahttps %}
|
||||
<a id="w" href="{{ ahttps }}">switch to https</a>
|
||||
{% endif %}
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
{%- if not this.args.nb %}
|
||||
<span id="pb"><span>powered by</span> <a href="{{ this.args.pb_url }}">copyparty</a></span>
|
||||
<span id="pb"><span>powered by</span> <a href="{{ this.args.pb_url }}">copyparty {{ver}}</a></span>
|
||||
{%- endif %}
|
||||
<script>
|
||||
|
||||
|
||||
@@ -25,7 +25,8 @@ var Ls = {
|
||||
"t1": "handling",
|
||||
"u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
|
||||
"v1": "koble til",
|
||||
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!"
|
||||
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
|
||||
"w1": "bytt til https",
|
||||
},
|
||||
"eng": {
|
||||
"d2": "shows the state of all active threads",
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ args.doctitle }} @ {{ args.name }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
@@ -43,10 +43,9 @@
|
||||
<h1>WebDAV</h1>
|
||||
|
||||
<div class="os win">
|
||||
<p><em>note: rclone-FTP is a bit faster, so {% if args.ftp or args.ftps %}try that first{% else %}consider enabling FTP in server settings{% endif %}</em></p>
|
||||
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
@@ -64,9 +63,14 @@
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>or you can use rclone instead, which is much slower but doesn't require root:</p>
|
||||
<p>make it automount on boot:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
|
||||
printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
|
||||
</pre>
|
||||
<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
@@ -106,10 +110,21 @@
|
||||
|
||||
<div class="os win">
|
||||
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
|
||||
{% if args.ftp %}
|
||||
<p>connect with plaintext FTP:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
{% endif %}
|
||||
{% if args.ftps %}
|
||||
<p>connect with TLS-encrypted FTPS:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||
<pre>
|
||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
@@ -117,10 +132,21 @@
|
||||
</div>
|
||||
|
||||
<div class="os lin">
|
||||
{% if args.ftp %}
|
||||
<p>connect with plaintext FTP:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
{% endif %}
|
||||
{% if args.ftps %}
|
||||
<p>connect with TLS-encrypted FTPS:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<p>emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -155,7 +181,7 @@
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
|
||||
{% endif %}
|
||||
<p>
|
||||
you can use <a href="{{ r }}/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
|
||||
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
|
||||
</p>
|
||||
|
||||
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#tt, #toast {
|
||||
@@ -42,6 +44,10 @@ html {
|
||||
text-shadow: 1px 1px 0 #000;
|
||||
color: #fff;
|
||||
}
|
||||
#toast.top {
|
||||
top: 2em;
|
||||
bottom: unset;
|
||||
}
|
||||
#toast a {
|
||||
color: inherit;
|
||||
text-shadow: inherit;
|
||||
@@ -69,7 +75,7 @@ html {
|
||||
#toastb {
|
||||
max-height: 70vh;
|
||||
overflow-y: auto;
|
||||
padding: 1px;
|
||||
padding: .1em;
|
||||
}
|
||||
#toast.scroll #toastb {
|
||||
overflow-y: scroll;
|
||||
@@ -447,6 +453,20 @@ html.y textarea:focus {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
.mdo .mdth,
|
||||
.mdo .mdthl,
|
||||
.mdo .mdthr {
|
||||
margin: .5em .5em .5em 0;
|
||||
}
|
||||
.mdthl {
|
||||
float: left;
|
||||
}
|
||||
.mdthr {
|
||||
float: right;
|
||||
}
|
||||
hr {
|
||||
clear: both;
|
||||
}
|
||||
|
||||
@media screen {
|
||||
.mdo {
|
||||
|
||||
@@ -114,10 +114,10 @@ function up2k_flagbus() {
|
||||
do_take(now);
|
||||
return;
|
||||
}
|
||||
if (flag.owner && now - flag.owner[1] > 5000) {
|
||||
if (flag.owner && now - flag.owner[1] > 12000) {
|
||||
flag.owner = null;
|
||||
}
|
||||
if (flag.wants && now - flag.wants[1] > 5000) {
|
||||
if (flag.wants && now - flag.wants[1] > 12000) {
|
||||
flag.wants = null;
|
||||
}
|
||||
if (!flag.owner && !flag.wants) {
|
||||
@@ -723,7 +723,7 @@ function Donut(uc, st) {
|
||||
|
||||
function strobe() {
|
||||
var txt = strobes.pop();
|
||||
wintitle(txt);
|
||||
wintitle(txt, false);
|
||||
if (!txt)
|
||||
clearInterval(tstrober);
|
||||
}
|
||||
@@ -772,6 +772,7 @@ function fsearch_explain(n) {
|
||||
|
||||
function up2k_init(subtle) {
|
||||
var r = {
|
||||
"tact": Date.now(),
|
||||
"init_deps": init_deps,
|
||||
"set_fsearch": set_fsearch,
|
||||
"gotallfiles": [gotallfiles] // hooks
|
||||
@@ -856,6 +857,7 @@ function up2k_init(subtle) {
|
||||
fdom_ctr = 0,
|
||||
biggest_file = 0;
|
||||
|
||||
bcfg_bind(uc, 'rand', 'u2rand', false, null, false);
|
||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||
@@ -969,7 +971,7 @@ function up2k_init(subtle) {
|
||||
if (++nenters <= 0)
|
||||
nenters = 1;
|
||||
|
||||
if (onover.bind(this)(e))
|
||||
if (onover.call(this, e))
|
||||
return true;
|
||||
|
||||
var mup, up = QS('#up_zd');
|
||||
@@ -993,16 +995,29 @@ function up2k_init(subtle) {
|
||||
function onoverb(e) {
|
||||
// zones are alive; disable cuo2duo branch
|
||||
document.body.ondragover = document.body.ondrop = null;
|
||||
return onover.bind(this)(e);
|
||||
return onover.call(this, e);
|
||||
}
|
||||
function onover(e) {
|
||||
return onovercmn(this, e, false);
|
||||
}
|
||||
function onoverbtn(e) {
|
||||
return onovercmn(this, e, true);
|
||||
}
|
||||
function onovercmn(self, e, btn) {
|
||||
try {
|
||||
var ok = false, dt = e.dataTransfer.types;
|
||||
for (var a = 0; a < dt.length; a++)
|
||||
if (dt[a] == 'Files')
|
||||
ok = true;
|
||||
else if (dt[a] == 'text/uri-list')
|
||||
return true;
|
||||
else if (dt[a] == 'text/uri-list') {
|
||||
if (btn) {
|
||||
ok = true;
|
||||
if (toast.txt == L.u_uri)
|
||||
toast.hide();
|
||||
}
|
||||
else
|
||||
return toast.inf(10, L.u_uri) || true;
|
||||
}
|
||||
|
||||
if (!ok)
|
||||
return true;
|
||||
@@ -1018,8 +1033,11 @@ function up2k_init(subtle) {
|
||||
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
|
||||
return modal.alert('your browser does not support drag-and-drop uploading');
|
||||
}
|
||||
if (btn)
|
||||
return;
|
||||
|
||||
clmod(ebi('drops'), 'vis', 1);
|
||||
var v = this.getAttribute('v');
|
||||
var v = self.getAttribute('v');
|
||||
if (v)
|
||||
clmod(ebi(v), 'hl', 1);
|
||||
}
|
||||
@@ -1043,6 +1061,8 @@ function up2k_init(subtle) {
|
||||
document.body.ondragleave = offdrag;
|
||||
document.body.ondragover = onover;
|
||||
document.body.ondrop = gotfile;
|
||||
ebi('u2btn').ondrop = gotfile;
|
||||
ebi('u2btn').ondragover = onoverbtn;
|
||||
|
||||
var drops = [ebi('up_dz'), ebi('srch_dz')];
|
||||
for (var a = 0; a < 2; a++) {
|
||||
@@ -1086,7 +1106,7 @@ function up2k_init(subtle) {
|
||||
function gotfile(e) {
|
||||
ev(e);
|
||||
nenters = 0;
|
||||
offdrag.bind(this)();
|
||||
offdrag.call(this);
|
||||
var dz = this && this.getAttribute('id');
|
||||
if (!dz && e && e.clientY)
|
||||
// cuo2duo fallback
|
||||
@@ -1130,7 +1150,7 @@ function up2k_init(subtle) {
|
||||
dst = good_files;
|
||||
|
||||
if (is_itemlist) {
|
||||
if (fobj.kind !== 'file')
|
||||
if (fobj.kind !== 'file' && fobj.type !== 'text/uri-list')
|
||||
continue;
|
||||
|
||||
try {
|
||||
@@ -1142,6 +1162,8 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) { }
|
||||
fobj = fobj.getAsFile();
|
||||
if (!fobj)
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
if (fobj.size < 1)
|
||||
@@ -1363,6 +1385,10 @@ function up2k_init(subtle) {
|
||||
|
||||
if (uc.fsearch)
|
||||
entry.srch = 1;
|
||||
else if (uc.rand) {
|
||||
entry.rand = true;
|
||||
entry.name = 'a\n' + entry.name;
|
||||
}
|
||||
|
||||
if (biggest_file < entry.size)
|
||||
biggest_file = entry.size;
|
||||
@@ -1398,7 +1424,7 @@ function up2k_init(subtle) {
|
||||
ebi('u2tabw').className = 'ye';
|
||||
|
||||
setTimeout(function () {
|
||||
if (!actx || actx.state != 'suspended' || toast.tag == L.u_unpt)
|
||||
if (!actx || actx.state != 'suspended' || toast.visible)
|
||||
return;
|
||||
|
||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
|
||||
@@ -1418,7 +1444,36 @@ function up2k_init(subtle) {
    }
    more_one_file();

    var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0;
    function linklist() {
        var ret = [],
            base = document.location.origin.replace(/\/$/, '');

        for (var a = 0; a < st.files.length; a++) {
            var t = st.files[a],
                url = t.purl + uricom_enc(t.name);

            if (t.fk)
                url += '?k=' + t.fk;

            ret.push(base + url);
        }
        return ret.join('\r\n');
    }

    ebi('luplinks').onclick = function (e) {
        ev(e);
        modal.alert(linklist());
    };

    ebi('cuplinks').onclick = function (e) {
        ev(e);
        var txt = linklist();
        cliptxt(txt + '\n', function () {
            toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
        });
    };

    var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0, utw_card = 0;
function etafun() {
|
||||
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
|
||||
nsend = st.busy.upload.length + st.todo.upload.length,
|
||||
@@ -1431,6 +1486,12 @@ function up2k_init(subtle) {
|
||||
|
||||
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
|
||||
|
||||
if (utw_card != pvis.act) {
|
||||
utw_card = pvis.act;
|
||||
utw_read = 9001;
|
||||
ebi('u2tabw').style.minHeight = '0px';
|
||||
}
|
||||
|
||||
if (++utw_read >= 20) {
|
||||
utw_read = 0;
|
||||
utw_minh = parseInt(ebi('u2tabw').style.minHeight || '0');
|
||||
@@ -1607,8 +1668,14 @@ function up2k_init(subtle) {
|
||||
running = true;
|
||||
while (true) {
|
||||
var now = Date.now(),
|
||||
blocktime = now - r.tact,
|
||||
is_busy = st.car < st.files.length;
|
||||
|
||||
if (blocktime > 2500)
|
||||
console.log('main thread blocked for ' + blocktime);
|
||||
|
||||
r.tact = now;
|
||||
|
||||
if (was_busy && !is_busy) {
|
||||
for (var a = 0; a < st.files.length; a++) {
|
||||
var t = st.files[a];
|
||||
@@ -1748,6 +1815,15 @@ function up2k_init(subtle) {
|
||||
})();
|
||||
|
||||
function uptoast() {
|
||||
if (st.busy.handshake.length)
|
||||
return;
|
||||
|
||||
for (var a = 0; a < st.files.length; a++) {
|
||||
var t = st.files[a];
|
||||
if (t.want_recheck && !t.rechecks)
|
||||
return;
|
||||
}
|
||||
|
||||
var sr = uc.fsearch,
|
||||
ok = pvis.ctr.ok,
|
||||
ng = pvis.ctr.ng,
|
||||
@@ -1770,6 +1846,7 @@ function up2k_init(subtle) {
|
||||
|
||||
timer.rm(etafun);
|
||||
timer.rm(donut.do);
|
||||
ebi('u2tabw').style.minHeight = '0px';
|
||||
utw_minh = 0;
|
||||
}
|
||||
|
||||
@@ -2003,6 +2080,8 @@ function up2k_init(subtle) {
|
||||
nbusy++;
|
||||
reading++;
|
||||
nchunk++;
|
||||
if (Date.now() - up2k.tact > 1500)
|
||||
tasker();
|
||||
}
|
||||
|
||||
function onmsg(d) {
|
||||
@@ -2213,13 +2292,24 @@ function up2k_init(subtle) {
|
||||
|
||||
t.sprs = response.sprs;
|
||||
|
||||
var rsp_purl = url_enc(response.purl);
|
||||
if (rsp_purl !== t.purl || response.name !== t.name) {
|
||||
// server renamed us (file exists / path restrictions)
|
||||
console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
|
||||
var fk = response.fk,
|
||||
rsp_purl = url_enc(response.purl),
|
||||
rename = rsp_purl !== t.purl || response.name !== t.name;
|
||||
|
||||
if (rename || fk) {
|
||||
if (rename)
|
||||
console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
|
||||
|
||||
t.purl = rsp_purl;
|
||||
t.name = response.name;
|
||||
pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
|
||||
|
||||
var url = t.purl + uricom_enc(t.name);
|
||||
if (fk) {
|
||||
t.fk = fk;
|
||||
url += '?k=' + fk;
|
||||
}
|
||||
|
||||
pvis.seth(t.n, 0, linksplit(url).join(' '));
|
||||
}
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
@@ -2322,16 +2412,17 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
var err_pend = rsp.indexOf('partial upload exists at a different') + 1,
|
||||
err_srcb = rsp.indexOf('source file busy; please try again') + 1,
|
||||
err_plug = rsp.indexOf('upload blocked by x') + 1,
|
||||
err_dupe = rsp.indexOf('upload rejected, file already exists') + 1;
|
||||
|
||||
if (err_pend || err_plug || err_dupe) {
|
||||
if (err_pend || err_srcb || err_plug || err_dupe) {
|
||||
err = rsp;
|
||||
ofs = err.indexOf('\n/');
|
||||
if (ofs !== -1) {
|
||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
||||
}
|
||||
if (!t.rechecks && err_pend) {
|
||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||
t.rechecks = 0;
|
||||
t.want_recheck = true;
|
||||
}
|
||||
@@ -2368,6 +2459,8 @@ function up2k_init(subtle) {
|
||||
};
|
||||
if (t.srch)
|
||||
req.srch = 1;
|
||||
else if (t.rand)
|
||||
req.rand = true;
|
||||
|
||||
xhr.open('POST', t.purl, true);
|
||||
xhr.responseType = 'text';
|
||||
@@ -2537,8 +2630,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
}
|
||||
window.addEventListener('resize', onresize);
|
||||
onresize();
|
||||
onresize100.add(onresize, true);
|
||||
|
||||
if (MOBILE) {
|
||||
// android-chrome wobbles for a bit; firefox / iOS-safari are OK
|
||||
@@ -2606,6 +2698,11 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function draw_turbo() {
|
||||
if (turbolvl < 0 && uc.turbo) {
|
||||
bcfg_set('u2turbo', uc.turbo = false);
|
||||
toast.err(10, "turbo is disabled in server config");
|
||||
}
|
||||
|
||||
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
||||
html = ebi('u2foot').innerHTML;
|
||||
|
||||
@@ -2871,7 +2968,7 @@ ebi('ico1').onclick = function () {
|
||||
if (QS('#op_up2k.act'))
|
||||
goto_up2k();
|
||||
|
||||
apply_perms(perms);
|
||||
apply_perms({ "perms": perms, "frand": frand });
|
||||
|
||||
|
||||
(function () {
|
||||
|
||||
@@ -7,6 +7,7 @@ if (!window.console || !console.log)
|
||||
|
||||
|
||||
var wah = '',
|
||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||
CB = '?_=' + Date.now(),
|
||||
R = SR.slice(1),
|
||||
@@ -17,6 +18,7 @@ var wah = '',
|
||||
MOBILE = TOUCH,
|
||||
CHROME = !!window.chrome,
|
||||
VCHROME = CHROME ? 1 : 0,
|
||||
IE = /Trident\//.test(navigator.userAgent),
|
||||
FIREFOX = ('netscape' in window) && / rv:/.test(navigator.userAgent),
|
||||
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
|
||||
LINUX = /Linux/.test(navigator.userAgent),
|
||||
@@ -111,12 +113,13 @@ if ((document.location + '').indexOf(',rej,') + 1)
|
||||
|
||||
try {
|
||||
console.hist = [];
|
||||
var CMAXHIST = 100;
|
||||
var hook = function (t) {
|
||||
var orig = console[t].bind(console),
|
||||
cfun = function () {
|
||||
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
|
||||
if (console.hist.length > 100)
|
||||
console.hist = console.hist.slice(50);
|
||||
if (console.hist.length > CMAXHIST)
|
||||
console.hist = console.hist.slice(CMAXHIST / 2);
|
||||
|
||||
orig.apply(console, arguments);
|
||||
};
|
||||
@@ -157,8 +160,8 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
window.onerror = undefined;
|
||||
var html = [
|
||||
'<h1>you hit a bug!</h1>',
|
||||
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a> or <code>ed#2644</code></p>',
|
||||
'<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
||||
];
|
||||
@@ -223,7 +226,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
'#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
|
||||
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
|
||||
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
|
||||
'#exbox a{text-decoration:underline;color:#fc0} ' +
|
||||
'#exbox a{text-decoration:underline;color:#fc0;background:#222;border:none} ' +
|
||||
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
|
||||
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
|
||||
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
|
||||
@@ -331,6 +334,25 @@ if (!String.prototype.format)
|
||||
});
|
||||
};
|
||||
|
||||
try {
|
||||
new URL('/a/', 'https://a.com/');
|
||||
}
|
||||
catch (ex) {
|
||||
console.log('ie11 shim URL()');
|
||||
window.URL = function (url, base) {
|
||||
if (url.indexOf('//') < 0)
|
||||
url = base + '/' + url.replace(/^\/?/, '');
|
||||
else if (url.indexOf('//') == 0)
|
||||
url = 'https:' + url;
|
||||
|
||||
var x = url.split('?');
|
||||
return {
|
||||
"pathname": '/' + x[0].split('://')[1].replace(/[^/]+\//, ''),
|
||||
"search": x.length > 1 ? x[1] : ''
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
@@ -347,6 +369,15 @@ function import_js(url, cb) {
|
||||
}
|
||||
|
||||
|
||||
function unsmart(txt) {
|
||||
return !IPHONE ? txt : (txt.
|
||||
replace(/[\u2014]/g, "--").
|
||||
replace(/[\u2022]/g, "*").
|
||||
replace(/[\u2018\u2019]/g, "'").
|
||||
replace(/[\u201c\u201d]/g, '"'));
|
||||
}
|
||||
|
||||
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
@@ -611,6 +642,29 @@ function vsplit(vp) {
|
||||
}
|
||||
|
||||
|
||||
function vjoin(p1, p2) {
|
||||
if (!p1)
|
||||
p1 = '';
|
||||
|
||||
if (!p2)
|
||||
p2 = '';
|
||||
|
||||
if (p1.endsWith('/'))
|
||||
p1 = p1.slice(0, -1);
|
||||
|
||||
if (p2.startsWith('/'))
|
||||
p2 = p2.slice(1);
|
||||
|
||||
if (!p1)
|
||||
return p2;
|
||||
|
||||
if (!p2)
|
||||
return p1;
|
||||
|
||||
return p1 + '/' + p2;
|
||||
}
|
||||
|
||||
|
||||
function uricom_enc(txt, do_fb_enc) {
|
||||
try {
|
||||
return encodeURIComponent(txt);
|
||||
@@ -698,7 +752,7 @@ function get_pwd() {
|
||||
if (pwd.length < 2)
|
||||
return null;
|
||||
|
||||
return pwd[1].split(';')[0];
|
||||
return decodeURIComponent(pwd[1].split(';')[0]);
|
||||
}
|
||||
|
||||
|
||||
@@ -978,6 +1032,7 @@ function sethash(hv) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function dl_file(url) {
|
||||
console.log('DL [%s]', url);
|
||||
var o = mknod('a');
|
||||
@@ -987,10 +1042,92 @@ function dl_file(url) {
|
||||
}
|
||||
|
||||
|
||||
function cliptxt(txt, ok) {
|
||||
var fb = function () {
|
||||
console.log('fb');
|
||||
var o = mknod('input');
|
||||
o.value = txt;
|
||||
document.body.appendChild(o);
|
||||
o.focus();
|
||||
o.select();
|
||||
document.execCommand("copy");
|
||||
document.body.removeChild(o);
|
||||
ok();
|
||||
};
|
||||
try {
|
||||
navigator.clipboard.writeText(txt).then(ok, fb);
|
||||
}
|
||||
catch (ex) { fb(); }
|
||||
}
|
||||
|
||||
|
||||
function Debounce(delay) {
|
||||
var r = this;
|
||||
r.delay = delay;
|
||||
r.timer = 0;
|
||||
r.t_hit = 0;
|
||||
r.t_run = 0;
|
||||
r.q = [];
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
r.q.push(fun);
|
||||
|
||||
if (run)
|
||||
fun();
|
||||
};
|
||||
|
||||
r.rm = function (fun) {
|
||||
apop(r.q, fun);
|
||||
};
|
||||
|
||||
r.run = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
r.t_run = Date.now();
|
||||
|
||||
var q = r.q.slice(0);
|
||||
for (var a = 0; a < q.length; a++)
|
||||
q[a]();
|
||||
};
|
||||
|
||||
r.hit = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
var now = Date.now(),
|
||||
td_hit = now - r.t_hit,
|
||||
td_run = now - r.t_run;
|
||||
|
||||
if (td_run >= r.delay * 2)
|
||||
r.t_run = now;
|
||||
|
||||
if (td_run >= r.delay && td_run <= r.delay * 2) {
|
||||
// r.delay is also deadline
|
||||
clearTimeout(r.timer);
|
||||
return r.run();
|
||||
}
|
||||
|
||||
if (td_hit < r.delay / 5)
|
||||
return;
|
||||
|
||||
clearTimeout(r.timer);
|
||||
r.timer = setTimeout(r.run, r.delay);
|
||||
r.t_hit = now;
|
||||
};
|
||||
};
|
||||
|
||||
var onresize100 = new Debounce(100);
|
||||
window.addEventListener('resize', onresize100.hit);
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
r.q = [];
|
||||
r.last = 0;
|
||||
r.fs = 0;
|
||||
r.fc = 0;
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
@@ -1016,6 +1153,7 @@ var timer = (function () {
|
||||
q[a]();
|
||||
|
||||
r.last = Date.now();
|
||||
//r.fc++; if (r.last - r.fs >= 2000) { console.log(r.last - r.fs, r.fc); r.fs = r.last; r.fc = 0; }
|
||||
}
|
||||
setInterval(doevents, 100);
|
||||
|
||||
@@ -1040,7 +1178,7 @@ var tt = (function () {
|
||||
var prev = null;
|
||||
r.cshow = function () {
|
||||
if (this !== prev)
|
||||
r.show.bind(this)();
|
||||
r.show.call(this);
|
||||
|
||||
prev = this;
|
||||
};
|
||||
@@ -1052,7 +1190,7 @@ var tt = (function () {
|
||||
return;
|
||||
|
||||
if (Date.now() - r.lvis < 400)
|
||||
return r.show.bind(this)();
|
||||
return r.show.call(this);
|
||||
|
||||
tev = setTimeout(r.show.bind(this), 800);
|
||||
if (TOUCH)
|
||||
@@ -1142,13 +1280,13 @@ var tt = (function () {
|
||||
r.th.style.top = (e.pageY + 12 * sy) + 'px';
|
||||
};
|
||||
|
||||
if (IPHONE) {
|
||||
if (TOUCH) {
|
||||
var f1 = r.show,
|
||||
f2 = r.hide,
|
||||
q = [];
|
||||
|
||||
// if an onclick-handler creates a new timer,
|
||||
// iOS 13.1.2 delays the entire handler by up to 401ms,
|
||||
// webkits delay the entire handler by up to 401ms,
|
||||
// win by using a shared timer instead
|
||||
|
||||
timer.add(function () {
|
||||
@@ -1210,6 +1348,9 @@ var toast = (function () {
|
||||
r.visible = false;
|
||||
r.txt = null;
|
||||
r.tag = obj; // filler value (null is scary)
|
||||
r.p_txt = '';
|
||||
r.p_sec = 0;
|
||||
r.p_t = 0;
|
||||
|
||||
function scrollchk() {
|
||||
if (scrolling)
|
||||
@@ -1242,10 +1383,23 @@ var toast = (function () {
|
||||
};
|
||||
|
||||
r.show = function (cl, sec, txt, tag) {
|
||||
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
||||
delta = Date.now() - r.p_t;
|
||||
|
||||
if (same && delta < 100)
|
||||
return;
|
||||
|
||||
r.p_txt = txt;
|
||||
r.p_sec = sec;
|
||||
r.p_t = Date.now();
|
||||
|
||||
clearTimeout(te);
|
||||
if (sec)
|
||||
te = setTimeout(r.hide, sec * 1000);
|
||||
|
||||
if (same && delta < 1000)
|
||||
return;
|
||||
|
||||
if (txt.indexOf('<body>') + 1)
|
||||
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||
|
||||
@@ -1260,17 +1414,17 @@ var toast = (function () {
|
||||
r.tag = tag;
|
||||
};
|
||||
|
||||
r.ok = function (sec, txt, tag) {
|
||||
r.show('ok', sec, txt, tag);
|
||||
r.ok = function (sec, txt, tag, cls) {
|
||||
r.show('ok ' + (cls || ''), sec, txt, tag);
|
||||
};
|
||||
r.inf = function (sec, txt, tag) {
|
||||
r.show('inf', sec, txt, tag);
|
||||
r.inf = function (sec, txt, tag, cls) {
|
||||
r.show('inf ' + (cls || ''), sec, txt, tag);
|
||||
};
|
||||
r.warn = function (sec, txt, tag) {
|
||||
r.show('warn', sec, txt, tag);
|
||||
r.warn = function (sec, txt, tag, cls) {
|
||||
r.show('warn ' + (cls || ''), sec, txt, tag);
|
||||
};
|
||||
r.err = function (sec, txt, tag) {
|
||||
r.show('err', sec, txt, tag);
|
||||
r.err = function (sec, txt, tag, cls) {
|
||||
r.show('err ' + (cls || ''), sec, txt, tag);
|
||||
};
|
||||
|
||||
return r;
|
||||
@@ -1424,7 +1578,7 @@ var modal = (function () {
|
||||
cb_ok = cok;
|
||||
cb_ng = cng === undefined ? cok : null;
|
||||
cb_up = fun;
|
||||
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
|
||||
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
|
||||
r.show(html);
|
||||
|
||||
ebi('modali').value = v || '';
|
||||
@@ -1456,7 +1610,7 @@ function repl_load() {
|
||||
ret = [
|
||||
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
|
||||
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
|
||||
'console.hist.slice(-10).join("\\n")'
|
||||
'console.hist.slice(-50).join("\\n")'
|
||||
];
|
||||
|
||||
ipre.innerHTML = '<option value=""></option>';
|
||||
@@ -1512,6 +1666,8 @@ function repl(e) {
|
||||
if (!cmd)
|
||||
return toast.inf(3, 'eval aborted');
|
||||
|
||||
cmd = unsmart(cmd);
|
||||
|
||||
if (cmd.startsWith(',')) {
|
||||
evalex_fatal = true;
|
||||
return modal.alert(esc(eval(cmd.slice(1)) + ''));
|
||||
@@ -1538,6 +1694,14 @@ function load_md_plug(md_text, plug_type, defer) {
|
||||
if (defer)
|
||||
md_plug[plug_type] = null;
|
||||
|
||||
if (plug_type == 'pre')
|
||||
try {
|
||||
md_text = md_thumbs(md_text);
|
||||
}
|
||||
catch (ex) {
|
||||
toast.warn(30, '' + ex);
|
||||
}
|
||||
|
||||
if (!have_emp)
|
||||
return md_text;
|
||||
|
||||
@@ -1578,6 +1742,47 @@ function load_md_plug(md_text, plug_type, defer) {
|
||||
|
||||
return md;
|
||||
}
|
||||
function md_thumbs(md) {
|
||||
if (!/(^|\n)<!-- th -->/.exec(md))
|
||||
return md;
|
||||
|
||||
// `!th[flags](some.jpg)`
|
||||
// flags: nothing or "l" or "r"
|
||||
|
||||
md = md.split(/!th\[/g);
|
||||
for (var a = 1; a < md.length; a++) {
|
||||
if (!/^[^\]!()]*\]\([^\][!()]+\)/.exec(md[a])) {
|
||||
md[a] = '!th[' + md[a];
|
||||
continue;
|
||||
}
|
||||
|
||||
var o1 = md[a].indexOf(']('),
|
||||
o2 = md[a].indexOf(')', o1),
|
||||
alt = md[a].slice(0, o1),
|
||||
flags = alt.split(','),
|
||||
url = md[a].slice(o1 + 2, o2),
|
||||
float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';
|
||||
|
||||
if (!/[?&]cache/.exec(url))
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';
|
||||
|
||||
md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
|
||||
}
|
||||
return md.join('');
|
||||
}
|
||||
function md_th_set() {
|
||||
var els = QSA('.mdth');
|
||||
for (var a = 0, aa = els.length; a < aa; a++)
|
||||
els[a].onclick = md_th_click;
|
||||
}
|
||||
function md_th_click(e) {
|
||||
ev(e);
|
||||
var url = this.getAttribute('href').split('?')[0];
|
||||
if (window.sb_md)
|
||||
window.parent.postMessage("imshow " + url, "*");
|
||||
else
|
||||
thegrid.imshow(url);
|
||||
}
|
||||
|
||||
|
||||
var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n';
|
||||
@@ -1656,7 +1861,6 @@ function cprop(name) {
|
||||
|
||||
|
||||
function bchrome() {
|
||||
console.log(document.documentElement.className);
|
||||
var v, o = QS('meta[name=theme-color]');
|
||||
if (!o)
|
||||
return;
|
||||
@@ -1674,16 +1878,17 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
|
||||
if (xhr.status < 400 && xhr.status >= 200)
|
||||
return true;
|
||||
|
||||
if (xhr.status == 403)
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
fun = toast[lvl || 'err'],
|
||||
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
|
||||
|
||||
if (xhr.status == 403 && !is_cf)
|
||||
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
|
||||
|
||||
if (xhr.status == 404)
|
||||
return toast.err(0, prefix + e404, tag);
|
||||
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
fun = toast[lvl || 'err'];
|
||||
|
||||
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
|
||||
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
|
||||
var now = Date.now(), td = now - cf_cha_t;
|
||||
if (td < 15000)
|
||||
return;
|
||||
|
||||
@@ -13,15 +13,21 @@

# other stuff

## [`example.conf`](example.conf)
* example config file for `-c`

## [`versus.md`](versus.md)
* similar software / alternatives (with pros/cons)

## [`changelog.md`](changelog.md)
* occasionally grabbed from github release notes

## [`devnotes.md`](devnotes.md)
* technical stuff

## [`rclone.md`](rclone.md)
* notes on using rclone as a fuse client/server

## [`example.conf`](example.conf)
* example config file for `-c`


# junk
Some files were not shown because too many files have changed in this diff