Compare commits
289 Commits
| Author | SHA1 | Date |
|---|---|---|
| | `987caec15d` … `a00ff2b086` (289 commits) | |
4 .vscode/launch.json (vendored)
@@ -9,6 +9,10 @@
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}",
            "justMyCode": false,
            "env": {
                "PYDEVD_DISABLE_FILE_VALIDATION": "1",
                "PYTHONWARNINGS": "always", //error
            },
            "args": [
                //"-nw",
                "-ed",

2 .vscode/launch.py (vendored)
@@ -41,7 +41,7 @@ if sfx:
    argv = [sys.executable, sfx] + argv
    sp.check_call(argv)
elif re.search(" -j ?[0-9]", " ".join(argv)):
    argv = [sys.executable, "-m", "copyparty"] + argv
    argv = [sys.executable, "-Wa", "-m", "copyparty"] + argv
    sp.check_call(argv)
else:
    sys.path.insert(0, os.getcwd())

1 .vscode/tasks.json (vendored)
@@ -11,6 +11,7 @@
        "type": "shell",
        "command": "${config:python.pythonPath}",
        "args": [
            "-Wa", //-We
            ".vscode/launch.py"
        ]
    }

@@ -1,3 +1,43 @@
|
||||
* do something cool
|
||||
|
||||
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
|
||||
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍
|
||||
|
||||
but to be more specific,
|
||||
|
||||
|
||||
# contribution ideas
|
||||
|
||||
|
||||
## documentation
|
||||
|
||||
I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.
|
||||
|
||||
|
||||
## crazy ideas & features
|
||||
|
||||
assuming they won't cause too many problems or side-effects :>
|
||||
|
||||
i think someone was working on a way to list directories over DNS for example...
|
||||
|
||||
if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍
|
||||
|
||||
|
||||
## others
|
||||
|
||||
aside from documentation and ideas, some other things that would be cool to have some help with are:
|
||||
|
||||
* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
|
||||
|
||||
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
|
||||
|
||||
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
|
||||
|
||||
* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
|
||||
|
||||
* [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)
|
||||
|
||||
* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
|
||||
|
||||
* [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
|
||||
|
||||
* [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :>
|
||||
|
||||
304 README.md
@@ -20,13 +20,13 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [testimonials](#testimonials) - small collection of user feedback
|
||||
* [motivations](#motivations) - project goals / philosophy
|
||||
* [notes](#notes) - general notes
|
||||
* [bugs](#bugs)
|
||||
* [general bugs](#general-bugs)
|
||||
* [not my bugs](#not-my-bugs)
|
||||
* [bugs](#bugs) - roughly sorted by chance of encounter
|
||||
* [not my bugs](#not-my-bugs) - same order here too
|
||||
* [breaking changes](#breaking-changes) - upgrade notes
|
||||
* [FAQ](#FAQ) - "frequently" asked questions
|
||||
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
|
||||
* [shadowing](#shadowing) - hiding specific subfolders
|
||||
* [dotfiles](#dotfiles) - unix-style hidden files/folders
|
||||
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
|
||||
* [tabs](#tabs) - the main tabs in the ui
|
||||
* [hotkeys](#hotkeys) - the browser has the following hotkeys
|
||||
@@ -40,7 +40,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||
* [media player](#media-player) - plays almost every audio format there is
|
||||
* [audio equalizer](#audio-equalizer) - bass boosted
|
||||
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
||||
* [other tricks](#other-tricks)
|
||||
@@ -54,6 +54,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [webdav server](#webdav-server) - with read-write support
|
||||
* [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
|
||||
* [smb server](#smb-server) - unsafe, slow, not recommended for wan
|
||||
* [browser ux](#browser-ux) - tweaking the ui
|
||||
* [file indexing](#file-indexing) - enables dedup and music search ++
|
||||
* [exclude-patterns](#exclude-patterns) - to save some time
|
||||
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
|
||||
@@ -66,12 +67,16 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
||||
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
||||
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
|
||||
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
||||
* [themes](#themes)
|
||||
* [complete examples](#complete-examples)
|
||||
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
|
||||
* [prometheus](#prometheus) - metrics/stats can be enabled
|
||||
* [packages](#packages) - the party might be closer than you think
|
||||
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
* [fedora package](#fedora-package) - currently **NOT** available on [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
|
||||
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
|
||||
* [nixos module](#nixos-module)
|
||||
* [browser support](#browser-support) - TLDR: yes
|
||||
@@ -82,9 +87,11 @@ turn almost any device into a file server with resumable uploads/downloads using
|
||||
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
|
||||
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
* [client-side](#client-side) - when uploading files
|
||||
* [security](#security) - some notes on hardening
|
||||
* [security](#security) - there is a [discord server](https://discord.gg/25J8CdTT6G)
|
||||
* [gotchas](#gotchas) - behavior that might be unexpected
|
||||
* [cors](#cors) - cross-site request config
|
||||
* [filekeys](#filekeys) - prevent filename bruteforcing
|
||||
* [password hashing](#password-hashing) - you can hash passwords
|
||||
* [https](#https) - both HTTP and HTTPS are accepted
|
||||
* [recovering from crashes](#recovering-from-crashes)
|
||||
* [client crashes](#client-crashes)
|
||||
@@ -114,8 +121,8 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
|
||||
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
|
||||
|
||||
* **Alpine:** `apk add py3-pillow ffmpeg`
|
||||
* **Debian:** `apt install python3-pil ffmpeg`
|
||||
* **Fedora:** `dnf install python3-pillow ffmpeg`
|
||||
* **Debian:** `apt install --no-install-recommends python3-pil ffmpeg`
|
||||
* **Fedora:** rpmfusion + `dnf install python3-pillow ffmpeg`
|
||||
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
|
||||
* **MacOS:** `port install py-Pillow ffmpeg`
|
||||
* **MacOS** (alternative): `brew install pillow ffmpeg`
|
||||
@@ -142,9 +149,10 @@ you may also want these, especially on servers:
|
||||
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
|
||||
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||
* [contrib/openrc/copyparty](contrib/openrc/copyparty) to run copyparty on Alpine / Gentoo
|
||||
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||
|
||||
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
||||
```
|
||||
@@ -258,20 +266,28 @@ server notes:
|
||||
|
||||
# bugs
|
||||
|
||||
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
|
||||
* Windows: python 2.7 cannot handle filenames with mojibake
|
||||
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
||||
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
|
||||
roughly sorted by chance of encounter
|
||||
|
||||
## general bugs
|
||||
* general:
|
||||
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
||||
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
|
||||
* if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
|
||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
|
||||
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
||||
* probably more, pls let me know
|
||||
|
||||
* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
|
||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
|
||||
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
||||
* probably more, pls let me know
|
||||
* python 3.4 and older (including 2.7):
|
||||
* many rare and exciting edge-cases because [python didn't handle EINTR yet](https://peps.python.org/pep-0475/)
|
||||
* downloads from copyparty may suddenly fail, but uploads *should* be fine
|
||||
|
||||
* python 2.7 on Windows:
|
||||
* cannot index non-ascii filenames with `-e2d`
|
||||
* cannot handle filenames with mojibake
|
||||
|
||||
## not my bugs
|
||||
|
||||
same order here too
|
||||
|
||||
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
|
||||
|
||||
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
|
||||
@@ -281,8 +297,11 @@ server notes:
|
||||
* Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
|
||||
|
||||
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
|
||||
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
|
||||
* "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
|
||||
* `AudioContext` will probably never be a viable workaround as apple introduces new issues faster than they fix current ones
|
||||
|
||||
* iPhones: the preload feature (in the media-player-options tab) can cause a tiny audio glitch 20sec before the end of each song, but disabling it may cause worse iOS bugs to appear instead
|
||||
* just a hunch, but disabling preloading may cause playback to stop entirely, or possibly mess with bluetooth speakers
|
||||
* tried to add a tooltip regarding this but looks like apple broke my tooltips
|
||||
|
||||
* Windows: folders cannot be accessed if the name ends with `.`
|
||||
* python or windows bug
|
||||
@@ -292,6 +311,7 @@ server notes:
|
||||
|
||||
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
|
||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
|
||||
* also happens on mergerfs, so put the db elsewhere
|
||||
|
||||
* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
|
||||
* due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
|
||||
@@ -301,6 +321,8 @@ server notes:
|
||||
|
||||
upgrade notes
|
||||
|
||||
* `1.9.16` (2023-11-04):
|
||||
* `--stats`/prometheus: `cpp_bans` renamed to `cpp_active_bans`, and that + `cpp_uptime` are gauges
|
||||
* `1.6.0` (2023-01-29):
|
||||
* http-api: delete/move is now `POST` instead of `GET`
|
||||
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
||||
@@ -320,11 +342,17 @@ upgrade notes
|
||||
* can I make copyparty download a file to my server if I give it a URL?
|
||||
* yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
|
||||
|
||||
* i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
|
||||
* ```bash
  _| _ __ _ _|_
  (_| (_) | | (_) |_
  ```
|
||||
|
||||
|
||||
# accounts and volumes
|
||||
|
||||
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
|
||||
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
|
||||
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if the user has `a`/admin in any volume)
|
||||
* changes to the `[global]` config section requires a restart to take effect
|
||||
|
||||
a quick summary can be seen using `--help-accounts`
|
||||
@@ -341,8 +369,12 @@ permissions:
|
||||
* `w` (write): upload files, move files *into* this folder
|
||||
* `m` (move): move files/folders *from* this folder
|
||||
* `d` (delete): delete files/folders
|
||||
* `.` (dots): user can ask to show dotfiles in directory listings
|
||||
* `g` (get): only download files, cannot see folder contents or zip/tar
|
||||
* `G` (upget): same as `g` except uploaders get to see their own filekeys (see `fk` in examples below)
|
||||
* `G` (upget): same as `g` except uploaders get to see their own [filekeys](#filekeys) (see `fk` in examples below)
|
||||
* `h` (html): same as `g` except folders return their index.html, and filekeys are not necessary for index.html
|
||||
* `a` (admin): can see upload time, uploader IPs, config-reload
|
||||
* `A` ("all"): same as `rwmda.` (read/write/move/delete/admin/dotfiles)
|
||||
|
||||
examples:
|
||||
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
|
||||
@@ -354,7 +386,7 @@ examples:
|
||||
* `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
|
||||
* `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
|
||||
* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable filekeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
|
||||
* `c,fk=4` sets the `fk` (filekey) volflag to 4, meaning each file gets a 4-character accesskey
|
||||
* `c,fk=4` sets the `fk` ([filekey](#filekeys)) volflag to 4, meaning each file gets a 4-character accesskey
|
||||
* `u1` can upload files, browse the folder, and see the generated filekeys
|
||||
* other users cannot browse the folder, but can access the files if they have the full file URL with the filekey
|
||||
* replacing the `g` permission with `wg` would let anonymous users upload files, but not see the required filekey to access it
|
||||
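for reference, the last `/mnt/ss` example above can also be expressed as a config file instead of arguments; the following is only a rough sketch of the section layout -- the exact syntax should be checked against [./docs/example.conf](./docs/example.conf):

```
[accounts]
  u1: p1          # username: password

[/i]              # share at URL /i ...
  /mnt/ss         # ... the server folder /mnt/ss
  accs:
    rw: u1        # read-write for u1
    g: *          # get-only for everyone else
  flags:
    fk: 4         # 4-character filekeys
```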
@@ -370,6 +402,17 @@ hiding specific subfolders by mounting another volume on top of them
|
||||
for example `-v /mnt::r -v /var/empty:web/certs:r` mounts the server folder `/mnt` as the webroot, but another volume is mounted at `/web/certs` -- so visitors can only see the contents of `/mnt` and `/mnt/web` (at URLs `/` and `/web`), but not `/mnt/web/certs` because URL `/web/certs` is mapped to `/var/empty`
|
||||
|
||||
|
||||
## dotfiles
|
||||
|
||||
unix-style hidden files/folders by starting the name with a dot
|
||||
|
||||
anyone can access these if they know the name, but they normally don't appear in directory listings
|
||||
|
||||
a client can request to see dotfiles in directory listings if global option `-ed` is specified, or the volume has volflag `dots`, or the user has permission `.`
|
||||
|
||||
dotfiles do not appear in search results unless one of the above is true, **and** the global option / volflag `dotsrch` is set
|
||||
|
||||
|
||||
# the browser
|
||||
|
||||
accessing a copyparty server using a web-browser
|
||||
@@ -482,11 +525,14 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
|
||||
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||
|
||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
|
||||
* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)
|
||||
|
||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||
* indicated by the audio files having the ▶ icon instead of 💾
|
||||
|
||||
enabling `multiselect` lets you click files to select them, and then shift-click another file for range-select
|
||||
* `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
|
||||
|
||||
|
||||
## zip downloads
|
||||
|
||||
@@ -497,12 +543,20 @@ select which type of archive you want in the `[⚙️] config` tab:
|
||||
| name | url-suffix | description |
|
||||
|--|--|--|
|
||||
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
|
||||
| `pax` | `?tar=pax` | pax-format tar, futureproof, not as fast |
|
||||
| `tgz` | `?tar=gz` | gzip compressed gnu-tar (slow), for `curl \| tar -xvz` |
|
||||
| `txz` | `?tar=xz` | gnu-tar with xz / lzma compression (v.slow) |
|
||||
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
|
||||
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
|
||||
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |
|
||||
|
||||
* hidden files (dotfiles) are excluded unless `-ed`
|
||||
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
|
||||
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
|
||||
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
|
||||
* hidden files ([dotfiles](#dotfiles)) are excluded unless account is allowed to list them
|
||||
* `up2k.db` and `dir.txt` is always excluded
|
||||
* bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
|
||||
* good, because copyparty's zip is faster than tar on small files
|
||||
* `zip_crc` will take longer to download since the server has to read each file twice
|
||||
* this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
|
||||
* how are you accessing copyparty actually
|
||||
@@ -511,6 +565,11 @@ you can also zip a selection of files or folders by clicking them in the browser
|
||||
|
||||

|
||||
|
||||
cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
|
||||
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
|
||||
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
|
||||
* can also be used to pregenerate thumbnails; combine with `--th-maxage=9999999` or `--th-clean=0`
|
||||
|
||||
|
||||
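as a concrete sketch of that trick (hostname, port, and folder are placeholders):

```bash
# grab an album as a tar archive, with the audio transcoded to opus on the fly
curl -o album.tar "http://192.168.1.2:3923/music/album/?tar&opus"

# or stream a plain tar straight into extraction, as mentioned in the table above
curl "http://192.168.1.2:3923/music/album/?tar" | tar -xv
```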
## uploading
|
||||
|
||||
@@ -547,7 +606,8 @@ the up2k UI is the epitome of polished intuitive experiences:
|
||||
* "parallel uploads" specifies how many chunks to upload at the same time
|
||||
* `[🏃]` analysis of other files should continue while one is uploading
|
||||
* `[🥔]` shows a simpler UI for faster uploads from slow devices
|
||||
* `[💭]` ask for confirmation before files are added to the queue
|
||||
* `[🎲]` generate random filenames during upload
|
||||
* `[📅]` preserve last-modified timestamps; server times will match yours
|
||||
* `[🔎]` switch between upload and [file-search](#file-search) mode
|
||||
* ignore `[🔎]` if you add files by dragging them into the browser
|
||||
|
||||
@@ -609,6 +669,7 @@ file selection: click somewhere on the line (not the link itself), then:
|
||||
* `up/down` to move
|
||||
* `shift-up/down` to move-and-select
|
||||
* `ctrl-shift-up/down` to also scroll
|
||||
* shift-click another line for range-select
|
||||
|
||||
* cut: select some files and `ctrl-x`
|
||||
* paste: `ctrl-v` in another folder
|
||||
@@ -692,7 +753,7 @@ open the `[🎺]` media-player-settings tab to configure it,
|
||||
* `[loop]` keeps looping the folder
|
||||
* `[next]` plays into the next folder
|
||||
* transcode:
|
||||
* `[flac]` convers `flac` and `wav` files into opus
|
||||
* `[flac]` converts `flac` and `wav` files into opus
|
||||
* `[aac]` converts `aac` and `m4a` files into opus
|
||||
* `[oth]` converts all other known formats into opus
|
||||
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
|
||||
@@ -701,12 +762,14 @@ open the `[🎺]` media-player-settings tab to configure it,
|
||||
|
||||
### audio equalizer
|
||||
|
||||
bass boosted
|
||||
and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||
|
||||
can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)
|
||||
|
||||
has the convenient side-effect of reducing the pause between songs, so gapless albums play better with the eq enabled (just make it flat)
|
||||
|
||||
not available on iPhones / iPads because AudioContext currently breaks background audio playback on iOS (15.7.8)
|
||||
|
||||
|
||||
### fix unreliable playback on android
|
||||
|
||||
@@ -745,6 +808,8 @@ other notes,
|
||||
|
||||
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
|
||||
|
||||
* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`
|
||||
|
||||
|
||||
## searching
|
||||
|
||||
@@ -770,9 +835,12 @@ for the above example to work, add the commandline argument `-e2ts` to also scan
|
||||
using arguments or config files, or a mix of both:
|
||||
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf) and [./docs/example2.conf](docs/example2.conf)
|
||||
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
|
||||
* or click the `[reload cfg]` button in the control-panel when logged in as admin
|
||||
* or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
|
||||
* changes to the `[global]` config section requires a restart to take effect
|
||||
|
||||
**NB:** as humongous as this readme is, there are also a lot of undocumented features. Run copyparty with `--help` to see all available global options; all of those can be used in the `[global]` section of config files, and everything listed in `--help-flags` can be used in volumes as volflags.
|
||||
* if running in docker/podman, try this: `docker run --rm -it copyparty/ac --help`
|
||||
|
||||
|
||||
## zeroconf
|
||||
|
||||
@@ -878,15 +946,13 @@ unsafe, slow, not recommended for wan, enable with `--smb` for read-only or `--
|
||||
|
||||
click the [connect](http://127.0.0.1:3923/?hc) button in the control-panel to see connection instructions for windows, linux, macos
|
||||
|
||||
dependencies: `python3 -m pip install --user -U impacket==0.10.0`
|
||||
dependencies: `python3 -m pip install --user -U impacket==0.11.0`
|
||||
* newer versions of impacket will hopefully work just fine but there is monkeypatching so maybe not
|
||||
|
||||
some **BIG WARNINGS** specific to SMB/CIFS, in decreasing importance:
|
||||
* not entirely confident that read-only is read-only
|
||||
* the smb backend is not fully integrated with vfs, meaning there could be security issues (path traversal). Please use `--smb-port` (see below) and [prisonparty](./bin/prisonparty.sh)
|
||||
* account passwords work per-volume as expected, but account permissions are coalesced; all accounts have read-access to all volumes, and if a single account has write-access to some volume then all other accounts also do
|
||||
* if no accounts have write-access to a specific volume, or if `--smbw` is not set, then writing to that volume from smb *should* be impossible
|
||||
* will be fixed once [impacket v0.11.0](https://github.com/SecureAuthCorp/impacket/commit/d923c00f75d54b972bca573a211a82f09b55261a) is released
|
||||
* account passwords work per-volume as expected, and so does account permissions (read/write/move/delete), but `--smbw` must be given to allow write-access from smb
|
||||
* [shadowing](#shadowing) probably works as expected but no guarantees
|
||||
|
||||
and some minor issues,
|
||||
@@ -897,7 +963,7 @@ and some minor issues,
|
||||
* win10 onwards does not allow connecting anonymously / without accounts
|
||||
* on windows, creating a new file through rightclick --> new --> textfile throws an error due to impacket limitations -- hit OK and F5 to get your file
|
||||
* python3 only
|
||||
* slow
|
||||
* slow (the builtin webdav support in windows is 5x faster, and rclone-webdav is 30x faster)
|
||||
|
||||
known client bugs:
|
||||
* on win7 only, `--smb1` is much faster than smb2 (default) because it keeps rescanning folders on smb2
|
||||
@@ -912,6 +978,16 @@ authenticate with one of the following:
|
||||
* username `$password`, password `k`
|
||||
|
||||
|
||||
## browser ux
|
||||
|
||||
tweaking the ui
|
||||
|
||||
* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort
|
||||
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Circle tags/.tn tags/Artist tags/Title`
|
||||
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Circle,tags/.tn,tags/Artist,tags/Title,href`
|
||||
* to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`
|
||||
|
||||
|
||||
## file indexing
|
||||
|
||||
enables dedup and music search ++
|
||||
@@ -938,6 +1014,7 @@ the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`,
|
||||
* `-v ~/music::r:c,d2ts` same except only affecting tags
|
||||
|
||||
note:
|
||||
* upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
|
||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
||||
@@ -980,6 +1057,8 @@ set upload rules using volflags, some examples:
|
||||
|
||||
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
|
||||
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
|
||||
* `:c,vmaxb=1g` block uploads if total volume size would exceed 1 GiB afterwards
|
||||
* `:c,vmaxn=4k` block uploads if volume would contain more than 4096 files afterwards
|
||||
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
|
||||
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
|
||||
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
|
||||
@@ -993,6 +1072,9 @@ you can also set transaction limits which apply per-IP and per-volume, but these
|
||||
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
|
||||
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
|
||||
|
||||
notes:
|
||||
* `vmaxb` and `vmaxn` require either the `e2ds` volflag or `-e2dsa` global-option
|
||||
|
||||
|
||||
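to show how several of these rules combine, here is a sketch of an anonymous drop-folder with a size limit, a free-space guard, and no subfolders (the path and values are made up; chaining each rule as its own `:c,` group follows the volume-argument style shown earlier):

```bash
python copyparty-sfx.py -v /mnt/inc:inc:w:c,sz=1k-3m:c,df=4g:c,nosub
```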
## compress uploads
|
||||
|
||||
@@ -1123,6 +1205,24 @@ note that this is way more complicated than the new [event hooks](#event-hooks)
|
||||
note that it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
|
||||
|
||||
|
||||
## handlers
|
||||
|
||||
redefine behavior with plugins ([examples](./bin/handlers/))
|
||||
|
||||
replace 404 and 403 errors with something completely different (that's it for now)
|
||||
|
||||
|
||||
## identity providers
|
||||
|
||||
replace copyparty passwords with oauth and such
|
||||
|
||||
work is [ongoing](https://github.com/9001/copyparty/issues/62) to support authenticating / authorizing users based on a separate authentication proxy, which makes it possible to support oauth, single-sign-on, etc.
|
||||
|
||||
it is currently possible to specify `--idp-h-usr x-username`; copyparty will then skip password validation and blindly trust the username specified in the `X-Username` request header
|
||||
|
||||
the remaining stuff (accepting user groups through another header, creating volumes on the fly) are still to-do; configuration will probably [look like this](./docs/examples/docker/idp/copyparty.conf)
|
||||
|
||||
|
||||
## hiding from google
|
||||
|
||||
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
||||
@@ -1182,8 +1282,8 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
* anyone can upload, and receive "secret" links for each upload they do:
|
||||
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
|
||||
|
||||
* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
|
||||
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
|
||||
* anyone can browse (`r`), only `kevin` (password `okgo`) can upload/move/delete (`A`) files:
|
||||
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:A,kevin`
|
||||
|
||||
* read-only music server:
|
||||
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
|
||||
@@ -1208,6 +1308,7 @@ you can either:
|
||||
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
|
||||
|
||||
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
|
||||
* **warning:** nginx-QUIC is still experimental and can make uploads much slower, so HTTP/2 is recommended for now
|
||||
|
||||
example webserver configs:
|
||||
|
||||
@@ -1215,15 +1316,97 @@ example webserver configs:
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
|
||||
|
||||
|
||||
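for orientation, a minimal hand-rolled nginx location block might look like the sketch below; the bundled [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) is the more complete reference:

```nginx
location / {
    proxy_pass http://127.0.0.1:3923;
    proxy_set_header Host $host;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    client_max_body_size 0;  # don't let nginx cap upload sizes
}
```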
## prometheus
|
||||
|
||||
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
|
||||
|
||||
must be enabled with `--stats` since it reduces startup time a tiny bit, and you probably want `-e2dsa` too
|
||||
|
||||
the endpoint is only accessible by `admin` accounts, meaning the `a` in `rwmda` in the following example commandline: `python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed --stats -e2dsa`
|
||||
|
||||
follow a guide for setting up `node_exporter` except have it read from copyparty instead; example `/etc/prometheus/prometheus.yml` below
|
||||
|
||||
```yaml
scrape_configs:
  - job_name: copyparty
    metrics_path: /.cpr/metrics
    basic_auth:
      password: wark
    static_configs:
      - targets: ['192.168.123.1:3923']
```
|
||||
|
||||
currently the following metrics are available,
|
||||
* `cpp_uptime_seconds` time since last copyparty restart
|
||||
* `cpp_boot_unixtime_seconds` same but as an absolute timestamp
|
||||
* `cpp_http_conns` number of open http(s) connections
|
||||
* `cpp_http_reqs` number of http(s) requests handled
|
||||
* `cpp_sus_reqs` number of 403/422/malicious requests
|
||||
* `cpp_active_bans` number of currently banned IPs
|
||||
* `cpp_total_bans` number of IPs banned since last restart
|
||||
|
||||
these are available unless `--nos-vst` is specified:
|
||||
* `cpp_db_idle_seconds` time since last database activity (upload/rename/delete)
|
||||
* `cpp_db_act_seconds` same but as an absolute timestamp
|
||||
* `cpp_idle_vols` number of volumes which are idle / ready
|
||||
* `cpp_busy_vols` number of volumes which are busy / indexing
|
||||
* `cpp_offline_vols` number of volumes which are offline / unavailable
|
||||
* `cpp_hashing_files` number of files queued for hashing / indexing
|
||||
* `cpp_tagq_files` number of files queued for metadata scanning
|
||||
* `cpp_mtpq_files` number of files queued for plugin-based analysis
|
||||
|
||||
and these are available per-volume only:
|
||||
* `cpp_disk_size_bytes` total HDD size
|
||||
* `cpp_disk_free_bytes` free HDD space
|
||||
|
||||
and these are per-volume and `total`:
|
||||
* `cpp_vol_bytes` size of all files in volume
|
||||
* `cpp_vol_files` number of files
|
||||
* `cpp_dupe_bytes` disk space presumably saved by deduplication
|
||||
* `cpp_dupe_files` number of dupe files
|
||||
* `cpp_unf_bytes` currently unfinished / incoming uploads
|
||||
|
||||
some of the metrics have additional requirements to function correctly,
|
||||
* `cpp_vol_*` requires either the `e2ds` volflag or `-e2dsa` global-option
|
||||
|
||||
the following options are available to disable some of the metrics:
|
||||
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
|
||||
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
|
||||
* `--nos-vst` disables volume state, reducing the worst-case prometheus query time by 0.5 sec
|
||||
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
|
||||
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
|
||||
|
||||
note: the following metrics are counted incorrectly if multiprocessing is enabled with `-j`: `cpp_http_conns`, `cpp_http_reqs`, `cpp_sus_reqs`, `cpp_active_bans`, `cpp_total_bans`
|
||||
|
||||
|
||||
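once prometheus is scraping, the metrics can be queried like any other series; two example promql expressions (assuming `cpp_http_reqs` behaves as a counter, which the description above suggests):

```
# average request rate over the last 5 minutes
rate(cpp_http_reqs[5m])

# number of currently banned IPs
cpp_active_bans
```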
# packages
|
||||
|
||||
the party might be closer than you think
|
||||
|
||||
if your distro/OS is not mentioned below, there might be some hints in the [«on servers»](#on-servers) section
|
||||
|
||||
|
||||
## arch package
|
||||
|
||||
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||
|
||||
it comes with a [systemd service](./contrib/package/arch/copyparty.service) and expects to find one or more [config files](./docs/example.conf) in `/etc/copyparty.d/`
|
||||
|
||||
|
||||
## fedora package
|
||||
|
||||
currently **NOT** available on [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) , fedora is having issues with their build servers and won't be fixed for several months
|
||||
|
||||
if you previously installed copyparty from copr, you may run one of the following commands to upgrade to a more recent version:
|
||||
|
||||
```bash
|
||||
dnf install https://ocv.me/copyparty/fedora/37/python3-copyparty.fc37.noarch.rpm
|
||||
dnf install https://ocv.me/copyparty/fedora/38/python3-copyparty.fc38.noarch.rpm
|
||||
dnf install https://ocv.me/copyparty/fedora/39/python3-copyparty.fc39.noarch.rpm
|
||||
```
|
||||
|
||||
to run copyparty as a service, use the [systemd service scripts](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd), just replace `/usr/bin/python3 /usr/local/bin/copyparty-sfx.py` with `/usr/bin/copyparty`
|
||||
|
||||
|
||||
## nix package
|
||||
|
||||
@@ -1358,7 +1541,7 @@ TLDR: yes
|
||||
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
||||
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
||||
|
||||
* internet explorer 6 to 8 behave the same
|
||||
* internet explorer 6 through 8 behave the same
|
||||
* firefox 52 and chrome 49 are the final winxp versions
|
||||
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
||||
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
|
||||
@@ -1483,6 +1666,7 @@ below are some tweaks roughly ordered by usefulness:
|
||||
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
|
||||
* lots of connections (many users or heavy clients)
|
||||
* simultaneous downloads and uploads saturating a 20gbps connection
|
||||
* if `-e2d` is enabled, `-j2` gives 4x performance for directory listings; `-j4` gives 16x
|
||||
|
||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||
* using [pypy](https://www.pypy.org/) instead of [cpython](https://www.python.org/) *can* be 70% faster for some workloads, but slower for many others
|
||||
@@ -1507,10 +1691,14 @@ when uploading files,
|
||||
|
||||
# security
|
||||
|
||||
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all important updates (at the lack of better ideas)
|
||||
|
||||
some notes on hardening
|
||||
|
||||
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
|
||||
* cors doesn't work right otherwise
|
||||
* if you allow anonymous uploads or otherwise don't trust the contents of a volume, you can prevent XSS with volflag `nohtml`
|
||||
* this returns html documents as plaintext, and also disables markdown rendering
|
||||
|
||||
safety profiles:
|
||||
|
||||
@@ -1524,9 +1712,7 @@ safety profiles:
|
||||
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
* however note if you edit one file it will also affect the other copies
|
||||
* `--vague-401` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
|
||||
* **NB:** will ban anyone who enables up2k turbo
|
||||
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||
* `--nih` removes the server hostname from directory listings
|
||||
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
@@ -1538,9 +1724,9 @@ safety profiles:
|
||||
other misc notes:
|
||||
|
||||
* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
|
||||
* combine this with volflag `c,fk` to generate filekeys (per-file accesskeys); users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
* the default filekey entropy is fairly small so give `--fk-salt` around 30 characters if you want filekeys longer than 16 chars
|
||||
* permissions `wG` lets users upload files and receive their own filekeys, still without being able to see other uploads
|
||||
* you may want [filekeys](#filekeys) to prevent filename bruteforcing
|
||||
* permission `h` instead of `r` makes copyparty behave like a traditional webserver with directory listing/index disabled, returning index.html instead
|
||||
* compatibility with filekeys: index.html itself can be retrieved without the correct filekey, but all other files are protected
|
||||
|
||||
|
||||
## gotchas
|
||||
@@ -1548,10 +1734,12 @@ other misc notes:
|
||||
behavior that might be unexpected
|
||||
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users can submit `<script>`s which autorun for other visitors in a few ways;
|
||||
* users can submit `<script>`s which autorun (in a sandbox) for other visitors in a few ways;
|
||||
* uploading a `README.md` -- avoid with `--no-readme`
|
||||
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not 100% safe, see below
|
||||
* markdown documents can contain html and `<script>`s; attempts are made to prevent scripts from executing (unless `-emp` is specified) but this is not 100% bulletproof, so setting the `nohtml` volflag is still the safest choice
|
||||
* or eliminate the problem entirely by only giving write-access to trustworthy people :^)
|
||||
|
||||
|
||||
## cors
|
||||
@@ -1566,6 +1754,28 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
|
||||
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
|
||||
|
||||
|
||||
## filekeys
|
||||
|
||||
prevent filename bruteforcing
|
||||
|
||||
volflag `c,fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
|
||||
|
||||
by default, filekeys are generated based on salt (`--fk-salt`) + filesystem-path + file-size + inode (if not windows); add volflag `fka` to generate slightly weaker filekeys which will not be invalidated if the file is edited (only salt + path)
|
||||
|
||||
permissions `wG` (write + upget) lets users upload files and receive their own filekeys, still without being able to see other uploads
|
||||
|
||||
|
||||
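putting it together, a sketch of a volume where anonymous users can upload and get a filekey link back, without being able to see anything else (the `8` is just an example key-length):

```bash
python copyparty-sfx.py -e2dsa -v /mnt/inc:inc:wG:c,fk=8
# uploaders then receive links of the form  /inc/file.bin?k=xxxxxxxx
```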
## password hashing
|
||||
|
||||
you can hash passwords before putting them into config files / providing them as arguments; see `--help-pwhash` for all the details
|
||||
|
||||
`--ah-alg argon2` enables it, and if you have any plaintext passwords then it'll print the hashed versions on startup so you can replace them
|
||||
|
||||
optionally also specify `--ah-cli` to enter an interactive mode where it will hash passwords without ever writing the plaintext ones to disk
|
||||
|
||||
the default configs take about 0.4 sec and 256 MiB RAM to process a new password on a decent laptop
|
||||
|
||||
|
||||
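a short sketch of the workflow using the flags above:

```bash
# interactively hash a password (nothing written to disk), then paste the
# output into your config file or -a argument in place of the plaintext one
python copyparty-sfx.py --ah-alg argon2 --ah-cli
```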
## https
|
||||
|
||||
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
|
||||
@@ -1613,6 +1823,8 @@ mandatory deps:
|
||||
|
||||
install these to enable bonus features
|
||||
|
||||
enable hashed passwords in config: `argon2-cffi`
|
||||
|
||||
enable ftp-server:
|
||||
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
|
||||
* with TLS encryption, `pyftpdlib pyopenssl`
|
||||
@@ -1629,7 +1841,7 @@ enable [thumbnails](#thumbnails) of...
|
||||
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
|
||||
|
||||
enable [smb](#smb-server) support (**not** recommended):
|
||||
* `impacket==0.10.0`
|
||||
* `impacket==0.11.0`
|
||||
|
||||
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
|
||||
|
||||
@@ -1663,7 +1875,7 @@ can be convenient on machines where installing python is problematic, however is
|
||||
|
||||
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
|
||||
|
||||
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
|
||||
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.8.7/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
|
||||
|
||||
meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date
|
||||
|
||||
|
||||
@@ -207,7 +207,7 @@ def examples():


def main():
    global NC, BY_PATH
    global NC, BY_PATH  # pylint: disable=global-statement
    os.system("")
    print()

@@ -282,7 +282,8 @@ def main():
    if ver == "corrupt":
        die("{} database appears to be corrupt, sorry")

    if ver < DB_VER1 or ver > DB_VER2:
    iver = int(ver)
    if iver < DB_VER1 or iver > DB_VER2:
        m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
        die(m)

35
bin/handlers/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
replace the standard 404 / 403 responses with plugins
|
||||
|
||||
|
||||
# usage
|
||||
|
||||
load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py`
|
||||
|
||||
|
||||
# api
|
||||
|
||||
each plugin must define a `main()` which takes 3 arguments;
|
||||
|
||||
* `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself)
|
||||
* `vn` is the VFS which overlaps with the requested URL, and
|
||||
* `rem` is the URL remainder below the VFS mountpoint
|
||||
* so `vn.vpath + rem` == `cli.vpath` == original request
|
||||
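for example (hypothetical volume and file names): if a volume is mounted at `pub` and a client requests `pub/a/b.txt`, then `vn.vpath` would be `pub`, `rem` would be `a/b.txt`, and `cli.vpath` the original `pub/a/b.txt`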
|
||||
|
||||
# examples
|
||||
|
||||
## on404
|
||||
|
||||
* [sorry.py](sorry.py) replies with a custom message instead of the usual 404
|
||||
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
|
||||
* [never404.py](never404.py) 100% guarantees that 404 will never be a thing again, as it automatically creates dummy files whenever necessary
|
||||
* [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff
|
||||
|
||||
## on403
|
||||
|
||||
* [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4
|
||||
|
||||
|
||||
# notes
|
||||
|
||||
* on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404)
|
||||
36
bin/handlers/caching-proxy.py
Executable file
@@ -0,0 +1,36 @@
|
||||
# assume each requested file exists on another webserver and
|
||||
# download + mirror them as they're requested
|
||||
# (basically pretend we're warnish)
|
||||
|
||||
import os
|
||||
import requests
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from copyparty.httpcli import HttpCli
|
||||
|
||||
|
||||
def main(cli: "HttpCli", vn, rem):
|
||||
url = "https://mirrors.edge.kernel.org/alpine/" + rem
|
||||
abspath = os.path.join(vn.realpath, rem)
|
||||
|
||||
# sneaky trick to preserve a requests-session between downloads
|
||||
# so it doesn't have to spend ages reopening https connections;
|
||||
# luckily we can stash it inside the copyparty client session,
|
||||
# name just has to be definitely unused so "hacapo_req_s" it is
|
||||
req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
|
||||
setattr(cli.conn, "hacapo_req_s", req_s)
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.dirname(abspath), exist_ok=True)
|
||||
with req_s.get(url, stream=True, timeout=69) as r:
|
||||
r.raise_for_status()
|
||||
with open(abspath, "wb", 64 * 1024) as f:
|
||||
for buf in r.iter_content(chunk_size=64 * 1024):
|
||||
f.write(buf)
|
||||
except:
|
||||
os.unlink(abspath)
|
||||
return "false"
|
||||
|
||||
return "retry"
|
||||
6
bin/handlers/ip-ok.py
Executable file
@@ -0,0 +1,6 @@
|
||||
# disable permission checks and allow access if client-ip is 1.2.3.4
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
if cli.ip == "1.2.3.4":
|
||||
return "allow"
|
||||
11
bin/handlers/never404.py
Executable file
@@ -0,0 +1,11 @@
|
||||
# create a dummy file and let copyparty return it
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
print("hello", cli.ip)
|
||||
|
||||
abspath = vn.canonical(rem)
|
||||
with open(abspath, "wb") as f:
|
||||
f.write(b"404? not on MY watch!")
|
||||
|
||||
return "retry"
|
||||
16
bin/handlers/nooo.py
Executable file
@@ -0,0 +1,16 @@
|
||||
# reply with an endless "noooooooooooooooooooooooo"
|
||||
|
||||
|
||||
def say_no():
|
||||
yield b"n"
|
||||
while True:
|
||||
yield b"o" * 4096
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
cli.send_headers(None, 404, "text/plain")
|
||||
|
||||
for chunk in say_no():
|
||||
cli.s.sendall(chunk)
|
||||
|
||||
return "false"
|
||||
7
bin/handlers/sorry.py
Executable file
@@ -0,0 +1,7 @@
|
||||
# sends a custom response instead of the usual 404
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
msg = f"sorry {cli.ip} but {cli.vpath} doesn't exist"
|
||||
|
||||
return str(cli.reply(msg.encode("utf-8"), 404, "text/plain"))
|
||||
123
bin/hooks/msg-log.py
Executable file
@@ -0,0 +1,123 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from datetime import datetime, timezone
|
||||
except:
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
"""
|
||||
use copyparty as a dumb messaging server / guestbook thing;
|
||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||
|
||||
Sample usage:
|
||||
|
||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||
|
||||
Where:
|
||||
|
||||
xm = execute on message-to-server-log
|
||||
j = provide message information as json; not just the text - this script REQUIRES json
|
||||
t10 = timeout and kill download after 10 secs
|
||||
"""
|
||||
|
||||
|
||||
# output filename
|
||||
FILENAME = os.environ.get("COPYPARTY_MESSAGE_FILENAME", "") or "README.md"
|
||||
|
||||
# set True to write in descending order (newest message at top of file);
|
||||
# note that this becomes very slow/expensive as the file gets bigger
|
||||
DESCENDING = True
|
||||
|
||||
# the message template; the following parameters are provided by copyparty and can be referenced below:
|
||||
# 'ap' = absolute filesystem path where the message was posted
|
||||
# 'vp' = virtual path (URL 'path') where the message was posted
|
||||
# 'mt' = 'at' = unix-timestamp when the message was posted
|
||||
# 'datetime' = ISO-8601 time when the message was posted
|
||||
# 'sz' = message size in bytes
|
||||
# 'host' = the server hostname which the user was accessing (URL 'host')
|
||||
# 'user' = username (if logged in), otherwise '*'
|
||||
# 'txt' = the message text itself
|
||||
# (uncomment the print(msg_info) to see if additional information has been introduced by copyparty since this was written)
|
||||
TEMPLATE = """
|
||||
🕒 %(datetime)s, 👤 %(user)s @ %(ip)s
|
||||
%(txt)s
|
||||
"""
|
||||
|
||||
|
||||
def write_ascending(filepath, msg_text):
|
||||
with open(filepath, "a", encoding="utf-8", errors="replace") as outfile:
|
||||
outfile.write(msg_text)
|
||||
|
||||
|
||||
def write_descending(filepath, msg_text):
|
||||
lockpath = filepath + ".lock"
|
||||
got_it = False
|
||||
for _ in range(16):
|
||||
try:
|
||||
os.mkdir(lockpath)
|
||||
got_it = True
|
||||
break
|
||||
except:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
if not got_it:
|
||||
return sys.exit(1)
|
||||
|
||||
try:
|
||||
oldpath = filepath + ".old"
|
||||
os.rename(filepath, oldpath)
|
||||
with open(oldpath, "r", encoding="utf-8", errors="replace") as infile, open(
|
||||
filepath, "w", encoding="utf-8", errors="replace"
|
||||
) as outfile:
|
||||
outfile.write(msg_text)
|
||||
while True:
|
||||
buf = infile.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
outfile.write(buf)
|
||||
finally:
|
||||
try:
|
||||
os.unlink(oldpath)
|
||||
except:
|
||||
pass
|
||||
os.rmdir(lockpath)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
msg_info = json.loads(sys.argv[1])
|
||||
# print(msg_info)
|
||||
|
||||
try:
|
||||
dt = datetime.fromtimestamp(msg_info["at"], timezone.utc)
|
||||
except:
|
||||
dt = datetime.utcfromtimestamp(msg_info["at"])
|
||||
|
||||
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
||||
|
||||
msg_text = TEMPLATE % msg_info
|
||||
|
||||
filepath = os.path.join(msg_info["ap"], FILENAME)
|
||||
|
||||
if DESCENDING and os.path.exists(filepath):
|
||||
write_descending(filepath, msg_text)
|
||||
else:
|
||||
write_ascending(filepath, msg_text)
|
||||
|
||||
print(msg_text)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -4,7 +4,7 @@ import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from plyer import notification
|
||||
|
||||
|
||||
@@ -43,7 +43,8 @@ def main():
|
||||
fp = inf["ap"]
|
||||
sz = humansize(inf["sz"])
|
||||
dp, fn = os.path.split(fp)
|
||||
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
|
||||
dt = datetime.fromtimestamp(inf["mt"], timezone.utc)
|
||||
mt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||
title = "File received"
|
||||
|
||||
@@ -37,6 +37,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
_ = r"""
|
||||
@@ -43,8 +43,11 @@ except:
|
||||
return p
|
||||
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
def humantime(ts):
|
||||
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
|
||||
return datetime.fromtimestamp(ts, UTC).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def find_files_root(inf):
|
||||
@@ -96,7 +99,7 @@ def main():
|
||||
|
||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||
ret.append("")
|
||||
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
|
||||
ftime = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S.%f")
|
||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||
|
||||
@@ -24,6 +24,15 @@ these do not have any problematic dependencies at all:
|
||||
* also available as an [event hook](../hooks/wget.py)
|
||||
|
||||
|
||||
## dangerous plugins
|
||||
|
||||
plugins in this section should only be used with appropriate precautions:
|
||||
|
||||
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
||||
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
||||
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) is executed on the server, meaning anyone with upload access can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)
|
||||
|
||||
@@ -7,6 +7,7 @@ set -e
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins
|
||||
# linux/arch: requires gcc make cmake patchelf python3 ffmpeg fftw libsndfile python-{numpy,wheel,pip,setuptools}
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
@@ -227,15 +228,16 @@ install_vamp() {
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2691/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
|
||||
) <vamp-plugin-sdk-2.9.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
|
||||
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
||||
) <vamp-plugin-sdk-2.10.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.10.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
./configure --prefix=$HOME/pe/vamp-sdk
|
||||
printf '%s\n' "int main(int argc, char **argv) { return 0; }" > host/vamp-simple-host.cpp
|
||||
./configure --disable-programs --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
@@ -250,8 +252,9 @@ install_vamp() {
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux ||
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I/usr/include/vamp-sdk`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS="-L$HOME/pe/vamp-sdk/lib -L/usr/lib64"
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
WARNING -- DANGEROUS PLUGIN --
|
||||
if someone is able to upload files to a copyparty which is
|
||||
running this plugin, they can execute malware on your machine
|
||||
so please keep this on a LAN and protect it with a password
|
||||
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
@@ -10,16 +15,17 @@ use copyparty as a chromecast replacement:
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
(iOS devices have to rely on the web-UI)
|
||||
|
||||
goes without saying, but this is HELLA DANGEROUS,
|
||||
GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS
|
||||
iOS devices can use the web-UI or the shortcut instead:
|
||||
https://github.com/9001/copyparty#ios-shortcuts
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
example copyparty config to use this;
|
||||
lets the user "kevin" with password "hunter2" use this plugin:
|
||||
-a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin
|
||||
apt install xdotool libnotify-bin mpv
|
||||
python3 -m pip install --user -U streamlink yt-dlp
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
@@ -63,8 +69,10 @@ set -e
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server (note: replace `-v.::rw:` with `-v.::w:` to disallow retrieving uploaded stuff)
|
||||
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
# start the server
|
||||
# note 1: replace hunter2 with a better password to access the server
|
||||
# note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff
|
||||
cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
@@ -72,11 +80,23 @@ cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mt
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote
|
||||
|
||||
have_mpv = shutil.which("mpv")
|
||||
have_vlc = shutil.which("vlc")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 2 and sys.argv[1] == "x":
|
||||
# invoked on commandline for testing;
|
||||
# python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ
|
||||
txt = " ".join(sys.argv[2:])
|
||||
txt = quote(txt.replace(" ", "+"))
|
||||
return open_post(txt.encode("utf-8"))
|
||||
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
@@ -92,7 +112,7 @@ def open_post(txt):
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
open_url(txt)
|
||||
return open_url(txt)
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
@@ -128,6 +148,17 @@ def open_url(txt):
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
|
||||
|
||||
# mpv is probably smart enough to use streamlink automatically
|
||||
if try_mpv(txt):
|
||||
print("mpv got it")
|
||||
return
|
||||
|
||||
# or maybe streamlink would be a good choice to open this
|
||||
if try_streamlink(txt):
|
||||
print("streamlink got it")
|
||||
return
|
||||
|
||||
# nope,
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
@@ -136,4 +167,39 @@ def open_url(txt):
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
def try_mpv(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
print("trying mpv...")
|
||||
sp.check_call(["mpv", "--fs", url])
|
||||
return True
|
||||
except:
|
||||
# if it ran for 15 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 15
|
||||
|
||||
|
||||
def try_streamlink(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
import streamlink
|
||||
|
||||
print("trying streamlink...")
|
||||
streamlink.Streamlink().resolve_url(url)
|
||||
|
||||
if have_mpv:
|
||||
args = "-m streamlink -p mpv -a --fs"
|
||||
else:
|
||||
args = "-m streamlink"
|
||||
|
||||
cmd = [sys.executable] + args.split() + [url, "best"]
|
||||
t0 = time.time()
|
||||
sp.check_call(cmd)
|
||||
return True
|
||||
except:
|
||||
# if it ran for 10 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 10
|
||||
|
||||
|
||||
main()
|
||||
|
||||
@@ -65,6 +65,10 @@ def main():
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
|
||||
@@ -46,13 +46,20 @@ import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
info = log = dbg = None
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
print(
|
||||
@@ -64,6 +71,13 @@ print(
|
||||
)
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
info = log = dbg = null_log
|
||||
|
||||
|
||||
try:
|
||||
from fuse import FUSE, FuseOSError, Operations
|
||||
except:
|
||||
@@ -83,13 +97,6 @@ except:
|
||||
raise
|
||||
|
||||
|
||||
def print(*args, **kwargs):
|
||||
try:
|
||||
builtins.print(*list(args), **kwargs)
|
||||
except:
|
||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||
|
||||
|
||||
def termsafe(txt):
|
||||
try:
|
||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||
@@ -118,10 +125,6 @@ def fancy_log(msg):
|
||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||
|
||||
|
||||
def null_log(msg):
|
||||
pass
|
||||
|
||||
|
||||
def hexler(binary):
|
||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||
@@ -176,7 +179,7 @@ class RecentLog(object):
|
||||
def put(self, msg):
|
||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||
if self.f:
|
||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
||||
fmsg = " ".join([datetime.now(UTC).strftime("%H%M%S.%f"), str(msg)])
|
||||
self.f.write(fmsg.encode("utf-8"))
|
||||
|
||||
with self.mtx:
|
||||
|
||||
@@ -20,12 +20,13 @@ import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
@@ -155,11 +156,10 @@ th {
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
dt = datetime.fromtimestamp(at if at > 0 else mt, UTC)
|
||||
v = [
|
||||
w[:16],
|
||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
),
|
||||
dt.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
@@ -12,13 +12,13 @@ done
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
|
||||
example for running straight from source (instead of using an sfx):
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
|
||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||
you should add /home/foo/.local as a VOLDIR
|
||||
@@ -28,6 +28,16 @@ exit 1
|
||||
}
|
||||
|
||||
|
||||
errs=
|
||||
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
|
||||
command -v $c >/dev/null || {
|
||||
echo ERROR: command not found: $c
|
||||
errs=1
|
||||
}
|
||||
done
|
||||
[ $errs ] && exit 1
|
||||
|
||||
|
||||
# read arguments
|
||||
trap help EXIT
|
||||
jail="$(realpath "$1")"; shift
|
||||
@@ -58,11 +68,18 @@ cpp="$1"; shift
|
||||
}
|
||||
trap - EXIT
|
||||
|
||||
usr="$(getent passwd $uid | cut -d: -f1)"
|
||||
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
|
||||
uid="$(getent passwd $uid | cut -d: -f3)"
|
||||
|
||||
grp="$(getent group $gid | cut -d: -f1)"
|
||||
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
|
||||
gid="$(getent group $gid | cut -d: -f3)"
|
||||
|
||||
# debug/vis
|
||||
echo
|
||||
echo "chroot-dir = $jail"
|
||||
echo "user:group = $uid:$gid"
|
||||
echo "user:group = $uid:$gid ($usr:$grp)"
|
||||
echo " copyparty = $cpp"
|
||||
echo
|
||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||
@@ -80,34 +97,39 @@ jail="${jail%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
||||
i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
|
||||
mkdir -p "$jail$v"
|
||||
mount --bind "$v" "$jail$v"
|
||||
done
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
|
||||
|
||||
cln() {
|
||||
rv=$?
|
||||
wait -f -p rv $p || true
|
||||
trap - EXIT
|
||||
wait -f -n $p && rv=0 || rv=$?
|
||||
cd /
|
||||
echo "stopping chroot..."
|
||||
lsof "$jail" | grep -F "$jail" &&
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
|
||||
echo "chroot is in use; will not unmount" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
LC_ALL=C sort -r | while IFS= read -r v; do
|
||||
umount "$v" && echo "umount OK: $v"
|
||||
done
|
||||
}
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
@@ -128,8 +150,8 @@ chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
||||
export HOME="$(getent passwd $uid | cut -d: -f6)"
|
||||
export USER="$usr"
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
@@ -137,5 +159,5 @@ export LOGNAME="$USER"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill -USR1 $p' USR1
|
||||
trap 'kill $p' INT TERM
|
||||
trap 'trap - INT TERM; kill $p' INT TERM
|
||||
wait
|
||||
|
||||
84
bin/u2c.py
@@ -1,8 +1,8 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
S_VERSION = "1.9"
|
||||
S_BUILD_DT = "2023-05-07"
|
||||
S_VERSION = "1.12"
|
||||
S_BUILD_DT = "2023-12-08"
|
||||
|
||||
"""
|
||||
u2c.py: upload to copyparty
|
||||
@@ -14,6 +14,7 @@ https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
|
||||
- if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
@@ -39,7 +40,7 @@ except:
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
except ImportError as ex:
|
||||
if EXE:
|
||||
raise
|
||||
elif sys.version_info > (2, 7):
|
||||
@@ -50,7 +51,7 @@ except ImportError:
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
|
||||
|
||||
print(m.format(sys.executable))
|
||||
print(m.format(sys.executable), "\nspecifically,", ex)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -104,12 +105,14 @@ class File(object):
|
||||
# set by handshake
|
||||
self.recheck = False # duplicate; redo handshake after all files done
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
self.wark = "" # type: str
|
||||
self.url = "" # type: str
|
||||
self.nhs = 0
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
self.cd = 0
|
||||
|
||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
@@ -220,6 +223,7 @@ class MTHash(object):
|
||||
|
||||
def hash_at(self, nch):
|
||||
f = self.f
|
||||
assert f
|
||||
ofs = ofs0 = nch * self.csz
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||
@@ -411,10 +415,11 @@ def walkdir(err, top, seen):
|
||||
err.append((ap, str(ex)))
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
def walkdirs(err, tops, excl):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
if not VT100:
|
||||
excl = excl.replace("/", r"\\")
|
||||
za = []
|
||||
for td in tops:
|
||||
try:
|
||||
@@ -431,6 +436,8 @@ def walkdirs(err, tops):
|
||||
za = [x.replace(b"/", b"\\") for x in za]
|
||||
tops = za
|
||||
|
||||
ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
|
||||
|
||||
for top in tops:
|
||||
isdir = os.path.isdir(top)
|
||||
if top[-1:] == sep:
|
||||
@@ -443,6 +450,8 @@ def walkdirs(err, tops):
|
||||
|
||||
if isdir:
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
if ptn.match(ap):
|
||||
continue
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
@@ -455,7 +464,7 @@ def quotep(btxt):
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
return quot1.replace(b" ", b"+") # type: ignore
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
@@ -492,7 +501,7 @@ def up2k_chunksize(filesize):
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb, mth):
|
||||
# type: (File, any, any) -> None
|
||||
# type: (File, Any, Any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
@@ -592,7 +601,7 @@ def handshake(ar, file, search):
|
||||
raise
|
||||
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
time.sleep(ar.cd)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
@@ -654,7 +663,7 @@ class Ctl(object):
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
for _, _, inf in walkdirs(err, ar.files, ar.x):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
continue
|
||||
|
||||
@@ -683,6 +692,7 @@ class Ctl(object):
|
||||
|
||||
def __init__(self, ar, stats=None):
|
||||
self.ok = False
|
||||
self.errs = 0
|
||||
self.ar = ar
|
||||
self.stats = stats or self._scan()
|
||||
if not self.stats:
|
||||
@@ -696,7 +706,7 @@ class Ctl(object):
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
self.filegen = walkdirs([], ar.files, ar.x)
|
||||
self.recheck = [] # type: list[File]
|
||||
|
||||
if ar.safe:
|
||||
@@ -730,7 +740,7 @@ class Ctl(object):
|
||||
|
||||
self._fancy()
|
||||
|
||||
self.ok = True
|
||||
self.ok = not self.errs
|
||||
|
||||
def _safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
@@ -897,12 +907,23 @@ class Ctl(object):
|
||||
dp = os.path.join(top, rd)
|
||||
lnodes = set(os.listdir(dp))
|
||||
bnames = [x for x in ls if x not in lnodes]
|
||||
if bnames:
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
||||
req_ses.post(self.ar.url + "?delete", json=locs)
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
while locs:
|
||||
req = locs
|
||||
while req:
|
||||
print("DELETING ~%s/#%s" % (srd, len(req)))
|
||||
r = req_ses.post(self.ar.url + "?delete", json=req)
|
||||
if r.status_code == 413 and "json 2big" in r.text:
|
||||
print(" (delete request too big; slicing...)")
|
||||
req = req[: len(req) // 2]
|
||||
continue
|
||||
elif not r:
|
||||
t = "delete request failed: %r %s"
|
||||
raise Exception(t % (r, r.text))
|
||||
break
|
||||
locs = locs[len(req) :]
|
||||
|
||||
if isdir:
|
||||
continue
|
||||
@@ -955,13 +976,22 @@ class Ctl(object):
|
||||
self.q_upload.put(None)
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
if not VT100:
|
||||
upath = upath.lstrip("\\?")
|
||||
|
||||
file.nhs += 1
|
||||
if file.nhs > 32:
|
||||
print("ERROR: giving up on file %s" % (upath))
|
||||
self.errs += 1
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
while time.time() < file.cd:
|
||||
time.sleep(0.1)
|
||||
|
||||
hs, sprs = handshake(self.ar, file, search)
|
||||
if search:
|
||||
if hs:
|
||||
@@ -1044,6 +1074,7 @@ class Ctl(object):
|
||||
except Exception as ex:
|
||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||
eprint(t.format(file.name, cid[:8], ex))
|
||||
file.cd = time.time() + self.ar.cd
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
@@ -1097,6 +1128,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||
|
||||
@@ -1113,7 +1145,8 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
||||
ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
@@ -1170,7 +1203,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
raise
|
||||
|
||||
if ar.cls:
|
||||
eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
eprint("\033[H\033[2J\033[3J", end="")
|
||||
|
||||
ctl = Ctl(ar)
|
||||
|
||||
@@ -1180,6 +1213,9 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ar.z = True
|
||||
ctl = Ctl(ar, ctl.stats)
|
||||
|
||||
if ctl.errs:
|
||||
print("WARNING: %d errors" % (ctl.errs))
|
||||
|
||||
sys.exit(0 if ctl.ok else 1)
|
||||
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ def main():
|
||||
ofs = ln.find("{")
|
||||
j = json.loads(ln[ofs:])
|
||||
except:
|
||||
pass
|
||||
continue
|
||||
|
||||
w = j["wark"]
|
||||
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
||||
|
||||
@@ -26,8 +26,8 @@ a {
|
||||
<script>
|
||||
|
||||
var a = document.getElementById('redir'),
|
||||
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = window.location.hostname || '127.0.0.1',
|
||||
proto = location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = location.hostname || '127.0.0.1',
|
||||
port = a.getAttribute('href').split(':').pop().split('/')[0],
|
||||
url = proto + '://' + loc + ':' + port + '/';
|
||||
|
||||
@@ -35,7 +35,7 @@ a.setAttribute('href', url);
|
||||
document.getElementById('desc').innerHTML = 'redirecting to';
|
||||
|
||||
setTimeout(function() {
|
||||
window.location.href = url;
|
||||
location.href = url;
|
||||
}, 500);
|
||||
|
||||
</script>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
server 127.0.0.1:3923 fail_timeout=1s;
|
||||
keepalive 1;
|
||||
}
|
||||
server {
|
||||
@@ -34,6 +34,8 @@ server {
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# NOTE: with cloudflare you want this instead:
|
||||
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
|
||||
@@ -138,6 +138,8 @@ in {
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"h" (html): "get", but folders return their index.html
|
||||
"a" (admin): can see uploader IPs, config-reload
|
||||
|
||||
For example: "rwmd"
|
||||
|
||||
|
||||
@@ -1,25 +1,27 @@
|
||||
# Maintainer: icxes <dev.null@need.moe>
|
||||
pkgname=copyparty
|
||||
pkgver="1.7.2"
|
||||
pkgver="1.9.27"
|
||||
pkgrel=1
|
||||
pkgdesc="Portable file sharing hub"
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, zeroconf, media indexer, thumbnails++"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("python" "lsof" "python-jinja")
|
||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"cfssl: generate TLS certificates on startup (pointless when reverse-proxied)"
|
||||
"python-mutagen: music tags (alternative)"
|
||||
"python-pillow: thumbnails for images"
|
||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||
"libkeyfinder-git: detection of musical keys"
|
||||
"qm-vamp-plugins: BPM detection"
|
||||
"python-pyopenssl: ftps functionality"
|
||||
"python-argon2_cffi: hashed passwords in config"
|
||||
"python-impacket-git: smb support (bad idea)"
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}.d/init" )
|
||||
sha256sums=("fb261d45ce7cf146a3f620d1e3109eb5c584f8950e61a872e2d92d7b7447bae0")
|
||||
sha256sums=("0bcf9362bc1bd9c85c228312c8341fcb9a37e383af6d8bee123e3a84e66394be")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere
|
||||
# and read config from `/etc/copyparty.d/*.conf`
|
||||
# in a chroot, preventing accidental access elsewhere,
|
||||
# and read copyparty config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `1000` and `--`
|
||||
# by listing them between the last `cpp` and `--`
|
||||
#
|
||||
# `1000 1000` = what user to run copyparty as
|
||||
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
@@ -24,7 +24,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
|
||||
/etc/copyparty.d \
|
||||
-- \
|
||||
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, pillow, pyvips, ffmpeg, mutagen,
|
||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
|
||||
|
||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||
withHashedPasswords ? true,
|
||||
|
||||
# generate TLS certificates on startup (pointless when reverse-proxied)
|
||||
withCertgen ? false,
|
||||
|
||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||
withThumbnails ? true,
|
||||
@@ -31,10 +37,12 @@ let
|
||||
]
|
||||
++ lib.optional withSMB impacket
|
||||
++ lib.optional withFTPS pyopenssl
|
||||
++ lib.optional withCertgen cfssl
|
||||
++ lib.optional withThumbnails pillow
|
||||
++ lib.optional withFastThumbnails pyvips
|
||||
++ lib.optional withMediaProcessing ffmpeg
|
||||
++ lib.optional withBasicAudioMetadata mutagen
|
||||
++ lib.optional withHashedPasswords argon2-cffi
|
||||
);
|
||||
in stdenv.mkDerivation {
|
||||
pname = "copyparty";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.7.2/copyparty-sfx.py",
|
||||
"version": "1.7.2",
|
||||
"hash": "sha256-h22sRRO/bpydgRVmSVD05ZLuzsUxBCWU3izt9Eg9bf0="
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.9.27/copyparty-sfx.py",
|
||||
"version": "1.9.27",
|
||||
"hash": "sha256-o06o/gUIkQhDw9a9SLjuAyQUoLMfFCqkIeonUFzezds="
|
||||
}
|
||||
@@ -10,7 +10,7 @@ name="copyparty"
|
||||
rcvar="copyparty_enable"
|
||||
copyparty_user="copyparty"
|
||||
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||
pidfile="/var/run/copyparty/${name}.pid"
|
||||
command="/usr/sbin/daemon"
|
||||
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||
|
||||
42
contrib/systemd/copyparty.conf
Normal file
@@ -0,0 +1,42 @@
|
||||
# not actually YAML but let's pretend:
|
||||
# -*- mode: yaml -*-
|
||||
# vim: ft=yaml:
|
||||
|
||||
|
||||
# put this file in /etc/
|
||||
|
||||
|
||||
[global]
|
||||
e2dsa # enable file indexing and filesystem scanning
|
||||
e2ts # and enable multimedia indexing
|
||||
ansi # and colors in log messages
|
||||
|
||||
# disable logging to stdout/journalctl and log to a file instead;
|
||||
# $LOGS_DIRECTORY is usually /var/log/copyparty (comes from systemd)
|
||||
# and copyparty replaces %Y-%m%d with Year-MonthDay, so the
|
||||
# full path will be something like /var/log/copyparty/2023-1130.txt
|
||||
# (note: enable compression by adding .xz at the end)
|
||||
q, lo: $LOGS_DIRECTORY/%Y-%m%d.log
|
||||
|
||||
# p: 80,443,3923 # listen on 80/443 as well (requires CAP_NET_BIND_SERVICE)
|
||||
# i: 127.0.0.1 # only allow connections from localhost (reverse-proxies)
|
||||
# ftp: 3921 # enable ftp server on port 3921
|
||||
# p: 3939 # listen on another port
|
||||
# df: 16 # stop accepting uploads if less than 16 GB free disk space
|
||||
# ver # show copyparty version in the controlpanel
|
||||
# grid # show thumbnails/grid-view by default
|
||||
# theme: 2 # monokai
|
||||
# name: datasaver # change the server-name that's displayed in the browser
|
||||
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
|
||||
# no-robots, force-js # make it harder for search engines to read your server
|
||||
|
||||
|
||||
[accounts]
|
||||
ed: wark # username: password
|
||||
|
||||
|
||||
[/] # create a volume at "/" (the webroot), which will
|
||||
/mnt # share the contents of the "/mnt" folder
|
||||
accs:
|
||||
rw: * # everyone gets read-write access, but
|
||||
rwmda: ed # the user "ed" gets read-write-move-delete-admin
|
||||
@@ -1,28 +1,27 @@
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# and share '/mnt' with anonymous read+write
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py` and
|
||||
# read copyparty config from `/etc/copyparty.conf`, for example:
|
||||
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/systemd/copyparty.conf
|
||||
#
|
||||
# installation:
|
||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||
# cp -pv copyparty.service /etc/systemd/system/
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
|
||||
# useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
|
||||
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
|
||||
# firewall-cmd --reload
|
||||
# cp -pv copyparty.service /etc/systemd/system/
|
||||
# cp -pv copyparty.conf /etc/
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||
#
|
||||
# if it fails to start, first check this: systemctl status copyparty
|
||||
# then try starting it while viewing logs: journalctl -fan 100
|
||||
# then try starting it while viewing logs:
|
||||
# journalctl -fan 100
|
||||
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
|
||||
#
|
||||
# you may want to:
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
# remove the nft lines to only listen on port 3923
|
||||
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
|
||||
# - edit /etc/copyparty.conf to configure copyparty
|
||||
# and in the ExecStart= line:
|
||||
# change '/usr/bin/python3' to another interpreter
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# add '-q' to disable logging on busy servers
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
# add '-e2dsa' to enable filesystem scanning + indexing
|
||||
# add '-e2ts' to enable metadata indexing
|
||||
# remove '--ansi' to disable colored logs
|
||||
# - change '/usr/bin/python3' to another interpreter
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
@@ -30,11 +29,9 @@
|
||||
# python disabling line-buffering, so messages are out-of-order:
|
||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
#
|
||||
# keep ExecStartPre before ExecStart, at least on rhel8
|
||||
########################################################################
|
||||
########################################################################
|
||||
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
@@ -44,23 +41,52 @@ Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
PermissionsStartOnly=true
|
||||
|
||||
# user to run as + where the TLS certificate is (if any)
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
## user to run as + where the TLS certificate is (if any)
|
||||
##
|
||||
User=copyparty
|
||||
Group=copyparty
|
||||
WorkingDirectory=/var/lib/copyparty
|
||||
Environment=XDG_CONFIG_HOME=/var/lib/copyparty/.config
|
||||
|
||||
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
|
||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
||||
ExecStartPre=+nft add table ip nat
|
||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
|
||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
||||
## OPTIONAL: allow copyparty to listen on low ports (like 80/443);
|
||||
## you need to uncomment the "p: 80,443,3923" in the config too
|
||||
## ------------------------------------------------------------
|
||||
## a slightly safer alternative is to enable partyalone.service
|
||||
## which does portforwarding with nftables instead, but an even
|
||||
## better option is to use a reverse-proxy (nginx/caddy/...)
|
||||
##
|
||||
AmbientCapabilities=CAP_NET_BIND_SERVICE
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
## some quick hardening; TODO port more from the nixos package
|
||||
##
|
||||
MemoryMax=50%
|
||||
MemorySwapMax=50%
|
||||
ProtectClock=true
|
||||
ProtectControlGroups=true
|
||||
ProtectHostname=true
|
||||
ProtectKernelLogs=true
|
||||
ProtectKernelModules=true
|
||||
ProtectKernelTunables=true
|
||||
ProtectProc=invisible
|
||||
RemoveIPC=true
|
||||
RestrictNamespaces=true
|
||||
RestrictRealtime=true
|
||||
RestrictSUIDSGID=true
|
||||
|
||||
# copyparty settings
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
|
||||
## create a directory for logfiles;
|
||||
## this defines $LOGS_DIRECTORY which is used in copyparty.conf
|
||||
##
|
||||
LogsDirectory=copyparty
|
||||
|
||||
## finally, start copyparty and give it the config file:
|
||||
##
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -c /etc/copyparty.conf
|
||||
|
||||
# NOTE: if you installed copyparty from an OS package repo (nice)
|
||||
# then you probably want something like this instead:
|
||||
#ExecStart=/usr/bin/copyparty -c /etc/copyparty.conf
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere
|
||||
# in a chroot, preventing accidental access elsewhere,
|
||||
# and share '/mnt' with anonymous read+write
|
||||
#
|
||||
# installation:
|
||||
@@ -7,9 +7,9 @@
|
||||
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `1000` and `--`
|
||||
# by listing them between the last `cpp` and `--`
|
||||
#
|
||||
# `1000 1000` = what user to run copyparty as
|
||||
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||
#
|
||||
# you may want to:
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
@@ -32,7 +32,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
|
||||
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail cpp cpp \
|
||||
/mnt \
|
||||
-- \
|
||||
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
||||
|
||||
[Install]
|
||||
|
||||
2
contrib/windows/copyparty-ctmp.bat
Executable file
@@ -0,0 +1,2 @@
|
||||
rem run copyparty.exe on machines with busted environment variables
|
||||
cmd /v /c "set TMP=\tmp && copyparty.exe"
|
||||
@@ -23,7 +23,7 @@ if not PY2:
|
||||
unicode: Callable[[Any], str] = str
|
||||
else:
|
||||
sys.dont_write_bytecode = True
|
||||
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
||||
unicode = unicode # type: ignore
|
||||
|
||||
WINDOWS: Any = (
|
||||
[int(x) for x in platform.version().split(".")]
|
||||
|
||||
@@ -19,14 +19,16 @@ import threading
|
||||
import time
|
||||
import traceback
|
||||
import uuid
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .authsrv import expand_config_file, split_cfg_ln, upgrade_cfg_fmt
|
||||
from .cfg import flagcats, onedash
|
||||
from .svchub import SvcHub
|
||||
from .util import (
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
IMPLICATIONS,
|
||||
JINJA_VER,
|
||||
PYFTPD_VER,
|
||||
@@ -34,6 +36,7 @@ from .util import (
|
||||
UNPLICATIONS,
|
||||
align_tab,
|
||||
ansi_re,
|
||||
dedent,
|
||||
min_ex,
|
||||
py_desc,
|
||||
pybin,
|
||||
@@ -140,9 +143,11 @@ def warn(msg: str) -> None:
|
||||
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||
|
||||
|
||||
def init_E(E: EnvParams) -> None:
|
||||
def init_E(EE: EnvParams) -> None:
|
||||
# __init__ runs 18 times when oxidized; do expensive stuff here
|
||||
|
||||
E = EE # pylint: disable=redefined-outer-name
|
||||
|
||||
def get_unixdir() -> str:
|
||||
paths: list[tuple[Callable[..., Any], str]] = [
|
||||
(os.environ.get, "XDG_CONFIG_HOME"),
|
||||
@@ -184,7 +189,10 @@ def init_E(E: EnvParams) -> None:
|
||||
|
||||
with open_binary("copyparty", "z.tar") as tgz:
|
||||
with tarfile.open(fileobj=tgz) as tf:
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
try:
|
||||
tf.extractall(tdn, filter="tar")
|
||||
except TypeError:
|
||||
tf.extractall(tdn) # nosec (archive is safe)
|
||||
|
||||
return tdn
|
||||
|
||||
@@ -240,20 +248,28 @@ def get_srvname() -> str:
return ret


def get_fk_salt(cert_path) -> str:
def get_fk_salt() -> str:
fp = os.path.join(E.cfg, "fk-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
if os.path.exists(cert_path):
print("salt from cert")
return unicode(os.path.getmtime(cert_path))
else:
print("salt from os.random")
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")

return ret.decode("utf-8")


def get_ah_salt() -> str:
fp = os.path.join(E.cfg, "ah-salt.txt")
try:
with open(fp, "rb") as f:
ret = f.read().strip()
except:
ret = base64.b64encode(os.urandom(18))
with open(fp, "wb") as f:
f.write(ret + b"\n")

return ret.decode("utf-8")

@@ -306,6 +322,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
# oh man i love openssl
# check this out
# hold my beer
assert ssl
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
sslver = terse_sslver(al.ssl_ver).split(",")
flags = [k for k in ssl.__dict__ if ptn.match(k)]
@@ -339,6 +356,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:


def configure_ssl_ciphers(al: argparse.Namespace) -> None:
assert ssl
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en
@@ -418,9 +436,9 @@ def disable_quickedit() -> None:
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)

k32.GetStdHandle.errcheck = ecb
k32.GetConsoleMode.errcheck = ecb
k32.SetConsoleMode.errcheck = ecb
k32.GetStdHandle.errcheck = ecb # type: ignore
k32.GetConsoleMode.errcheck = ecb # type: ignore
k32.SetConsoleMode.errcheck = ecb # type: ignore
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)

@@ -479,6 +497,10 @@ def get_sects():
"d" (delete): permanently delete files and folders
"g" (get): download files, but cannot see folder contents
"G" (upget): "get", but can see filekeys of their own uploads
"h" (html): "get", but folders return their index.html
"." (dots): user can ask to show dotfiles in listings
"a" (admin): can see uploader IPs, config-reload
"A" ("all"): same as "rwmda." (read/write/move/delete/admin/dotfiles)

too many volflags to list here, see --help-flags

@@ -515,6 +537,50 @@ def get_sects():
).rstrip()
+ build_flags_desc(),
],
[
"handlers",
"use plugins to handle certain events",
dedent(
"""
usually copyparty returns a \033[33m404\033[0m if a file does not exist, and
\033[33m403\033[0m if a user tries to access a file they don't have access to

you can load a plugin which will be invoked right before this
happens, and the plugin can choose to override this behavior

load the plugin using --args or volflags; for example \033[36m
--on404 ~/partyhandlers/not404.py
-v .::r:c,on404=~/partyhandlers/not404.py
\033[0m
the file must define the function \033[35mmain(cli,vn,rem)\033[0m:
\033[35mcli\033[0m: the copyparty HttpCli instance
\033[35mvn\033[0m: the VFS which overlaps with the requested URL
\033[35mrem\033[0m: the remainder of the URL below the VFS mountpoint

`main` must return a string; one of the following:

> \033[32m"true"\033[0m: the plugin has responded to the request,
and the TCP connection should be kept open

> \033[32m"false"\033[0m: the plugin has responded to the request,
and the TCP connection should be terminated

> \033[32m"retry"\033[0m: the plugin has done something to resolve the 404
situation, and copyparty should reattempt reading the file.
if it still fails, a regular 404 will be returned

> \033[32m"allow"\033[0m: should ignore the insufficient permissions
and let the client continue anyways

> \033[32m""\033[0m: the plugin has not handled the request;
try the next plugin or return the usual 404 or 403

\033[1;35mPS!\033[0m the folder that contains the python file should ideally
not contain many other python files, and especially nothing
with filenames that overlap with modules used by copyparty
"""
),
],
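For illustration, a minimal on404 handler following the main(cli,vn,rem) contract described above could look like the sketch below; the filename and the vn.canonical() helper are assumptions for the example, not taken from this diff:

    # not404.py -- illustrative sketch of an --on404 handler
    import os

    def main(cli, vn, rem):
        # assumption: vn.canonical(rem) maps the URL remainder to an
        # absolute filesystem path; adapt if the VFS API differs
        abspath = vn.canonical(rem)
        if rem.endswith(".txt") and not os.path.exists(abspath):
            # create a placeholder so the request can succeed after all
            with open(abspath, "w") as f:
                f.write("generated on demand\n")
            return "retry"  # ask copyparty to re-read the file
        return ""  # not handled; fall through to the usual 404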
[
"hooks",
"execute commands before/after various events",
@@ -529,6 +595,7 @@ def get_sects():
\033[36mxbd\033[35m executes CMD before a file delete
\033[36mxad\033[35m executes CMD after a file delete
\033[36mxm\033[35m executes CMD on message
\033[36mxban\033[35m executes CMD if someone gets banned
\033[0m
can be defined as --args or volflags; for example \033[36m
--xau notify-send
@@ -564,6 +631,9 @@ def get_sects():
executed program on STDIN instead of as argv arguments, and
it also includes the wark (file-id/hash) as a json property

\033[36mxban\033[0m can be used to overrule / cancel a user ban event;
if the program returns 0 (true/OK) then the ban will NOT happen

except for \033[36mxm\033[0m, only one hook / one action can run at a time,
so it's recommended to use the \033[36mf\033[0m flag unless you really need
to wait for the hook to finish before continuing (without \033[36mf\033[0m
@@ -583,6 +653,47 @@ def get_sects():
"""
),
],
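As a concrete example of the hooks described above, a small --xau script could log every finished upload; this sketch assumes the hook receives the uploaded file's path as its last argument, in the same way the notify-send example receives it:

    #!/usr/bin/env python3
    # uploadlog.py -- illustrative sketch of an --xau upload hook
    import sys
    import time

    def main():
        fpath = sys.argv[-1]  # assumption: path of the uploaded file
        with open("upload.log", "a") as f:
            f.write("%s %s\n" % (time.strftime("%Y-%m-%d %H:%M:%S"), fpath))

    if __name__ == "__main__":
        main()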
[
"exp",
"text expansion",
dedent(
"""
specify --exp or the "exp" volflag to enable placeholder expansions
in README.md / .prologue.html / .epilogue.html

--exp-md (volflag exp_md) holds the list of placeholders which can be
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
any placeholder not given in those lists will be ignored and shown as-is

the default list will expand the following placeholders:
\033[36m{{self.ip}} \033[35mclient ip
\033[36m{{self.ua}} \033[35mclient user-agent
\033[36m{{self.uname}} \033[35mclient username
\033[36m{{self.host}} \033[35mthe "Host" header, or the server's external IP otherwise
\033[36m{{cfg.name}} \033[35mthe --name global-config
\033[36m{{cfg.logout}} \033[35mthe --logout global-config
\033[36m{{vf.scan}} \033[35mthe "scan" volflag
\033[36m{{vf.thsize}} \033[35mthumbnail size
\033[36m{{srv.itime}} \033[35mserver time in seconds
\033[36m{{srv.htime}} \033[35mserver time as YY-mm-dd, HH:MM:SS (UTC)
\033[36m{{hdr.cf_ipcountry}} \033[35mthe "CF-IPCountry" client header (probably blank)
\033[0m
so the following types of placeholders can be added to the lists:
* any client header can be accessed through {{hdr.*}}
* any variable in httpcli.py can be accessed through {{self.*}}
* any global server setting can be accessed through {{cfg.*}}
* any volflag can be accessed through {{vf.*}}

remove vf.scan from default list using --exp-md /vf.scan
add "accept" header to def. list using --exp-md +hdr.accept

for performance reasons, expansion only happens while embedding
documents into directory listings, and when accessing a ?doc=...
link, but never otherwise, so if you click a -txt- link you'll
have to refresh the page to apply expansion
"""
),
],
[
"ls",
"volume inspection",
@@ -596,6 +707,7 @@ def get_sects():
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
\033[36mp\033[0m exits 1 if any such symlinks are found
\033[36mr\033[0m resumes startup after the listing

examples:
--ls '**' # list all files which are possible to read
--ls '**,*,ln' # check for dangerous symlinks
@@ -622,6 +734,75 @@ def get_sects():
"""
),
],
[
"pwhash",
"password hashing",
dedent(
"""
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed

passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but
copyparty will also hash and print any passwords that are non-hashed
(password which do not start with '+') and then terminate afterwards

\033[36m--ah-alg\033[0m specifies the hashing algorithm and a
list of optional comma-separated arguments:

\033[36m--ah-alg argon2\033[0m # which is the same as:
\033[36m--ah-alg argon2,3,256,4,19\033[0m
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)

\033[36m--ah-alg scrypt\033[0m # which is the same as:
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads

\033[36m--ah-alg sha2\033[0m # which is the same as:
\033[36m--ah-alg sha2,424242\033[0m
use sha2-512 with 424242 iterations

recommended: \033[32m--ah-alg argon2\033[0m
(takes about 0.4 sec and 256M RAM to process a new password)

argon2 needs python-package argon2-cffi,
scrypt needs openssl,
sha2 is always available
"""
),
],
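To make the argon2 arguments above concrete, the default argon2,3,256,4,19 maps onto the argon2-cffi package roughly as in this sketch (cost parameters only; it does not claim to reproduce copyparty's exact hash or storage format):

    # illustrative mapping of "--ah-alg argon2,3,256,4,19" onto argon2-cffi
    from argon2 import PasswordHasher

    ph = PasswordHasher(
        time_cost=3,             # 3 passes over memory
        memory_cost=256 * 1024,  # 256 MiB, given in KiB
        parallelism=4,           # 4 threads
    )  # argon2-cffi produces argon2id hashes, version 19 (0x13)

    print(ph.hash("hunter2"))  # a salted argon2id hash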
[
"zm",
"mDNS debugging",
dedent(
"""
the mDNS protocol is multicast-based, which means there are thousands
of fun and intersesting ways for it to break unexpectedly

things to check if it does not work at all:

* is there a firewall blocking port 5353 on either the server or client?
(for example, clients may be able to send queries to copyparty,
but the replies could get lost)

* is multicast accidentally disabled on either the server or client?
(look for mDNS log messages saying "new client on [...]")

* the router/switch must be multicast and igmp capable

things to check if it works for a while but then it doesn't:

* is there a firewall blocking port 5353 on either the server or client?
(copyparty may be unable to see the queries from the clients, but the
clients may still be able to see the initial unsolicited announce,
so it works for about 2 minutes after startup until TTL expires)

* does the client have multiple IPs on its interface, and some of the
IPs are in subnets which the copyparty server is not a member of?

for both of the above intermittent issues, try --zm-spam 30
(not spec-compliant but nothing will mind)
"""
),
],
]

@@ -646,11 +827,9 @@ def add_general(ap, nc, srvname):
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||
@@ -661,7 +840,7 @@ def add_qr(ap, tty):
|
||||
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
|
||||
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
|
||||
ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]")
|
||||
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with PREFIX; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
||||
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with \033[33mPREFIX\033[0m; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
||||
ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable")
|
||||
ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)")
|
||||
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
|
||||
@@ -670,24 +849,28 @@ def add_qr(ap, tty):
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled, default=12h")
|
||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
|
||||
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m0\033[0m] = off and warn if enabled, [\033[32m1\033[0m] = off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16-32 for cross-atlantic (max=64)")
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
@@ -696,9 +879,12 @@ def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd), [\033[32m2\033[0m]=cloudflare, [\033[32m3\033[0m]=nginx, [\033[32m-1\033[0m]=closest proxy")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (\033[33m--xff-hdr\033[0m) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
|
||||
ap2.add_argument("--ipa", metavar="PREFIX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPREFIX\033[0m; example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
|
||||
if ANYWIN:
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
@@ -708,7 +894,7 @@ def add_network(ap):
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
|
||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||
|
||||
|
||||
def add_tls(ap, cert_path):
|
||||
@@ -727,9 +913,10 @@ def add_cert(ap, cert_path):
|
||||
ap2 = ap.add_argument_group('TLS certificate generator options')
|
||||
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
||||
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
|
||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each \033[33m--crt-ns\033[0m")
|
||||
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
|
||||
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
|
||||
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
|
||||
@@ -737,7 +924,14 @@ def add_cert(ap, cert_path):
|
||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
|
||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
||||
|
||||
|
||||
def add_auth(ap):
|
||||
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||
return
|
||||
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
@@ -745,35 +939,36 @@ def add_zeroconf(ap):
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
||||
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
|
||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every \033[33mSEC\033[0m seconds (0=disable)")
|
||||
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
|
||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
|
||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every \033[33mSEC\033[0m seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]\n └─note: can be due to firewalls; make sure UDP port 5353 is open in both directions (on clients too)")
|
||||
|
||||
|
||||
def add_zc_mdns(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
|
||||
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable mDNS ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable mDNS on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
|
||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set)")
|
||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
||||
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
||||
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
||||
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
|
||||
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
|
||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||
|
||||
|
||||
def add_zc_ssdp(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
||||
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable SSDP ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable SSDP on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
||||
@@ -781,11 +976,12 @@ def add_zc_ssdp(ap):
|
||||
|
||||
def add_ftp(ap):
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
|
||||
ap2.add_argument("--ftp-ipa", metavar="PFX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPFX\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
|
||||
@@ -801,27 +997,46 @@ def add_webdav(ap):
|
||||
|
||||
def add_smb(ap):
|
||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless \033[33m--smb-port\033[0m is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is DANGEROUS and buggy! Never expose to the internet!")
|
||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
||||
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
||||
ap2.add_argument("--smb-nwa-1", action="store_true", help="disable impacket#1433 workaround (truncate directory listings to 64kB)")
|
||||
ap2.add_argument("--smb-nwa-2", action="store_true", help="disable impacket workaround for filecopy globs")
|
||||
ap2.add_argument("--smba", action="store_true", help="small performance boost: disable per-account permissions, enables account coalescing instead (if one user has write/delete-access, then everyone does)")
|
||||
ap2.add_argument("--smbv", action="store_true", help="verbose")
|
||||
ap2.add_argument("--smbvv", action="store_true", help="verboser")
|
||||
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
||||
|
||||
|
||||
def add_handlers(ap):
|
||||
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
|
||||
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing \033[33mPY\033[0m file")
|
||||
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing \033[33mPY\033[0m file")
|
||||
ap2.add_argument("--hot-handlers", action="store_true", help="recompile handlers on each request -- expensive but convenient when hacking on stuff")
|
||||
|
||||
|
||||
def add_hooks(ap):
|
||||
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||
|
||||
|
||||
def add_stats(ap):
|
||||
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
|
||||
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
|
||||
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
|
||||
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
|
||||
ap2.add_argument("--nos-vst", action="store_true", help="disable volume state metrics (indexing, analyzing, activity)")
|
||||
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
|
||||
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
|
||||
|
||||
|
||||
def add_yolo(ap):
|
||||
@@ -833,61 +1048,79 @@ def add_yolo(ap):
|
||||
def add_optouts(ap):
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection in the terminal window, as this would pause execution)")
|
||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
|
||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
|
||||
|
||||
|
||||
def add_safety(ap, fk_salt):
|
||||
def add_safety(ap):
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
|
||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (invalid requests, attempted exploits ++)")
|
||||
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to permissions g/G/h (decent replacement for \033[33m--ban-404\033[0m if that can't be used)")
|
||||
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for \033[33mMIN\033[0m minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for \033[33mB\033[0m minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)")
|
||||
|
||||
|
||||
def add_salt(ap, fk_salt, ah_salt):
|
||||
ap2 = ap.add_argument_group('salting options')
|
||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)")
|
||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)")
|
||||
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
||||
ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
||||
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||
|
||||
|
||||
def add_shutdown(ap):
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after \033[33mWHEN\033[0m has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
||||
|
||||
|
||||
def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
||||
|
||||
|
||||
def add_admin(ap):
|
||||
@@ -902,25 +1135,25 @@ def add_thumbnail(ap):
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||
ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms) (volflag=dathumb)")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image by default (client can override in UI) (volflag=nocrop)")
|
||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs (avoids issues on some FFmpeg builds)")
|
||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs (faster, lower accuracy, avoids issues on some FFmpeg builds)")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than \033[33m--th-poke\033[0m seconds will get deleted every \033[33m--th-clean\033[0m seconds")
|
||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling \033[33m-e2d\033[0m will make these case-insensitive, and try them as dotfiles (.folder.jpg), and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# https://github.com/libvips/libvips
|
||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
||||
|
||||
@@ -928,14 +1161,15 @@ def add_thumbnail(ap):
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||
@@ -944,13 +1178,13 @@ def add_db_general(ap, hcores):
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
||||
|
||||
@@ -958,40 +1192,49 @@ def add_db_general(ap, hcores):
|
||||
def add_db_metadata(ap):
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan newly discovered files for metadata on startup; sets \033[33m-e2t\033[0m")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets \033[33m-e2ts\033[0m")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will detect more tags")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for ffprobe tag-scan")
|
||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for FFprobe tag-scan")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/ffprobe parsers")
|
||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/FFprobe parsers")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.); either an entire replacement list, or add/remove stuff on the default-list with +foo or /bar", default=DEF_MTE)
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.); assign/add/remove same as \033[33m-mte\033[0m", default=DEF_MTH)
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag \033[33mM\033[0m using program \033[33mBIN\033[0m to parse the file")
|
||||
|
||||
|
||||
def add_txt(ap):
|
||||
ap2 = ap.add_argument_group('textfile options')
|
||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
|
||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||
ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
|
||||
ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
|
||||
ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
|
||||
|
||||
|
||||
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
|
||||
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI, or add ?v to URL for override)")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible by -np)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
@@ -1002,17 +1245,18 @@ def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; [\033[32m.\033[0m] (a single dot) = all files")
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
|
||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
|
||||
|
||||
|
||||
# fmt: on
@@ -1029,9 +1273,13 @@ def run_argparse(

cert_path = os.path.join(E.cfg, "cert.pem")

fk_salt = get_fk_salt(cert_path)
fk_salt = get_fk_salt()
ah_salt = get_ah_salt()

hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
# alpine peaks at 5 threads for some reason,
# all others scale past that (but try to avoid SMT),
# 5 should be plenty anyways (3 GiB/s on most machines)
hcores = min(CORES, 5 if CORES > 8 else 4)

tty = os.environ.get("TERM", "").lower() == "linux"
@@ -1041,6 +1289,7 @@ def run_argparse(
|
||||
add_network(ap)
|
||||
add_tls(ap, cert_path)
|
||||
add_cert(ap, cert_path)
|
||||
add_auth(ap)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
@@ -1053,11 +1302,15 @@ def run_argparse(
|
||||
add_ftp(ap)
|
||||
add_webdav(ap)
|
||||
add_smb(ap)
|
||||
add_safety(ap, fk_salt)
|
||||
add_safety(ap)
|
||||
add_salt(ap, fk_salt, ah_salt)
|
||||
add_optouts(ap)
|
||||
add_shutdown(ap)
|
||||
add_yolo(ap)
|
||||
add_handlers(ap)
|
||||
add_hooks(ap)
|
||||
add_stats(ap)
|
||||
add_txt(ap)
|
||||
add_ui(ap, retry)
|
||||
add_admin(ap)
|
||||
add_logging(ap)
|
||||
@@ -1085,7 +1338,7 @@ def run_argparse(
|
||||
for k, h, t in sects:
|
||||
k2 = "help_" + k.replace("-", "_")
|
||||
if vars(ret)[k2]:
|
||||
lprint("# {} help page".format(k))
|
||||
lprint("# %s help page (%s)" % (k, h))
|
||||
lprint(t + "\033[0m")
|
||||
sys.exit(0)
|
||||
|
||||
@@ -1138,16 +1391,25 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v)
argv.extend(supp)

deprecated: list[tuple[str, str]] = []
deprecated: list[tuple[str, str]] = [
("--salt", "--warksalt"),
("--hdr-au-usr", "--idp-h-usr"),
]
for dk, nk in deprecated:
try:
idx = argv.index(dk)
except:
idx = -1
ov = ""
for n, k in enumerate(argv):
if k == dk or k.startswith(dk + "="):
idx = n
if "=" in k:
ov = "=" + k.split("=", 1)[1]

if idx < 0:
continue

msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
lprint(msg.format(dk, nk))
argv[idx] = nk
argv[idx] = nk + ov
time.sleep(2)
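# illustrative sketch (not part of the diff): how the deprecation shim above
# rewrites an old flag in-place while keeping any "=value" suffix; the argv
# below is a made-up example, not a copyparty default.
argv = ["-v", ".::r", "--salt=hunter2"]
for dk, nk in [("--salt", "--warksalt")]:
    for n, k in enumerate(argv):
        if k == dk or k.startswith(dk + "="):
            ov = "=" + k.split("=", 1)[1] if "=" in k else ""
            argv[n] = nk + ov  # argv is now ["-v", ".::r", "--warksalt=hunter2"]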
|
||||
|
||||
da = len(argv) == 1
|
||||
@@ -1198,7 +1460,7 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
elif not al.no_ansi:
|
||||
al.ansi = VT100
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
if WINDOWS and not al.keep_qem and not al.ah_cli:
|
||||
try:
|
||||
disable_quickedit()
|
||||
except:
|
||||
@@ -1207,42 +1469,6 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
if al.ansi:
|
||||
al.wintitle = ""
|
||||
|
||||
nstrs: list[str] = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
m = re_vol.match(ostr)
|
||||
if not m:
|
||||
# not our problem
|
||||
nstrs.append(ostr)
|
||||
continue
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
na = [src, dst]
|
||||
mod = False
|
||||
for opt in perms.split(":"):
|
||||
if re.match("c[^,]", opt):
|
||||
mod = True
|
||||
na.append("c," + opt[1:])
|
||||
elif re.sub("^[rwmdgG]*", "", opt) and "," not in opt:
|
||||
mod = True
|
||||
perm = opt[0]
|
||||
if perm == "a":
|
||||
perm = "rw"
|
||||
na.append(perm + "," + opt[1:])
|
||||
else:
|
||||
na.append(opt)
|
||||
|
||||
nstr = ":".join(na)
|
||||
nstrs.append(nstr if mod else ostr)
|
||||
if mod:
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
|
||||
lprint(msg.format(ostr, nstr))
|
||||
anymod = True
|
||||
|
||||
if anymod:
|
||||
al.v = nstrs
|
||||
time.sleep(2)
|
||||
|
||||
# propagate implications
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if getattr(al, k1):
|
||||
|
||||
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 7, 3)
CODENAME = "unlinked"
BUILD_DT = (2023, 6, 11)
VERSION = (1, 9, 28)
CODENAME = "prometheable"
BUILD_DT = (2023, 12, 31)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
@@ -12,19 +12,23 @@ import threading
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS, E
|
||||
from .bos import bos
|
||||
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
|
||||
from .pwhash import PWHash
|
||||
from .util import (
|
||||
IMPLICATIONS,
|
||||
META_NOBOTS,
|
||||
SQLITE_VER,
|
||||
UNPLICATIONS,
|
||||
UTC,
|
||||
ODict,
|
||||
Pebkac,
|
||||
absreal,
|
||||
afsenc,
|
||||
get_df,
|
||||
humansize,
|
||||
odfusion,
|
||||
relchk,
|
||||
statdir,
|
||||
uncyg,
|
||||
@@ -40,7 +44,10 @@ if True: # pylint: disable=using-constant-test
|
||||
from .util import NamedLogger, RootLogger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
from .broker_mp import BrokerMp
|
||||
from .broker_thr import BrokerThr
|
||||
from .broker_util import BrokerCli
|
||||
|
||||
# Vflags: TypeAlias = dict[str, str | bool | float | list[str]]
|
||||
# Vflags: TypeAlias = dict[str, Any]
|
||||
# Mflags: TypeAlias = dict[str, Vflags]
|
||||
@@ -48,6 +55,11 @@ if TYPE_CHECKING:
|
||||
|
||||
LEELOO_DALLAS = "leeloo_dallas"
|
||||
|
||||
SEE_LOG = "see log for details"
|
||||
SSEELOG = " ({})".format(SEE_LOG)
|
||||
BAD_CFG = "invalid config; {}".format(SEE_LOG)
|
||||
SBADCFG = " ({})".format(BAD_CFG)
|
||||
|
||||
|
||||
class AXS(object):
|
||||
def __init__(
|
||||
@@ -58,6 +70,9 @@ class AXS(object):
|
||||
udel: Optional[Union[list[str], set[str]]] = None,
|
||||
uget: Optional[Union[list[str], set[str]]] = None,
|
||||
upget: Optional[Union[list[str], set[str]]] = None,
|
||||
uhtml: Optional[Union[list[str], set[str]]] = None,
|
||||
uadmin: Optional[Union[list[str], set[str]]] = None,
|
||||
udot: Optional[Union[list[str], set[str]]] = None,
|
||||
) -> None:
|
||||
self.uread: set[str] = set(uread or [])
|
||||
self.uwrite: set[str] = set(uwrite or [])
|
||||
@@ -65,14 +80,13 @@ class AXS(object):
|
||||
self.udel: set[str] = set(udel or [])
|
||||
self.uget: set[str] = set(uget or [])
|
||||
self.upget: set[str] = set(upget or [])
|
||||
self.uhtml: set[str] = set(uhtml or [])
|
||||
self.uadmin: set[str] = set(uadmin or [])
|
||||
self.udot: set[str] = set(udot or [])
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "AXS(%s)" % (
|
||||
", ".join(
|
||||
"%s=%r" % (k, self.__dict__[k])
|
||||
for k in "uread uwrite umove udel uget upget".split()
|
||||
)
|
||||
)
|
||||
ks = "uread uwrite umove udel uget upget uhtml uadmin udot".split()
|
||||
return "AXS(%s)" % (", ".join("%s=%r" % (k, self.__dict__[k]) for k in ks),)
|
||||
|
||||
|
||||
class Lim(object):
|
||||
@@ -90,6 +104,8 @@ class Lim(object):
|
||||
self.dfl = 0 # free disk space limit
|
||||
self.dft = 0 # last-measured time
|
||||
self.dfv = 0 # currently free
|
||||
self.vbmax = 0 # volume bytes max
|
||||
self.vnmax = 0 # volume max num files
|
||||
|
||||
self.smin = 0 # filesize min
|
||||
self.smax = 0 # filesize max
|
||||
@@ -119,8 +135,11 @@ class Lim(object):
|
||||
ip: str,
|
||||
rem: str,
|
||||
sz: int,
|
||||
ptop: str,
|
||||
abspath: str,
|
||||
broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]] = None,
|
||||
reg: Optional[dict[str, dict[str, Any]]] = None,
|
||||
volgetter: str = "up2k.get_volsize",
|
||||
) -> tuple[str, str]:
|
||||
if reg is not None and self.reg is None:
|
||||
self.reg = reg
|
||||
@@ -131,6 +150,7 @@ class Lim(object):
|
||||
self.chk_rem(rem)
|
||||
if sz != -1:
|
||||
self.chk_sz(sz)
|
||||
self.chk_vsz(broker, ptop, sz, volgetter)
|
||||
self.chk_df(abspath, sz) # side effects; keep last-ish
|
||||
|
||||
ap2, vp2 = self.rot(abspath)
|
||||
@@ -146,6 +166,25 @@ class Lim(object):
if self.smax and sz > self.smax:
raise Pebkac(400, "file too big")

def chk_vsz(
self,
broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]],
ptop: str,
sz: int,
volgetter: str = "up2k.get_volsize",
) -> None:
if not broker or not self.vbmax + self.vnmax:
return

x = broker.ask(volgetter, ptop)
nbytes, nfiles = x.get()

if self.vbmax and self.vbmax < nbytes + sz:
raise Pebkac(400, "volume has exceeded max size")

if self.vnmax and self.vnmax < nfiles + 1:
raise Pebkac(400, "volume has exceeded max num.files")
|
||||
|
||||
def chk_df(self, abspath: str, sz: int, already_written: bool = False) -> None:
|
||||
if not self.dfl:
|
||||
return
|
||||
@@ -179,7 +218,7 @@ class Lim(object):
if self.rot_re.search(path.replace("\\", "/")):
return path, ""

suf = datetime.utcnow().strftime(self.rotf)
suf = datetime.now(UTC).strftime(self.rotf)
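# illustrative sketch (not part of the diff): the utcnow() -> now(UTC) change
# above keeps the rotation suffix timezone-aware; "%Y/%m" is a made-up
# strftime pattern, not necessarily the configured rotf.
from datetime import datetime, timezone
suf = datetime.now(timezone.utc).strftime("%Y/%m")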
|
||||
if path:
|
||||
path += "/"
|
||||
|
||||
@@ -266,7 +305,7 @@ class Lim(object):
|
||||
|
||||
self.bupc[ip] = mark
|
||||
if mark >= self.bmax:
|
||||
raise Pebkac(429, "ingress saturated")
|
||||
raise Pebkac(429, "upload size limit exceeded")
|
||||
|
||||
|
||||
class VFS(object):
|
||||
@@ -297,6 +336,10 @@ class VFS(object):
|
||||
self.adel: dict[str, list[str]] = {}
|
||||
self.aget: dict[str, list[str]] = {}
|
||||
self.apget: dict[str, list[str]] = {}
|
||||
self.ahtml: dict[str, list[str]] = {}
|
||||
self.aadmin: dict[str, list[str]] = {}
|
||||
self.adot: dict[str, list[str]] = {}
|
||||
self.all_vols: dict[str, VFS] = {}
|
||||
|
||||
if realpath:
|
||||
rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
|
||||
@@ -375,7 +418,7 @@ class VFS(object):
|
||||
hist = flags.get("hist")
|
||||
if hist and hist != "-":
|
||||
zs = "{}/{}".format(hist.rstrip("/"), name)
|
||||
flags["hist"] = os.path.expanduser(zs) if zs.startswith("~") else zs
|
||||
flags["hist"] = os.path.expandvars(os.path.expanduser(zs))
|
||||
|
||||
return flags
|
||||
|
||||
@@ -406,8 +449,8 @@ class VFS(object):

def can_access(
self, vpath: str, uname: str
) -> tuple[bool, bool, bool, bool, bool, bool]:
"""can Read,Write,Move,Delete,Get,Upget"""
) -> tuple[bool, bool, bool, bool, bool, bool, bool, bool]:
"""can Read,Write,Move,Delete,Get,Upget,Admin,Dot"""
if vpath:
vn, _ = self._find(undot(vpath))
else:
@@ -415,13 +458,16 @@ class VFS(object):

c = vn.axs
return (
uname in c.uread or "*" in c.uread,
uname in c.uwrite or "*" in c.uwrite,
uname in c.umove or "*" in c.umove,
uname in c.udel or "*" in c.udel,
uname in c.uget or "*" in c.uget,
uname in c.upget or "*" in c.upget,
uname in c.uread,
uname in c.uwrite,
uname in c.umove,
uname in c.udel,
uname in c.uget,
uname in c.upget,
uname in c.uadmin,
uname in c.udot,
)
# skip uhtml because it's rarely needed
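# illustrative sketch (not part of the diff): can_access() now returns eight
# booleans in the order given by the new docstring; the volume path and the
# username "ed" below are hypothetical.
r, w, m, d, g, G, a, dot = vfs.can_access("music/2023", "ed")
if a:
    print("ed has admin-access in /music/2023")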
|
||||
|
||||
def get(
|
||||
self,
|
||||
@@ -435,14 +481,13 @@ class VFS(object):
|
||||
err: int = 403,
|
||||
) -> tuple["VFS", str]:
|
||||
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
||||
if ANYWIN:
|
||||
mod = relchk(vpath)
|
||||
if mod:
|
||||
if self.log:
|
||||
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
||||
raise Pebkac(404)
|
||||
if relchk(vpath):
|
||||
if self.log:
|
||||
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
||||
raise Pebkac(422)
|
||||
|
||||
vn, rem = self._find(undot(vpath))
|
||||
cvpath = undot(vpath)
|
||||
vn, rem = self._find(cvpath)
|
||||
c: AXS = vn.axs
|
||||
|
||||
for req, d, msg in [
|
||||
@@ -452,9 +497,14 @@ class VFS(object):
|
||||
(will_del, c.udel, "delete"),
|
||||
(will_get, c.uget, "get"),
|
||||
]:
|
||||
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
||||
t = "you don't have {}-access for this location"
|
||||
raise Pebkac(err, t.format(msg))
|
||||
if req and uname not in d and uname != LEELOO_DALLAS:
|
||||
if vpath != cvpath and vpath != "." and self.log:
|
||||
ap = vn.canonical(rem)
|
||||
t = "{} has no {} in [{}] => [{}] => [{}]"
|
||||
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
|
||||
|
||||
t = 'you don\'t have %s-access in "/%s"'
|
||||
raise Pebkac(err, t % (msg, cvpath))
|
||||
|
||||
return vn, rem
|
||||
|
||||
@@ -508,7 +558,7 @@ class VFS(object):
|
||||
for pset in permsets:
|
||||
ok = True
|
||||
for req, lst in zip(pset, axs):
|
||||
if req and uname not in lst and "*" not in lst:
|
||||
if req and uname not in lst:
|
||||
ok = False
|
||||
if ok:
|
||||
break
|
||||
@@ -532,7 +582,7 @@ class VFS(object):
|
||||
seen: list[str],
|
||||
uname: str,
|
||||
permsets: list[list[bool]],
|
||||
dots: bool,
|
||||
wantdots: bool,
|
||||
scandir: bool,
|
||||
lstat: bool,
|
||||
subvols: bool = True,
|
||||
@@ -576,6 +626,10 @@ class VFS(object):
|
||||
rm1.append(le)
|
||||
_ = [vfs_ls.remove(x) for x in rm1] # type: ignore
|
||||
|
||||
dots_ok = wantdots and uname in dbv.axs.udot
|
||||
if not dots_ok:
|
||||
vfs_ls = [x for x in vfs_ls if "/." not in "/" + x[0]]
|
||||
|
||||
seen = seen[:] + [fsroot]
|
||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
@@ -588,13 +642,13 @@ class VFS(object):
|
||||
yield dbv, vrem, rel, fsroot, rfiles, rdirs, vfs_virt
|
||||
|
||||
for rdir, _ in rdirs:
|
||||
if not dots and rdir.startswith("."):
|
||||
if not dots_ok and rdir.startswith("."):
|
||||
continue
|
||||
|
||||
wrel = (rel + "/" + rdir).lstrip("/")
|
||||
wrem = (rem + "/" + rdir).lstrip("/")
|
||||
for x in self.walk(
|
||||
wrel, wrem, seen, uname, permsets, dots, scandir, lstat, subvols
|
||||
wrel, wrem, seen, uname, permsets, wantdots, scandir, lstat, subvols
|
||||
):
|
||||
yield x
|
||||
|
||||
@@ -602,11 +656,13 @@ class VFS(object):
|
||||
return
|
||||
|
||||
for n, vfs in sorted(vfs_virt.items()):
|
||||
if not dots and n.startswith("."):
|
||||
if not dots_ok and n.startswith("."):
|
||||
continue
|
||||
|
||||
wrel = (rel + "/" + n).lstrip("/")
|
||||
for x in vfs.walk(wrel, "", seen, uname, permsets, dots, scandir, lstat):
|
||||
for x in vfs.walk(
|
||||
wrel, "", seen, uname, permsets, wantdots, scandir, lstat
|
||||
):
|
||||
yield x
|
||||
|
||||
def zipgen(
|
||||
@@ -615,7 +671,6 @@ class VFS(object):
|
||||
vrem: str,
|
||||
flt: set[str],
|
||||
uname: str,
|
||||
dots: bool,
|
||||
dirs: bool,
|
||||
scandir: bool,
|
||||
wrap: bool = True,
|
||||
@@ -625,7 +680,7 @@ class VFS(object):
|
||||
# if single folder: the folder itself is the top-level item
|
||||
folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
|
||||
|
||||
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
||||
g = self.walk(folder, vrem, [], uname, [[True, False]], True, scandir, False)
|
||||
for _, _, vpath, apath, files, rd, vd in g:
|
||||
if flt:
|
||||
files = [x for x in files if x[0] in flt]
|
||||
@@ -644,18 +699,6 @@ class VFS(object):
|
||||
apaths = [os.path.join(apath, n) for n in fnames]
|
||||
ret = list(zip(vpaths, apaths, files))
|
||||
|
||||
if not dots:
|
||||
# dotfile filtering based on vpath (intended visibility)
|
||||
ret = [x for x in ret if "/." not in "/" + x[0]]
|
||||
|
||||
zel = [ze for ze in rd if ze[0].startswith(".")]
|
||||
for ze in zel:
|
||||
rd.remove(ze)
|
||||
|
||||
zsl = [zs for zs in vd.keys() if zs.startswith(".")]
|
||||
for zs in zsl:
|
||||
del vd[zs]
|
||||
|
||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in ret]:
|
||||
yield f
|
||||
|
||||
@@ -729,6 +772,7 @@ class AuthSrv(object):
|
||||
warn_anonwrite: bool = True,
|
||||
dargs: Optional[argparse.Namespace] = None,
|
||||
) -> None:
|
||||
self.ah = PWHash(args)
|
||||
self.args = args
|
||||
self.dargs = dargs or args
|
||||
self.log_func = log_func
|
||||
@@ -765,7 +809,7 @@ class AuthSrv(object):
|
||||
if dst in mount:
|
||||
t = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
|
||||
self.log(t.format(dst, mount[dst], src), c=1)
|
||||
raise Exception("invalid config")
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
if src in mount.values():
|
||||
t = "filesystem-path [{}] mounted in multiple locations:"
|
||||
@@ -774,7 +818,7 @@ class AuthSrv(object):
|
||||
t += "\n /{}".format(v)
|
||||
|
||||
self.log(t, c=3)
|
||||
raise Exception("invalid config")
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
if not bos.path.isdir(src):
|
||||
self.log("warning: filesystem-path does not exist: {}".format(src), 3)
|
||||
@@ -873,7 +917,7 @@ class AuthSrv(object):
|
||||
t = "volume-specific config (anything from --help-flags)"
|
||||
self._l(ln, 6, t)
|
||||
else:
|
||||
raise Exception("invalid section header")
|
||||
raise Exception("invalid section header" + SBADCFG)
|
||||
|
||||
self.indent = " " if subsection else " "
|
||||
continue
|
||||
@@ -896,14 +940,12 @@ class AuthSrv(object):
|
||||
acct[u] = p
|
||||
except:
|
||||
t = 'lines inside the [accounts] section must be "username: password"'
|
||||
raise Exception(t)
|
||||
raise Exception(t + SBADCFG)
|
||||
continue
|
||||
|
||||
if vp is not None and ap is None:
|
||||
ap = ln
|
||||
if ap.startswith("~"):
|
||||
ap = os.path.expanduser(ap)
|
||||
|
||||
ap = os.path.expandvars(os.path.expanduser(ap))
|
||||
ap = absreal(ap)
|
||||
self._l(ln, 2, "bound to filesystem-path [{}]".format(ap))
|
||||
self._map_volume(ap, vp, mount, daxs, mflags)
|
||||
@@ -914,17 +956,18 @@ class AuthSrv(object):
|
||||
try:
|
||||
self._l(ln, 5, "volume access config:")
|
||||
sk, sv = ln.split(":")
|
||||
if re.sub("[rwmdgG]", "", sk) or not sk:
|
||||
if re.sub("[rwmdgGhaA.]", "", sk) or not sk:
|
||||
err = "invalid accs permissions list; "
|
||||
raise Exception(err)
|
||||
if " " in re.sub(", *", "", sv).strip():
|
||||
err = "list of users is not comma-separated; "
|
||||
raise Exception(err)
|
||||
assert vp is not None
|
||||
self._read_vol_str(sk, sv.replace(" ", ""), daxs[vp], mflags[vp])
|
||||
continue
|
||||
except:
|
||||
err += "accs entries must be 'rwmdgG: user1, user2, ...'"
|
||||
raise Exception(err)
|
||||
err += "accs entries must be 'rwmdgGhaA.: user1, user2, ...'"
|
||||
raise Exception(err + SBADCFG)
|
||||
|
||||
if cat == catf:
|
||||
err = ""
|
||||
@@ -937,21 +980,23 @@ class AuthSrv(object):
|
||||
if bad:
|
||||
err = "bad characters [{}] in volflag name [{}]; "
|
||||
err = err.format(bad, sk)
|
||||
raise Exception(err)
|
||||
raise Exception(err + SBADCFG)
|
||||
if sv is True:
|
||||
fstr += "," + sk
|
||||
else:
|
||||
fstr += ",{}={}".format(sk, sv)
|
||||
assert vp is not None
|
||||
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||
fstr = ""
|
||||
if fstr:
|
||||
assert vp is not None
|
||||
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||
continue
|
||||
except:
|
||||
err += "flags entries (volflags) must be one of the following:\n 'flag1, flag2, ...'\n 'key: value'\n 'flag1, flag2, key: value'"
|
||||
raise Exception(err)
|
||||
raise Exception(err + SBADCFG)
|
||||
|
||||
raise Exception("unprocessable line in config")
|
||||
raise Exception("unprocessable line in config" + SBADCFG)
|
||||
|
||||
self._e()
|
||||
self.line_ctr = 0
|
||||
@@ -959,8 +1004,9 @@ class AuthSrv(object):
|
||||
def _read_vol_str(
|
||||
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
|
||||
) -> None:
|
||||
if lvl.strip("crwmdgG"):
|
||||
raise Exception("invalid volflag: {},{}".format(lvl, uname))
|
||||
if lvl.strip("crwmdgGhaA."):
|
||||
t = "%s,%s" % (lvl, uname) if uname else lvl
|
||||
raise Exception("invalid config value (volume or volflag): %s" % (t,))
|
||||
|
||||
if lvl == "c":
|
||||
cval: Union[bool, str] = True
|
||||
@@ -982,16 +1028,31 @@
if uname == "":
uname = "*"

junkset = set()
for un in uname.replace(",", " ").strip().split():
for alias, mapping in [
("h", "gh"),
("G", "gG"),
("A", "rwmda.A"),
]:
expanded = ""
for ch in mapping:
if ch not in lvl:
expanded += ch
lvl = lvl.replace(alias, expanded + alias)
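# illustrative sketch (not part of the diff): the alias expansion above turns
# the shorthand permission "A" into the full set it implies; same loop, run
# on a standalone string for clarity.
lvl = "A"
for alias, mapping in [("h", "gh"), ("G", "gG"), ("A", "rwmda.A")]:
    expanded = ""
    for ch in mapping:
        if ch not in lvl:
            expanded += ch
    lvl = lvl.replace(alias, expanded + alias)
print(lvl)  # "rwmda.A" -- read/write/move/delete/admin/dot plus the original A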
|
||||
|
||||
for ch, al in [
|
||||
("r", axs.uread),
|
||||
("w", axs.uwrite),
|
||||
("m", axs.umove),
|
||||
("d", axs.udel),
|
||||
(".", axs.udot),
|
||||
("a", axs.uadmin),
|
||||
("A", junkset),
|
||||
("g", axs.uget),
|
||||
("G", axs.uget),
|
||||
("G", axs.upget),
|
||||
]: # b bb bbb
|
||||
("h", axs.uhtml),
|
||||
]:
|
||||
if ch in lvl:
|
||||
if un == "*":
|
||||
t = "└─add permission [{0}] for [everyone] -- {2}"
|
||||
@@ -1017,7 +1078,8 @@ class AuthSrv(object):
|
||||
flags[name] = True
|
||||
return
|
||||
|
||||
if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split():
|
||||
zs = "mtp on403 on404 xbu xau xiu xbr xar xbd xad xm xban"
|
||||
if name not in zs.split():
|
||||
if value is True:
|
||||
t = "└─add volflag [{}] = {} ({})"
|
||||
else:
|
||||
@@ -1062,7 +1124,7 @@ class AuthSrv(object):
|
||||
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is <rwmdgG>[,username][,username] or <c>,<flag>[=args]
|
||||
# permset is <rwmdgGhaA.>[,username][,username] or <c>,<flag>[=args]
|
||||
for v_str in self.args.v:
|
||||
m = re_vol.match(v_str)
|
||||
if not m:
|
||||
@@ -1106,6 +1168,8 @@ class AuthSrv(object):
|
||||
self.log("\n{0}\n{1}{0}".format(t, "\n".join(slns)))
|
||||
raise
|
||||
|
||||
self.setup_pwhash(acct)
|
||||
|
||||
# case-insensitive; normalize
|
||||
if WINDOWS:
|
||||
cased = {}
|
||||
@@ -1134,12 +1198,13 @@ class AuthSrv(object):
|
||||
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
|
||||
continue
|
||||
|
||||
assert vfs # type: ignore
|
||||
zv = vfs.add(mount[dst], dst)
|
||||
zv.axs = daxs[dst]
|
||||
zv.flags = mflags[dst]
|
||||
zv.dbv = None
|
||||
|
||||
assert vfs
|
||||
assert vfs # type: ignore
|
||||
vfs.all_vols = {}
|
||||
vfs.all_aps = []
|
||||
vfs.all_vps = []
|
||||
@@ -1149,14 +1214,21 @@ class AuthSrv(object):
|
||||
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
|
||||
vol.root = vfs
|
||||
|
||||
for perm in "read write move del get pget".split():
|
||||
for perm in "read write move del get pget html admin dot".split():
|
||||
axs_key = "u" + perm
|
||||
unames = ["*"] + list(acct.keys())
|
||||
for vp, vol in vfs.all_vols.items():
|
||||
zx = getattr(vol.axs, axs_key)
|
||||
if "*" in zx:
|
||||
for usr in unames:
|
||||
zx.add(usr)
|
||||
|
||||
# aread,... = dict[uname, list[volnames] or []]
|
||||
umap: dict[str, list[str]] = {x: [] for x in unames}
|
||||
for usr in unames:
|
||||
for vp, vol in vfs.all_vols.items():
|
||||
zx = getattr(vol.axs, axs_key)
|
||||
if usr in zx or "*" in zx:
|
||||
if usr in zx:
|
||||
umap[usr].append(vp)
|
||||
umap[usr].sort()
|
||||
setattr(vfs, "a" + perm, umap)
|
||||
@@ -1164,7 +1236,17 @@ class AuthSrv(object):
|
||||
all_users = {}
|
||||
missing_users = {}
|
||||
for axs in daxs.values():
|
||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget, axs.upget]:
|
||||
for d in [
|
||||
axs.uread,
|
||||
axs.uwrite,
|
||||
axs.umove,
|
||||
axs.udel,
|
||||
axs.uget,
|
||||
axs.upget,
|
||||
axs.uhtml,
|
||||
axs.uadmin,
|
||||
axs.udot,
|
||||
]:
|
||||
for usr in d:
|
||||
all_users[usr] = 1
|
||||
if usr != "*" and usr not in acct:
|
||||
@@ -1176,7 +1258,7 @@ class AuthSrv(object):
|
||||
+ ", ".join(k for k in sorted(missing_users)),
|
||||
c=1,
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
if LEELOO_DALLAS in all_users:
|
||||
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
|
||||
@@ -1186,7 +1268,7 @@ class AuthSrv(object):
|
||||
if pwd in seenpwds:
|
||||
t = "accounts [{}] and [{}] have the same password; this is not supported"
|
||||
self.log(t.format(seenpwds[pwd], usr), 1)
|
||||
raise Exception("invalid config")
|
||||
raise Exception(BAD_CFG)
|
||||
seenpwds[pwd] = usr
|
||||
|
||||
promote = []
|
||||
@@ -1198,9 +1280,7 @@ class AuthSrv(object):
|
||||
if vflag == "-":
|
||||
pass
|
||||
elif vflag:
|
||||
if vflag.startswith("~"):
|
||||
vflag = os.path.expanduser(vflag)
|
||||
|
||||
vflag = os.path.expandvars(os.path.expanduser(vflag))
|
||||
vol.histpath = uncyg(vflag) if WINDOWS else vflag
|
||||
elif self.args.hist:
|
||||
for nch in range(len(hid)):
|
||||
@@ -1290,6 +1370,16 @@ class AuthSrv(object):
|
||||
use = True
|
||||
lim.bmax, lim.bwin = [unhumanize(x) for x in zs.split(",")]
|
||||
|
||||
zs = vol.flags.get("vmaxb")
|
||||
if zs:
|
||||
use = True
|
||||
lim.vbmax = unhumanize(zs)
|
||||
|
||||
zs = vol.flags.get("vmaxn")
|
||||
if zs:
|
||||
use = True
|
||||
lim.vnmax = unhumanize(zs)
|
||||
|
||||
if use:
|
||||
vol.lim = lim
|
||||
|
||||
@@ -1320,6 +1410,9 @@ class AuthSrv(object):
|
||||
have_fk = False
|
||||
for vol in vfs.all_vols.values():
|
||||
fk = vol.flags.get("fk")
|
||||
fka = vol.flags.get("fka")
|
||||
if fka and not fk:
|
||||
fk = fka
|
||||
if fk:
|
||||
vol.flags["fk"] = int(fk) if fk is not True else 8
|
||||
have_fk = True
|
||||
@@ -1327,6 +1420,12 @@ class AuthSrv(object):
|
||||
if have_fk and re.match(r"^[0-9\.]+$", self.args.fk_salt):
|
||||
self.log("filekey salt: {}".format(self.args.fk_salt))
|
||||
|
||||
fk_len = len(self.args.fk_salt)
|
||||
if have_fk and fk_len < 14:
|
||||
t = "WARNING: filekeys are enabled, but the salt is only %d chars long; %d or longer is recommended. Either specify a stronger salt using --fk-salt or delete this file and restart copyparty: %s"
|
||||
zs = os.path.join(E.cfg, "fk-salt.txt")
|
||||
self.log(t % (fk_len, 16, zs), 3)
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
|
||||
vol.flags["gz"] = False # def.pk
|
||||
@@ -1347,12 +1446,12 @@ class AuthSrv(object):
|
||||
|
||||
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||
if vf in vol.flags:
|
||||
ptn = vol.flags.pop(vf)
|
||||
ptn = re.compile(vol.flags.pop(vf))
|
||||
else:
|
||||
ptn = getattr(self.args, ga)
|
||||
|
||||
if ptn:
|
||||
vol.flags[vf] = re.compile(ptn)
|
||||
vol.flags[vf] = ptn
|
||||
|
||||
for ga, vf in vf_bmap().items():
|
||||
if getattr(self.args, ga):
|
||||
@@ -1378,6 +1477,10 @@ class AuthSrv(object):
|
||||
if k in vol.flags:
|
||||
vol.flags[k] = int(vol.flags[k])
|
||||
|
||||
for k in ("convt",):
|
||||
if k in vol.flags:
|
||||
vol.flags[k] = float(vol.flags[k])
|
||||
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = True
|
||||
@@ -1393,18 +1496,15 @@ class AuthSrv(object):
|
||||
raise Exception(t.format(dbd, dbds))
|
||||
|
||||
# default tag cfgs if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
elif vol.flags["mte"].startswith("+"):
|
||||
vol.flags["mte"] = ",".join(
|
||||
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
|
||||
)
|
||||
if "mth" not in vol.flags:
|
||||
vol.flags["mth"] = self.args.mth
|
||||
for k in ("mte", "mth", "exp_md", "exp_lg"):
|
||||
if k not in vol.flags:
|
||||
vol.flags[k] = getattr(self.args, k).copy()
|
||||
else:
|
||||
vol.flags[k] = odfusion(getattr(self.args, k), vol.flags[k])
|
||||
|
||||
# append additive args from argv to volflags
|
||||
hooks = "xbu xau xiu xbr xar xbd xad xm".split()
|
||||
for name in ["mtp"] + hooks:
|
||||
hooks = "xbu xau xiu xbr xar xbd xad xm xban".split()
|
||||
for name in "mtp on404 on403".split() + hooks:
|
||||
self._read_volflag(vol.flags, name, getattr(self.args, name), True)
|
||||
|
||||
for hn in hooks:
|
||||
@@ -1426,6 +1526,10 @@ class AuthSrv(object):
|
||||
hfs = [x for x in hfs if x != "f"]
|
||||
ocmd = ",".join(hfs + [cmd])
|
||||
|
||||
if "c" not in hfs and "f" not in hfs and hn == "xban":
|
||||
hfs = ["c"] + hfs
|
||||
ocmd = ",".join(hfs + [cmd])
|
||||
|
||||
ncmds.append(ocmd)
|
||||
vol.flags[hn] = ncmds
|
||||
|
||||
@@ -1450,7 +1554,11 @@ class AuthSrv(object):
|
||||
if vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
vol.flags = {
|
||||
k: v
|
||||
for k, v in vol.flags.items()
|
||||
if not k.startswith(rm) or k == "mte"
|
||||
}
|
||||
|
||||
for grp, rm in [["d2v", "e2v"]]:
|
||||
if not vol.flags.get(grp, False):
|
||||
@@ -1497,12 +1605,12 @@ class AuthSrv(object):
|
||||
if local:
|
||||
local_only_mtp[a] = True
|
||||
|
||||
local_mte = {}
|
||||
for a in vol.flags.get("mte", "").split(","):
|
||||
local_mte = ODict()
|
||||
for a in vol.flags.get("mte", {}).keys():
|
||||
local = True
|
||||
all_mte[a] = True
|
||||
local_mte[a] = True
|
||||
for b in self.args.mte.split(","):
|
||||
for b in self.args.mte.keys():
|
||||
if not a or not b:
|
||||
continue
|
||||
|
||||
@@ -1536,18 +1644,28 @@ class AuthSrv(object):
|
||||
self.log(t, 1)
|
||||
errors = True
|
||||
|
||||
if self.args.smb and self.ah.on and acct:
|
||||
self.log("--smb can only be used when --ah-alg is none", 1)
|
||||
errors = True
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
for k in list(vol.flags.keys()):
|
||||
if re.match("^-[^-]+$", k):
|
||||
vol.flags.pop(k[1:], None)
|
||||
vol.flags.pop(k)
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if vol.flags.get("dots"):
|
||||
for name in vol.axs.uread:
|
||||
vol.axs.udot.add(name)
|
||||
|
||||
if errors:
|
||||
sys.exit(1)
|
||||
|
||||
vfs.bubble_flags()
|
||||
|
||||
have_e2d = False
|
||||
have_e2t = False
|
||||
t = "volumes and permissions:\n"
|
||||
for zv in vfs.all_vols.values():
|
||||
if not self.warn_anonwrite:
|
||||
@@ -1559,10 +1677,14 @@ class AuthSrv(object):
|
||||
[" write", "uwrite"],
|
||||
[" move", "umove"],
|
||||
["delete", "udel"],
|
||||
[" dots", "udot"],
|
||||
[" get", "uget"],
|
||||
[" upget", "upget"],
|
||||
[" upGet", "upget"],
|
||||
[" html", "uhtml"],
|
||||
["uadmin", "uadmin"],
|
||||
]:
|
||||
u = list(sorted(getattr(zv.axs, attr)))
|
||||
u = ["*"] if "*" in u else u
|
||||
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
||||
u = u if u else "\033[36m--none--\033[0m"
|
||||
t += "\n| {}: {}".format(txt, u)
|
||||
@@ -1570,6 +1692,9 @@ class AuthSrv(object):
|
||||
if "e2d" in zv.flags:
|
||||
have_e2d = True
|
||||
|
||||
if "e2t" in zv.flags:
|
||||
have_e2t = True
|
||||
|
||||
t += "\n"
|
||||
|
||||
if self.warn_anonwrite:
|
||||
@@ -1581,6 +1706,13 @@ class AuthSrv(object):
|
||||
if t:
|
||||
self.log("\n\033[{}\033[0m\n".format(t))
|
||||
|
||||
if not have_e2t:
|
||||
t = "hint: argument -e2ts enables multimedia indexing (artist/title/...)"
|
||||
self.log(t, 6)
|
||||
else:
|
||||
t = "hint: argument -e2dsa enables searching, upload-undo, and better deduplication"
|
||||
self.log(t, 6)
|
||||
|
||||
zv, _ = vfs.get("/", "*", False, False)
|
||||
zs = zv.realpath.lower()
|
||||
if zs in ("/", "c:\\") or zs.startswith(r"c:\windows"):
|
||||
@@ -1588,7 +1720,7 @@ class AuthSrv(object):
|
||||
self.log(t.format(zv.realpath), c=1)
|
||||
|
||||
try:
|
||||
zv, _ = vfs.get("/", "*", False, True)
|
||||
zv, _ = vfs.get("", "*", False, True, err=999)
|
||||
if self.warn_anonwrite and os.getcwd() == zv.realpath:
|
||||
t = "anyone can write to the current directory: {}\n"
|
||||
self.log(t.format(zv.realpath), c=1)
|
||||
@@ -1605,7 +1737,54 @@ class AuthSrv(object):
self.re_pwd = None
pwds = [re.escape(x) for x in self.iacct.keys()]
if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
if self.ah.on:
zs = r"(\[H\] pw:.*|[?&]pw=)([^&]+)"
else:
zs = r"(\[H\] pw:.*|=)(" + "|".join(pwds) + r")([]&; ]|$)"

self.re_pwd = re.compile(zs)
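# illustrative sketch (not part of the diff): one plausible use of the re_pwd
# pattern built above -- censoring a plaintext password in a log line when
# hashed auth is enabled; the request line is a made-up example.
import re
re_pwd = re.compile(r"(\[H\] pw:.*|[?&]pw=)([^&]+)")
line = "GET /?pw=hunter2&th=w"
print(re_pwd.sub(r"\g<1>***", line))  # GET /?pw=***&th=w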
|
||||
|
||||
def setup_pwhash(self, acct: dict[str, str]) -> None:
|
||||
self.ah = PWHash(self.args)
|
||||
if not self.ah.on:
|
||||
if self.args.ah_cli or self.args.ah_gen:
|
||||
t = "\n BAD CONFIG:\n cannot --ah-cli or --ah-gen without --ah-alg"
|
||||
raise Exception(t)
|
||||
return
|
||||
|
||||
if self.args.ah_cli:
|
||||
self.ah.cli()
|
||||
sys.exit()
|
||||
elif self.args.ah_gen == "-":
|
||||
self.ah.stdin()
|
||||
sys.exit()
|
||||
elif self.args.ah_gen:
|
||||
print(self.ah.hash(self.args.ah_gen))
|
||||
sys.exit()
|
||||
|
||||
if not acct:
|
||||
return
|
||||
|
||||
changed = False
|
||||
for uname, pw in list(acct.items())[:]:
|
||||
if pw.startswith("+") and len(pw) == 33:
|
||||
continue
|
||||
|
||||
changed = True
|
||||
hpw = self.ah.hash(pw)
|
||||
acct[uname] = hpw
|
||||
t = "hashed password for account {}: {}"
|
||||
self.log(t.format(uname, hpw), 3)
|
||||
|
||||
if not changed:
|
||||
return
|
||||
|
||||
lns = []
|
||||
for uname, pw in acct.items():
|
||||
lns.append(" {}: {}".format(uname, pw))
|
||||
|
||||
t = "please use the following hashed passwords in your config:\n{}"
|
||||
self.log(t.format("\n".join(lns)), 3)
|
||||
|
||||
def chk_sqlite_threadsafe(self) -> str:
|
||||
v = SQLITE_VER[-1:]
|
||||
@@ -1662,10 +1841,21 @@ class AuthSrv(object):
|
||||
raise Exception("volume not found: " + zs)
|
||||
|
||||
self.log(str({"users": users, "vols": vols, "flags": flags}))
|
||||
t = "/{}: read({}) write({}) move({}) del({}) get({}) upget({})"
|
||||
t = "/{}: read({}) write({}) move({}) del({}) dots({}) get({}) upGet({}) uadmin({})"
|
||||
for k, zv in self.vfs.all_vols.items():
|
||||
vc = zv.axs
|
||||
vs = [k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget, vc.upget]
|
||||
vs = [
|
||||
k,
|
||||
vc.uread,
|
||||
vc.uwrite,
|
||||
vc.umove,
|
||||
vc.udel,
|
||||
vc.udot,
|
||||
vc.uget,
|
||||
vc.upget,
|
||||
vc.uhtml,
|
||||
vc.uadmin,
|
||||
]
|
||||
self.log(t.format(*vs))
|
||||
|
||||
flag_v = "v" in flags
|
||||
@@ -1745,7 +1935,8 @@ class AuthSrv(object):
|
||||
]
|
||||
|
||||
csv = set("i p".split())
|
||||
lst = set("c ihead mtm mtp xad xar xau xiu xbd xbr xbu xm".split())
|
||||
zs = "c ihead mtm mtp on403 on404 xad xar xau xiu xban xbd xbr xbu xm"
|
||||
lst = set(zs.split())
|
||||
askip = set("a v c vc cgen theme".split())
|
||||
|
||||
# keymap from argv to vflag
|
||||
@@ -1802,8 +1993,11 @@ class AuthSrv(object):
|
||||
"w": "uwrite",
|
||||
"m": "umove",
|
||||
"d": "udel",
|
||||
".": "udot",
|
||||
"g": "uget",
|
||||
"G": "upget",
|
||||
"h": "uhtml",
|
||||
"a": "uadmin",
|
||||
}
|
||||
users = {}
|
||||
for pkey in perms.values():
|
||||
@@ -1895,13 +2089,19 @@ def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
|
||||
|
||||
if os.path.isdir(fp):
|
||||
names = os.listdir(fp)
|
||||
ret.append("#\033[36m cfg files in {} => {}\033[0m".format(fp, names))
|
||||
crumb = "#\033[36m cfg files in {} => {}\033[0m".format(fp, names)
|
||||
ret.append(crumb)
|
||||
for fn in sorted(names):
|
||||
fp2 = os.path.join(fp, fn)
|
||||
if not fp2.endswith(".conf") or fp2 in ipath:
|
||||
continue
|
||||
|
||||
expand_config_file(ret, fp2, ipath)
|
||||
|
||||
if ret[-1] == crumb:
|
||||
# no config files below; remove breadcrumb
|
||||
ret.pop()
|
||||
|
||||
return
|
||||
|
||||
ipath += " -> " + fp
|
||||
@@ -2000,7 +2200,7 @@ def upgrade_cfg_fmt(
|
||||
else:
|
||||
sn = sn.replace(",", ", ")
|
||||
ret.append(" " + sn)
|
||||
elif sn[:1] in "rwmdgG":
|
||||
elif sn[:1] in "rwmdgGhaA.":
|
||||
if cat != catx:
|
||||
cat = catx
|
||||
ret.append(cat)
|
||||
|
||||
@@ -43,6 +43,10 @@ def open(p: str, *a, **ka) -> int:
|
||||
return os.open(fsenc(p), *a, **ka)
|
||||
|
||||
|
||||
def readlink(p: str) -> str:
|
||||
return fsdec(os.readlink(fsenc(p)))
|
||||
|
||||
|
||||
def rename(src: str, dst: str) -> None:
|
||||
return os.rename(fsenc(src), fsenc(dst))
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ import queue
|
||||
|
||||
from .__init__ import CORES, TYPE_CHECKING
|
||||
from .broker_mpw import MpWorker
|
||||
from .broker_util import try_exec
|
||||
from .broker_util import ExceptionalQueue, try_exec
|
||||
from .util import Daemon, mp
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -46,8 +46,8 @@ class BrokerMp(object):
|
||||
self.num_workers = self.args.j or CORES
|
||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||
for n in range(1, self.num_workers + 1):
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
|
||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) # type: ignore
|
||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) # type: ignore
|
||||
|
||||
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
||||
Daemon(self.collector, "mp-sink-{}".format(n), (proc,))
|
||||
@@ -69,7 +69,7 @@ class BrokerMp(object):
|
||||
|
||||
while procs:
|
||||
if procs[-1].is_alive():
|
||||
time.sleep(0.1)
|
||||
time.sleep(0.05)
|
||||
continue
|
||||
|
||||
procs.pop()
|
||||
@@ -107,6 +107,19 @@ class BrokerMp(object):
|
||||
if retq_id:
|
||||
proc.q_pend.put((retq_id, "retq", rv))
|
||||
|
||||
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
|
||||
|
||||
# new non-ipc invoking managed service in hub
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
rv = try_exec(True, obj, *args)
|
||||
|
||||
retq = ExceptionalQueue(1)
|
||||
retq.put(rv)
|
||||
return retq
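This ask() keeps the return type identical to the IPC path: the result (or exception) is wrapped in an ExceptionalQueue, so callers can call .get() without caring whether the target ran in-process or in a worker subprocess. A self-contained sketch of that pattern, using simplified stand-ins for ExceptionalQueue and the hub:

```python
import queue

class ExceptionalQueue(queue.Queue):
    # simplified stand-in: get() re-raises an exception that was put() into it
    def get(self, block=True, timeout=None):
        rv = super(ExceptionalQueue, self).get(block, timeout)
        if isinstance(rv, Exception):
            raise rv
        return rv

def ask(hub, dest, *args):
    # same idea as BrokerMp.ask above: resolve "a.b.c" on the hub object,
    # call it synchronously, and hand the result back through a queue
    obj = hub
    for node in dest.split("."):
        obj = getattr(obj, node)
    try:
        rv = obj(*args)
    except Exception as ex:
        rv = ex
    retq = ExceptionalQueue(1)
    retq.put(rv)
    return retq

class FakeHub:
    class up2k:
        @staticmethod
        def get_state():
            return '{"volstate": {}}'

print(ask(FakeHub(), "up2k.get_state").get())  # {"volstate": {}}
```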
|
||||
|
||||
def say(self, dest: str, *args: Any) -> None:
|
||||
"""
|
||||
send message to non-hub component in other process,
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
import os
|
||||
import errno
|
||||
import time
|
||||
import json
|
||||
import shutil
|
||||
import filecmp
|
||||
import calendar
|
||||
import errno
|
||||
import filecmp
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
|
||||
from .util import runcmd, Netdev
|
||||
|
||||
from .util import Netdev, runcmd
|
||||
|
||||
HAVE_CFSSL = True
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from .util import RootLogger
|
||||
|
||||
|
||||
def ensure_cert(log: "RootLogger", args) -> None:
|
||||
"""
|
||||
@@ -119,6 +121,9 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
names.append(ip.split("/")[0])
|
||||
if args.crt_nolo:
|
||||
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
|
||||
if not args.crt_nohn:
|
||||
names.append(args.name)
|
||||
names.append(args.name + ".local")
|
||||
if not names:
|
||||
names = ["127.0.0.1"]
|
||||
if "127.0.0.1" in names or "::1" in names:
|
||||
@@ -127,7 +132,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
|
||||
try:
|
||||
expiry, inf = _read_crt(args, "srv.pem")
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
||||
if "sans" not in inf:
|
||||
raise Exception("no useable cert found")
|
||||
|
||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||
for n in names:
|
||||
if n not in inf["sans"]:
|
||||
@@ -176,6 +184,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
raise Exception("failed to translate cert: {}, {}".format(rc, se))
|
||||
|
||||
bname = os.path.join(args.crt_dir, "srv")
|
||||
try:
|
||||
os.unlink(bname + ".key")
|
||||
except:
|
||||
pass
|
||||
os.rename(bname + "-key.pem", bname + ".key")
|
||||
os.unlink(bname + ".csr")
|
||||
|
||||
@@ -211,7 +223,7 @@ def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
||||
HAVE_CFSSL = False
|
||||
log("cert", "could not create TLS certificates: {}".format(ex), 3)
|
||||
if getattr(ex, "errno", 0) == errno.ENOENT:
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
|
||||
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
|
||||
log("cert", t, 6)
|
||||
|
||||
ensure_cert(log, args)
|
||||
|
||||
@@ -9,21 +9,31 @@ onedash = set(zs.split())
|
||||
def vf_bmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple bools"""
|
||||
ret = {
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
"ed": "dots",
|
||||
"never_symlink": "neversymlink",
|
||||
"no_dedup": "copydupes",
|
||||
"no_dupe": "nodupe",
|
||||
"no_forget": "noforget",
|
||||
"dav_auth": "davauth",
|
||||
"dav_rt": "davrt",
|
||||
"no_robots": "norobots",
|
||||
"no_thumb": "dthumb",
|
||||
"no_vthumb": "dvthumb",
|
||||
"no_athumb": "dathumb",
|
||||
"th_no_crop": "nocrop",
|
||||
}
|
||||
for k in (
|
||||
"dotsrch",
|
||||
"e2d",
|
||||
"e2ds",
|
||||
"e2dsa",
|
||||
"e2t",
|
||||
"e2ts",
|
||||
"e2tsr",
|
||||
"e2v",
|
||||
"e2vu",
|
||||
"e2vp",
|
||||
"exp",
|
||||
"grid",
|
||||
"hardlink",
|
||||
"magic",
|
||||
@@ -40,8 +50,22 @@ def vf_bmap() -> dict[str, str]:
|
||||
|
||||
def vf_vmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: simple values"""
|
||||
ret = {}
|
||||
for k in ("lg_sbf", "md_sbf", "unlist"):
|
||||
ret = {
|
||||
"no_hash": "nohash",
|
||||
"no_idx": "noidx",
|
||||
"re_maxage": "scan",
|
||||
"th_convt": "convt",
|
||||
"th_size": "thsize",
|
||||
}
|
||||
for k in (
|
||||
"dbd",
|
||||
"lg_sbf",
|
||||
"md_sbf",
|
||||
"nrand",
|
||||
"sort",
|
||||
"unlist",
|
||||
"u2ts",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
|
||||
|
||||
@@ -49,7 +73,23 @@ def vf_vmap() -> dict[str, str]:
|
||||
def vf_cmap() -> dict[str, str]:
|
||||
"""argv-to-volflag: complex/lists"""
|
||||
ret = {}
|
||||
for k in ("dbd", "html_head", "mte", "mth", "nrand"):
|
||||
for k in (
|
||||
"exp_lg",
|
||||
"exp_md",
|
||||
"html_head",
|
||||
"mte",
|
||||
"mth",
|
||||
"mtp",
|
||||
"xad",
|
||||
"xar",
|
||||
"xau",
|
||||
"xban",
|
||||
"xbd",
|
||||
"xbr",
|
||||
"xbu",
|
||||
"xiu",
|
||||
"xm",
|
||||
):
|
||||
ret[k] = k
|
||||
return ret
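vf_bmap/vf_vmap/vf_cmap only declare which global argv switches map onto which per-volume flags; entries whose names already match are spelled out as identity pairs. A hedged sketch of how such a map could be applied to an argparse namespace; the apply_vmap helper and values are illustrative, only the two example mappings come from the diff:

```python
# illustrative helper, not copyparty's real plumbing
import argparse

def apply_vmap(args: argparse.Namespace, vmap: dict, flags: dict) -> None:
    # copy each global argv value into the volume flag dict
    # unless the volume already overrides it
    for argname, flagname in vmap.items():
        val = getattr(args, argname, None)
        if val is not None and flagname not in flags:
            flags[flagname] = val

args = argparse.Namespace(re_maxage=3600, th_size="320x256")
flags = {}
apply_vmap(args, {"re_maxage": "scan", "th_size": "thsize"}, flags)
print(flags)  # {'scan': 3600, 'thsize': '320x256'}
```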
|
||||
|
||||
@@ -59,8 +99,12 @@ permdescs = {
|
||||
"w": 'write; upload files; need "r" to see the uploads',
|
||||
"m": 'move; move files and folders; need "w" at destination',
|
||||
"d": "delete; permanently delete files and folders",
|
||||
".": "dots; user can ask to show dotfiles in listings",
|
||||
"g": "get; download files, but cannot see folder contents",
|
||||
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
||||
"h": 'html; same as "g" but folders return their index.html',
|
||||
"a": "admin; can see uploader IPs, config-reload",
|
||||
"A": "all; same as 'rwmda.' (read/write/move/delete/dotfiles)",
|
||||
}
|
||||
|
||||
|
||||
@@ -78,9 +122,12 @@ flagcats = {
|
||||
},
|
||||
"upload rules": {
|
||||
"maxn=250,600": "max 250 uploads over 15min",
|
||||
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)",
|
||||
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
|
||||
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
|
||||
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||
"rand": "force randomized filenames, 9 chars long by default",
|
||||
"nrand=N": "randomized filenames are N chars long",
|
||||
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
|
||||
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||
"df=1g": "ensure 1 GiB free disk space",
|
||||
},
|
||||
@@ -106,6 +153,7 @@ flagcats = {
|
||||
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
|
||||
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
|
||||
"noforget": "don't forget files when deleted from disk",
|
||||
"fat32": "avoid excessive reindexing on android sdcardfs",
|
||||
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
|
||||
"xlink": "cross-volume dupe detection / linking",
|
||||
"xdev": "do not descend into other filesystems",
|
||||
@@ -122,6 +170,13 @@ flagcats = {
|
||||
"dvthumb": "disables video thumbnails",
|
||||
"dathumb": "disables audio thumbnails (spectrograms)",
|
||||
"dithumb": "disables image thumbnails",
|
||||
"thsize": "thumbnail res; WxH",
|
||||
"nocrop": "disable center-cropping by default",
|
||||
"convt": "conversion timeout in seconds",
|
||||
},
|
||||
"handlers\n(better explained in --help-handlers)": {
|
||||
"on404=PY": "handle 404s by executing PY file",
|
||||
"on403=PY": "handle 403s by executing PY file",
|
||||
},
|
||||
"event hooks\n(better explained in --help-hooks)": {
|
||||
"xbu=CMD": "execute CMD before a file upload starts",
|
||||
@@ -132,9 +187,11 @@ flagcats = {
|
||||
"xbd=CMD": "execute CMD before a file delete",
|
||||
"xad=CMD": "execute CMD after a file delete",
|
||||
"xm=CMD": "execute CMD on message",
|
||||
"xban=CMD": "execute CMD if someone gets banned",
|
||||
},
|
||||
"client and ux": {
|
||||
"grid": "show grid/thumbnails by default",
|
||||
"sort": "default sort order",
|
||||
"unlist": "dont list files matching REGEX",
|
||||
"html_head=TXT": "includes TXT in the <head>",
|
||||
"robots": "allows indexing by search engines (default)",
|
||||
@@ -145,9 +202,12 @@ flagcats = {
|
||||
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
|
||||
"md_sbf": "list of markdown-sandbox safeguards to disable",
|
||||
"lg_sbf": "list of *logue-sandbox safeguards to disable",
|
||||
"nohtml": "return html and markdown as text/html",
|
||||
},
|
||||
"others": {
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
|
||||
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
|
||||
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
||||
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
||||
"davauth": "ask webdav clients to login for all folders",
|
||||
"davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
|
||||
},
|
||||
|
||||
@@ -12,9 +12,10 @@ import time
|
||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
from pyftpdlib.servers import FTPServer
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||
from .__init__ import PY2, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
@@ -30,15 +31,6 @@ from .util import (
|
||||
vjoin,
|
||||
)
|
||||
|
||||
try:
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
except ImportError:
|
||||
p = os.path.join(E.mod, "vend")
|
||||
print("loading asynchat from " + p)
|
||||
sys.path.append(p)
|
||||
from pyftpdlib.ioloop import IOLoop
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
@@ -79,10 +71,14 @@ class FtpAuth(DummyAuthorizer):
|
||||
raise AuthenticationFailed("banned")
|
||||
|
||||
asrv = self.hub.asrv
|
||||
if username == "anonymous":
|
||||
uname = "*"
|
||||
else:
|
||||
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*"
|
||||
uname = "*"
|
||||
if username != "anonymous":
|
||||
uname = ""
|
||||
for zs in (password, username):
|
||||
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||
if zs:
|
||||
uname = zs
|
||||
break
|
||||
|
||||
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
|
||||
g = self.hub.gpwd
|
||||
@@ -91,6 +87,12 @@ class FtpAuth(DummyAuthorizer):
|
||||
if bonk:
|
||||
logging.warning("client banned: invalid passwords")
|
||||
bans[ip] = bonk
|
||||
try:
|
||||
# only possible if multiprocessing disabled
|
||||
self.hub.broker.httpsrv.bans[ip] = bonk # type: ignore
|
||||
self.hub.broker.httpsrv.nban += 1 # type: ignore
|
||||
except:
|
||||
pass
|
||||
|
||||
raise AuthenticationFailed("Authentication failed.")
|
||||
|
||||
@@ -131,6 +133,7 @@ class FtpFs(AbstractedFS):
|
||||
|
||||
self.can_read = self.can_write = self.can_move = False
|
||||
self.can_delete = self.can_get = self.can_upget = False
|
||||
self.can_admin = self.can_dot = False
|
||||
|
||||
self.listdirinfo = self.listdir
|
||||
self.chdir(".")
|
||||
@@ -146,7 +149,7 @@ class FtpFs(AbstractedFS):
|
||||
try:
|
||||
vpath = vpath.replace("\\", "/").strip("/")
|
||||
rd, fn = os.path.split(vpath)
|
||||
if ANYWIN and relchk(rd):
|
||||
if relchk(rd):
|
||||
logging.warning("malicious vpath: %s", vpath)
|
||||
t = "Unsupported characters in [{}]"
|
||||
raise FSE(t.format(vpath), 1)
|
||||
@@ -165,7 +168,7 @@ class FtpFs(AbstractedFS):
|
||||
if not avfs:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
cr, cw, cm, cd, _, _ = avfs.can_access("", self.h.uname)
|
||||
cr, cw, cm, cd, _, _, _, _ = avfs.can_access("", self.h.uname)
|
||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||
raise FSE(t.format(vpath), 1)
|
||||
|
||||
@@ -240,6 +243,8 @@ class FtpFs(AbstractedFS):
|
||||
self.can_delete,
|
||||
self.can_get,
|
||||
self.can_upget,
|
||||
self.can_admin,
|
||||
self.can_dot,
|
||||
) = avfs.can_access("", self.h.uname)
|
||||
|
||||
def mkdir(self, path: str) -> None:
|
||||
@@ -262,7 +267,7 @@ class FtpFs(AbstractedFS):
|
||||
vfs_ls = [x[0] for x in vfs_ls1]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
|
||||
if not self.args.ed:
|
||||
if not self.can_dot:
|
||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||
|
||||
vfs_ls.sort()
|
||||
@@ -401,7 +406,16 @@ class FtpHandler(FTPHandler):
|
||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||
|
||||
cip = self.remote_ip
|
||||
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
||||
if cip.startswith("::ffff:"):
|
||||
cip = cip[7:]
|
||||
|
||||
if self.args.ftp_ipa_re and not self.args.ftp_ipa_re.match(cip):
|
||||
logging.warning("client rejected (--ftp-ipa): %s", cip)
|
||||
self.connected = False
|
||||
conn.close()
|
||||
return
|
||||
|
||||
self.cli_ip = cip
|
||||
|
||||
# abspath->vpath mapping to resolve log_transfer paths
|
||||
self.vfs_map: dict[str, str] = {}
|
||||
|
||||
File diff suppressed because it is too large
@@ -8,6 +8,12 @@ import socket
|
||||
import threading # typechk
|
||||
import time
|
||||
|
||||
try:
|
||||
HAVE_SSL = True
|
||||
import ssl
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
from . import util as Util
|
||||
from .__init__ import TYPE_CHECKING, EnvParams
|
||||
from .authsrv import AuthSrv # typechk
|
||||
@@ -106,32 +112,30 @@ class HttpConn(object):
|
||||
return self.u2idx
|
||||
|
||||
def _detect_https(self) -> bool:
|
||||
method = None
|
||||
if True:
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
try:
|
||||
method = self.s.recv(4, socket.MSG_PEEK)
|
||||
except socket.timeout:
|
||||
return False
|
||||
except AttributeError:
|
||||
# jython does not support msg_peek; forget about https
|
||||
method = self.s.recv(4)
|
||||
self.sr = Util.Unrecv(self.s, self.log)
|
||||
self.sr.buf = method
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s, "sendall", None):
|
||||
self.s.sendall = self.s.send # type: ignore
|
||||
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
len(method)
|
||||
)
|
||||
if method:
|
||||
self.log(err)
|
||||
if len(method) != 4:
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
len(method)
|
||||
)
|
||||
if method:
|
||||
self.log(err)
|
||||
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return False
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return False
|
||||
|
||||
return not method or not bool(PTN_HTTP.match(method))
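The detection works by peeking at the first four bytes of the connection without consuming them; anything that does not look like an HTTP request line is assumed to be a TLS ClientHello. A standalone sketch of that peek; the PTN_HTTP pattern below is an assumption, not copyparty's exact regex:

```python
import re
import socket

# assumption: a pattern matching the start of a plaintext HTTP request line
PTN_HTTP = re.compile(br"[A-Z]{3,4}[ /A-Z]")

def looks_like_tls(sck: socket.socket) -> bool:
    try:
        head = sck.recv(4, socket.MSG_PEEK)  # inspect without consuming the bytes
    except socket.timeout:
        return False
    # a TLS ClientHello begins with 0x16 0x03, which never matches an HTTP verb
    return not head or not PTN_HTTP.match(head)
```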
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import base64
|
||||
import math
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
@@ -54,8 +55,8 @@ except SyntaxError:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .metrics import Metrics
|
||||
from .u2idx import U2idx
|
||||
from .util import (
|
||||
E_SCK,
|
||||
@@ -65,6 +66,7 @@ from .util import (
|
||||
Magician,
|
||||
Netdev,
|
||||
NetMap,
|
||||
absreal,
|
||||
ipnorm,
|
||||
min_ex,
|
||||
shut_socket,
|
||||
@@ -98,12 +100,17 @@ class HttpSrv(object):
|
||||
# redefine in case of multiprocessing
|
||||
socket.setdefaulttimeout(120)
|
||||
|
||||
self.t0 = time.time()
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
self.magician = Magician()
|
||||
self.nm = NetMap([], {})
|
||||
self.ssdp: Optional["SSDPr"] = None
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422, False)
|
||||
self.gmal = Garda(self.args.ban_422)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.aclose: dict[str, int] = {}
|
||||
|
||||
@@ -121,6 +128,10 @@ class HttpSrv(object):
|
||||
self.t_periodic: Optional[threading.Thread] = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.metrics = Metrics(self)
|
||||
self.nreq = 0
|
||||
self.nsus = 0
|
||||
self.nban = 0
|
||||
self.srvs: list[socket.socket] = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients: set[HttpConn] = set() # laggy
|
||||
@@ -138,6 +149,12 @@ class HttpSrv(object):
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
self.statics: set[str] = set()
|
||||
self._build_statics()
|
||||
|
||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
|
||||
|
||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||
if not self.args.no_dav:
|
||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||
@@ -158,7 +175,7 @@ class HttpSrv(object):
|
||||
if self.args.log_thrs:
|
||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||
|
||||
self.th_cfg: dict[str, Any] = {}
|
||||
self.th_cfg: dict[str, set[str]] = {}
|
||||
Daemon(self.post_init, "hsrv-init2")
|
||||
|
||||
def post_init(self) -> None:
|
||||
@@ -168,6 +185,14 @@ class HttpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
def _build_statics(self) -> None:
|
||||
for dp, _, df in os.walk(os.path.join(self.E.mod, "web")):
|
||||
for fn in df:
|
||||
ap = absreal(os.path.join(dp, fn))
|
||||
self.statics.add(ap)
|
||||
if ap.endswith(".gz") or ap.endswith(".br"):
|
||||
self.statics.add(ap[:-3])
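_build_statics indexes every bundled asset under web/ by absolute path, and for precompressed .gz/.br files it also records the plain filename so a request for either name resolves. The same walk as a standalone function; webroot is whatever directory you point it at:

```python
import os

def build_statics(webroot: str) -> set:
    # hedged standalone version of the walk above; the .gz/.br handling
    # mirrors the diff, everything else is generic
    statics = set()
    for dp, _, filenames in os.walk(webroot):
        for fn in filenames:
            ap = os.path.realpath(os.path.join(dp, fn))
            statics.add(ap)
            if ap.endswith(".gz") or ap.endswith(".br"):
                # a precompressed asset also satisfies requests for the plain name
                statics.add(ap[:-3])
    return statics
```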
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
ips = set()
|
||||
for ip, _ in self.bound:
|
||||
|
||||
@@ -4,10 +4,11 @@ from __future__ import print_function, unicode_literals
|
||||
import argparse # typechk
|
||||
import colorsys
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
from .__init__ import PY2
|
||||
from .th_srv import HAVE_PIL
|
||||
from .util import BytesIO
|
||||
from .th_srv import HAVE_PIL, HAVE_PILF
|
||||
from .util import BytesIO # type: ignore
|
||||
|
||||
|
||||
class Ico(object):
|
||||
@@ -17,12 +18,14 @@ class Ico(object):
|
||||
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
|
||||
"""placeholder to make thumbnails not break"""
|
||||
|
||||
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
|
||||
bext = ext.encode("ascii", "replace")
|
||||
ext = bext.decode("utf-8")
|
||||
zb = hashlib.sha1(bext).digest()[2:4]
|
||||
if PY2:
|
||||
zb = [ord(x) for x in zb]
|
||||
zb = [ord(x) for x in zb] # type: ignore
|
||||
|
||||
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
|
||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
|
||||
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
||||
c = "".join(["{:02x}".format(x) for x in ci])
|
||||
|
||||
@@ -33,8 +36,34 @@ class Ico(object):
|
||||
h = int(100 / (float(sw) / float(sh)))
|
||||
w = 100
|
||||
|
||||
if chrome and as_thumb:
|
||||
if chrome:
|
||||
# cannot handle more than ~2000 unique SVGs
|
||||
if HAVE_PILF:
|
||||
# pillow 10.1 made this the default font;
|
||||
# svg: 3.7s, this: 36s
|
||||
try:
|
||||
from PIL import Image, ImageDraw
|
||||
|
||||
# [.lt] are hard to see lowercase / unspaced
|
||||
ext2 = re.sub("(.)", "\\1 ", ext).upper()
|
||||
|
||||
h = int(128 * h / w)
|
||||
w = 128
|
||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||
pb = ImageDraw.Draw(img)
|
||||
_, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
|
||||
xy = ((w - tw) // 2, (h - th) // 2)
|
||||
pb.text(xy, ext2, fill="#" + c[6:], font_size=16)
|
||||
|
||||
img = img.resize((w * 2, h * 2), Image.NEAREST)
|
||||
|
||||
buf = BytesIO()
|
||||
img.save(buf, format="PNG", compress_level=1)
|
||||
return "image/png", buf.getvalue()
|
||||
|
||||
except:
|
||||
pass
|
||||
|
||||
if HAVE_PIL:
|
||||
# svg: 3s, cache: 6s, this: 8s
|
||||
from PIL import Image, ImageDraw
|
||||
@@ -43,28 +72,25 @@ class Ico(object):
|
||||
w = 64
|
||||
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||
pb = ImageDraw.Draw(img)
|
||||
tw, th = pb.textsize(ext)
|
||||
pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:])
|
||||
try:
|
||||
_, _, tw, th = pb.textbbox((0, 0), ext)
|
||||
except:
|
||||
tw, th = pb.textsize(ext)
|
||||
|
||||
tw += len(ext)
|
||||
cw = tw // len(ext)
|
||||
x = ((w - tw) // 2) - (cw * 2) // 3
|
||||
fill = "#" + c[6:]
|
||||
for ch in ext:
|
||||
pb.text((x, (h - th) // 2), " %s " % (ch,), fill=fill)
|
||||
x += cw
|
||||
|
||||
img = img.resize((w * 3, h * 3), Image.NEAREST)
|
||||
|
||||
buf = BytesIO()
|
||||
img.save(buf, format="PNG", compress_level=1)
|
||||
return "image/png", buf.getvalue()
|
||||
|
||||
elif False:
|
||||
# 48s, too slow
|
||||
import pyvips
|
||||
|
||||
h = int(192 * h / w)
|
||||
w = 192
|
||||
img = pyvips.Image.text(
|
||||
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
|
||||
)
|
||||
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
|
||||
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
|
||||
buf = img.write_to_buffer(".png[compression=1]")
|
||||
return "image/png", buf
|
||||
|
||||
svg = """\
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
|
||||
|
||||
@@ -295,7 +295,9 @@ class MDNS(MCast):
|
||||
while self.running:
|
||||
timeout = (
|
||||
0.02 + random.random() * 0.07
|
||||
if self.probing or self.q or self.defend or self.unsolicited
|
||||
if self.probing or self.q or self.defend
|
||||
else max(0.05, self.unsolicited[0] - time.time())
|
||||
if self.unsolicited
|
||||
else (last_hop + ihop if ihop else 180)
|
||||
)
|
||||
rdy = select.select(self.srv, [], [], timeout)
|
||||
@@ -513,6 +515,10 @@ class MDNS(MCast):
|
||||
for srv in self.srv.values():
|
||||
tx.add(srv)
|
||||
|
||||
if not self.unsolicited and self.args.zm_spam:
|
||||
zf = time.time() + self.args.zm_spam + random.random() * 0.07
|
||||
self.unsolicited.append(zf)
|
||||
|
||||
for srv, deadline in list(self.defend.items()):
|
||||
if now < deadline:
|
||||
continue
|
||||
|
||||
copyparty/metrics.py (new file, 233 lines)
@@ -0,0 +1,233 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .util import Pebkac, get_df, unhumanize
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .httpcli import HttpCli
|
||||
from .httpsrv import HttpSrv
|
||||
|
||||
|
||||
class Metrics(object):
|
||||
def __init__(self, hsrv: "HttpSrv") -> None:
|
||||
self.hsrv = hsrv
|
||||
|
||||
def tx(self, cli: "HttpCli") -> bool:
|
||||
if not cli.avol:
|
||||
raise Pebkac(403, "not allowed for user " + cli.uname)
|
||||
|
||||
args = cli.args
|
||||
if not args.stats:
|
||||
raise Pebkac(403, "the stats feature is not enabled in server config")
|
||||
|
||||
conn = cli.conn
|
||||
vfs = conn.asrv.vfs
|
||||
allvols = list(sorted(vfs.all_vols.items()))
|
||||
|
||||
idx = conn.get_u2idx()
|
||||
if not idx or not hasattr(idx, "p_end"):
|
||||
idx = None
|
||||
|
||||
ret: list[str] = []
|
||||
|
||||
def addc(k: str, v: str, desc: str) -> None:
|
||||
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
|
||||
|
||||
def adduc(k: str, unit: str, v: str, desc: str) -> None:
|
||||
k += "_" + unit
|
||||
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
|
||||
|
||||
def addg(k: str, v: str, desc: str) -> None:
|
||||
zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
|
||||
ret.append(zs % (k, k, desc, k, v))
|
||||
|
||||
def addug(k: str, unit: str, v: str, desc: str) -> None:
|
||||
k += "_" + unit
|
||||
zs = "# TYPE %s gauge\n# UNIT %s %s\n# HELP %s %s\n%s %s"
|
||||
ret.append(zs % (k, k, unit, k, desc, k, v))
|
||||
|
||||
def addh(k: str, typ: str, desc: str) -> None:
|
||||
zs = "# TYPE %s %s\n# HELP %s %s"
|
||||
ret.append(zs % (k, typ, k, desc))
|
||||
|
||||
def addbh(k: str, desc: str) -> None:
|
||||
zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
|
||||
ret.append(zs % (k, k, k, desc))
|
||||
|
||||
def addv(k: str, v: str) -> None:
|
||||
ret.append("%s %s" % (k, v))
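The addc/addg/addug helpers each append one OpenMetrics stanza (TYPE, optional UNIT, HELP, then the sample lines). A small runnable illustration of the text two of them produce; the metric names and values below are examples, not live output:

```python
import time

ret = []
t0 = int(time.time())

def addc(k, v, desc):
    # counter: emits both the _created timestamp and the _total sample
    zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
    ret.append(zs % (k, k, desc, k, t0, k, v))

def addg(k, v, desc):
    zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
    ret.append(zs % (k, k, desc, k, v))

addc("cpp_http_reqs", "1234", "number of http(s) requests since last restart")
addg("cpp_http_conns", "7", "number of open http(s) client connections")
print("\n".join(ret + ["# EOF"]))
```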
|
||||
|
||||
t = "time since last copyparty restart"
|
||||
v = "{:.3f}".format(time.time() - self.hsrv.t0)
|
||||
addug("cpp_uptime", "seconds", v, t)
|
||||
|
||||
# timestamps are gauges because initial value is not zero
|
||||
t = "unixtime of last copyparty restart"
|
||||
v = "{:.3f}".format(self.hsrv.t0)
|
||||
addug("cpp_boot_unixtime", "seconds", v, t)
|
||||
|
||||
t = "number of open http(s) client connections"
|
||||
addg("cpp_http_conns", str(self.hsrv.ncli), t)
|
||||
|
||||
t = "number of http(s) requests since last restart"
|
||||
addc("cpp_http_reqs", str(self.hsrv.nreq), t)
|
||||
|
||||
t = "number of 403/422/malicious reqs since restart"
|
||||
addc("cpp_sus_reqs", str(self.hsrv.nsus), t)
|
||||
|
||||
v = str(len(conn.bans or []))
|
||||
addg("cpp_active_bans", v, "number of currently banned IPs")
|
||||
|
||||
t = "number of IPs banned since last restart"
|
||||
addg("cpp_total_bans", str(self.hsrv.nban), t)
|
||||
|
||||
if not args.nos_vst:
|
||||
x = self.hsrv.broker.ask("up2k.get_state")
|
||||
vs = json.loads(x.get())
|
||||
|
||||
nvidle = 0
|
||||
nvbusy = 0
|
||||
nvoffline = 0
|
||||
for v in vs["volstate"].values():
|
||||
if v == "online, idle":
|
||||
nvidle += 1
|
||||
elif "OFFLINE" in v:
|
||||
nvoffline += 1
|
||||
else:
|
||||
nvbusy += 1
|
||||
|
||||
addg("cpp_idle_vols", str(nvidle), "number of idle/ready volumes")
|
||||
addg("cpp_busy_vols", str(nvbusy), "number of busy/indexing volumes")
|
||||
addg("cpp_offline_vols", str(nvoffline), "number of offline volumes")
|
||||
|
||||
t = "time since last database activity (upload/rename/delete)"
|
||||
addug("cpp_db_idle", "seconds", str(vs["dbwt"]), t)
|
||||
|
||||
t = "unixtime of last database activity (upload/rename/delete)"
|
||||
addug("cpp_db_act", "seconds", str(vs["dbwu"]), t)
|
||||
|
||||
t = "number of files queued for hashing/indexing"
|
||||
addg("cpp_hashing_files", str(vs["hashq"]), t)
|
||||
|
||||
t = "number of files queued for metadata scanning"
|
||||
addg("cpp_tagq_files", str(vs["tagq"]), t)
|
||||
|
||||
try:
|
||||
t = "number of files queued for plugin-based analysis"
|
||||
addg("cpp_mtpq_files", str(int(vs["mtpq"])), t)
|
||||
except:
|
||||
pass
|
||||
|
||||
if not args.nos_hdd:
|
||||
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
||||
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
||||
for vpath, vol in allvols:
|
||||
free, total = get_df(vol.realpath)
|
||||
if free is None or total is None:
|
||||
continue
|
||||
|
||||
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
|
||||
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
|
||||
|
||||
if idx and not args.nos_vol:
|
||||
addbh("cpp_vol_bytes", "num bytes of data in volume")
|
||||
addh("cpp_vol_files", "gauge", "num files in volume")
|
||||
addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
|
||||
addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
|
||||
volsizes = []
|
||||
try:
|
||||
ptops = [x.realpath for _, x in allvols]
|
||||
x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
|
||||
volsizes = x.get()
|
||||
except Exception as ex:
|
||||
cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)
|
||||
|
||||
for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
|
||||
addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))
|
||||
|
||||
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
|
||||
|
||||
zi = unhumanize(vol.flags.get("vmaxb") or "0")
|
||||
if zi:
|
||||
v = str(zi - nbytes)
|
||||
addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)
|
||||
|
||||
zi = unhumanize(vol.flags.get("vmaxn") or "0")
|
||||
if zi:
|
||||
v = str(zi - nfiles)
|
||||
addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)
|
||||
|
||||
if volsizes:
|
||||
addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_vol_files{vol="total"}', str(tnfiles))
|
||||
|
||||
if idx and not args.nos_dup:
|
||||
addbh("cpp_dupe_bytes", "num dupe bytes in volume")
|
||||
addh("cpp_dupe_files", "gauge", "num dupe files in volume")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
for vpath, vol in allvols:
|
||||
cur = idx.get_cur(vol.realpath)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
nbytes = 0
|
||||
nfiles = 0
|
||||
q = "select sz, count(*)-1 c from up group by w having c"
|
||||
for sz, c in cur.execute(q):
|
||||
nbytes += sz * c
|
||||
nfiles += c
|
||||
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
|
||||
addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))
|
||||
|
||||
addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_dupe_files{vol="total"}', str(tnfiles))
|
||||
|
||||
if not args.nos_unf:
|
||||
addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
|
||||
addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
|
||||
tnbytes = 0
|
||||
tnfiles = 0
|
||||
try:
|
||||
x = self.hsrv.broker.ask("up2k.get_unfinished")
|
||||
xs = x.get()
|
||||
xj = json.loads(xs)
|
||||
for ptop, (nbytes, nfiles) in xj.items():
|
||||
tnbytes += nbytes
|
||||
tnfiles += nfiles
|
||||
vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
|
||||
if not vol:
|
||||
t = "tx_stats get_unfinished: could not map {}"
|
||||
cli.log(t.format(ptop), 3)
|
||||
continue
|
||||
|
||||
addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
|
||||
addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))
|
||||
|
||||
addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
|
||||
addv('cpp_unf_files{vol="total"}', str(tnfiles))
|
||||
|
||||
except Exception as ex:
|
||||
cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)
|
||||
|
||||
ret.append("# EOF")
|
||||
|
||||
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
|
||||
mime = cli.uparam.get("mime") or mime
|
||||
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
|
||||
return True
|
||||
@@ -8,7 +8,7 @@ import shutil
|
||||
import subprocess as sp
|
||||
import sys
|
||||
|
||||
from .__init__ import EXE, PY2, WINDOWS, E, unicode
|
||||
from .__init__ import ANYWIN, EXE, PY2, WINDOWS, E, unicode
|
||||
from .bos import bos
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
@@ -29,6 +29,9 @@ if True: # pylint: disable=using-constant-test
|
||||
|
||||
|
||||
def have_ff(scmd: str) -> bool:
|
||||
if ANYWIN:
|
||||
scmd += ".exe"
|
||||
|
||||
if PY2:
|
||||
print("# checking {}".format(scmd))
|
||||
acmd = (scmd + " -version").encode("ascii").split(b" ")
|
||||
@@ -115,7 +118,7 @@ def ffprobe(
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
rc, so, se = runcmd(cmd, timeout=timeout)
|
||||
rc, so, se = runcmd(cmd, timeout=timeout, nice=True)
|
||||
retchk(rc, cmd, se)
|
||||
return parse_ffprobe(so)
|
||||
|
||||
@@ -258,7 +261,8 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
|
||||
if ".resw" in ret and ".resh" in ret:
|
||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||
|
||||
zd = {k: (0, v) for k, v in ret.items()}
|
||||
zero = int("0")
|
||||
zd = {k: (zero, v) for k, v in ret.items()}
|
||||
|
||||
return zd, md
|
||||
|
||||
@@ -559,6 +563,7 @@ class MTag(object):
|
||||
|
||||
args = {
|
||||
"env": env,
|
||||
"nice": True,
|
||||
"timeout": parser.timeout,
|
||||
"kill": parser.kill,
|
||||
"capture": parser.capture,
|
||||
@@ -569,11 +574,6 @@ class MTag(object):
|
||||
zd.update(ret)
|
||||
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||
|
||||
@@ -15,7 +15,7 @@ from ipaddress import (
|
||||
)
|
||||
|
||||
from .__init__ import MACOS, TYPE_CHECKING
|
||||
from .util import Netdev, find_prefix, min_ex, spack
|
||||
from .util import Daemon, Netdev, find_prefix, min_ex, spack
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -228,6 +228,7 @@ class MCast(object):
|
||||
for srv in self.srv.values():
|
||||
assert srv.ip in self.sips
|
||||
|
||||
Daemon(self.hopper, "mc-hop")
|
||||
return bound
|
||||
|
||||
def setup_socket(self, srv: MC_Sck) -> None:
|
||||
@@ -299,33 +300,57 @@ class MCast(object):
|
||||
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
|
||||
self.log(t, 3)
|
||||
|
||||
self.hop(srv)
|
||||
if self.hop(srv, False):
|
||||
self.log("igmp was already joined?? chilling for a sec", 3)
|
||||
time.sleep(1.2)
|
||||
|
||||
self.hop(srv, True)
|
||||
self.b4.sort(reverse=True)
|
||||
self.b6.sort(reverse=True)
|
||||
|
||||
def hop(self, srv: MC_Sck) -> None:
|
||||
def hop(self, srv: MC_Sck, on: bool) -> bool:
|
||||
"""rejoin to keepalive on routers/switches without igmp-snooping"""
|
||||
sck = srv.sck
|
||||
req = srv.mreq
|
||||
if ":" in srv.ip:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
|
||||
# linux does leaves/joins twice with 0.2~1.05s spacing
|
||||
time.sleep(1.2)
|
||||
except:
|
||||
pass
|
||||
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
|
||||
if not on:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
|
||||
else:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
|
||||
time.sleep(1.2)
|
||||
except:
|
||||
pass
|
||||
if not on:
|
||||
try:
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
# t = "joining {} from ip {} idx {} with mreq {}"
|
||||
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
|
||||
|
||||
# t = "joining {} from ip {} idx {} with mreq {}"
|
||||
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
|
||||
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
|
||||
return True
|
||||
|
||||
def hopper(self):
|
||||
while self.args.mc_hop and self.running:
|
||||
time.sleep(self.args.mc_hop)
|
||||
if not self.running:
|
||||
return
|
||||
|
||||
for srv in self.srv.values():
|
||||
self.hop(srv, False)
|
||||
|
||||
# linux does leaves/joins twice with 0.2~1.05s spacing
|
||||
time.sleep(1.2)
|
||||
if not self.running:
|
||||
return
|
||||
|
||||
for srv in self.srv.values():
|
||||
self.hop(srv, True)
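hopper() periodically leaves and rejoins the multicast groups so announcements keep flowing through switches and routers that lack IGMP snooping. A minimal IPv4 version of that leave/rejoin; the group and interface addresses are illustrative defaults:

```python
import socket
import time

def rejoin(sck: socket.socket, group: str = "239.255.255.250", ifip: str = "0.0.0.0") -> None:
    # mreq = 4-byte group address followed by the 4-byte interface address
    mreq = socket.inet_aton(group) + socket.inet_aton(ifip)
    try:
        sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, mreq)
        time.sleep(1.2)  # linux spaces its own leave/join pairs by 0.2~1.05s
    except OSError:
        pass  # not currently joined; nothing to leave
    sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
```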
|
||||
|
||||
def map_client(self, cip: str) -> Optional[MC_Sck]:
|
||||
try:
|
||||
|
||||
copyparty/pwhash.py (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import hashlib
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .__init__ import unicode
|
||||
|
||||
|
||||
class PWHash(object):
|
||||
def __init__(self, args: argparse.Namespace):
|
||||
self.args = args
|
||||
|
||||
try:
|
||||
alg, ac = args.ah_alg.split(",")
|
||||
except:
|
||||
alg = args.ah_alg
|
||||
ac = {}
|
||||
|
||||
if alg == "none":
|
||||
alg = ""
|
||||
|
||||
self.alg = alg
|
||||
self.ac = ac
|
||||
if not alg:
|
||||
self.on = False
|
||||
self.hash = unicode
|
||||
return
|
||||
|
||||
self.on = True
|
||||
self.salt = args.ah_salt.encode("utf-8")
|
||||
self.cache: dict[str, str] = {}
|
||||
self.mutex = threading.Lock()
|
||||
self.hash = self._cache_hash
|
||||
|
||||
if alg == "sha2":
|
||||
self._hash = self._gen_sha2
|
||||
elif alg == "scrypt":
|
||||
self._hash = self._gen_scrypt
|
||||
elif alg == "argon2":
|
||||
self._hash = self._gen_argon2
|
||||
else:
|
||||
t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
|
||||
raise Exception(t.format(alg))
|
||||
|
||||
def _cache_hash(self, plain: str) -> str:
|
||||
with self.mutex:
|
||||
try:
|
||||
return self.cache[plain]
|
||||
except:
|
||||
pass
|
||||
|
||||
if not plain:
|
||||
return ""
|
||||
|
||||
if len(plain) > 255:
|
||||
raise Exception("password too long")
|
||||
|
||||
if len(self.cache) > 9000:
|
||||
self.cache = {}
|
||||
|
||||
ret = self._hash(plain)
|
||||
self.cache[plain] = ret
|
||||
return ret
|
||||
|
||||
def _gen_sha2(self, plain: str) -> str:
|
||||
its = int(self.ac[0]) if self.ac else 424242
|
||||
bplain = plain.encode("utf-8")
|
||||
ret = b"\n"
|
||||
for _ in range(its):
|
||||
ret = hashlib.sha512(self.salt + bplain + ret).digest()
|
||||
|
||||
return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")
|
||||
|
||||
def _gen_scrypt(self, plain: str) -> str:
|
||||
cost = 2 << 13
|
||||
its = 2
|
||||
blksz = 8
|
||||
para = 4
|
||||
try:
|
||||
cost = 2 << int(self.ac[0])
|
||||
its = int(self.ac[1])
|
||||
blksz = int(self.ac[2])
|
||||
para = int(self.ac[3])
|
||||
except:
|
||||
pass
|
||||
|
||||
ret = plain.encode("utf-8")
|
||||
for _ in range(its):
|
||||
ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
|
||||
|
||||
return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")
|
||||
|
||||
def _gen_argon2(self, plain: str) -> str:
|
||||
from argon2.low_level import Type as ArgonType
|
||||
from argon2.low_level import hash_secret
|
||||
|
||||
time_cost = 3
|
||||
mem_cost = 256
|
||||
parallelism = 4
|
||||
version = 19
|
||||
try:
|
||||
time_cost = int(self.ac[0])
|
||||
mem_cost = int(self.ac[1])
|
||||
parallelism = int(self.ac[2])
|
||||
version = int(self.ac[3])
|
||||
except:
|
||||
pass
|
||||
|
||||
bplain = plain.encode("utf-8")
|
||||
|
||||
bret = hash_secret(
|
||||
secret=bplain,
|
||||
salt=self.salt,
|
||||
time_cost=time_cost,
|
||||
memory_cost=mem_cost * 1024,
|
||||
parallelism=parallelism,
|
||||
hash_len=24,
|
||||
type=ArgonType.ID,
|
||||
version=version,
|
||||
)
|
||||
ret = bret.split(b"$")[-1].decode("utf-8")
|
||||
return "+" + ret.replace("/", "_").replace("+", "-")
|
||||
|
||||
def stdin(self) -> None:
|
||||
while True:
|
||||
ln = sys.stdin.readline().strip()
|
||||
if not ln:
|
||||
break
|
||||
print(self.hash(ln))
|
||||
|
||||
def cli(self) -> None:
|
||||
import getpass
|
||||
|
||||
while True:
|
||||
try:
|
||||
p1 = getpass.getpass("password> ")
|
||||
p2 = getpass.getpass("again or just hit ENTER> ")
|
||||
except EOFError:
|
||||
return
|
||||
|
||||
if p2 and p1 != p2:
|
||||
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
|
||||
continue
|
||||
print(self.hash(p1))
|
||||
print()
|
||||
@@ -32,6 +32,8 @@ class SMB(object):
|
||||
self.asrv = hub.asrv
|
||||
self.log = hub.log
|
||||
self.files: dict[int, tuple[float, str]] = {}
|
||||
self.noacc = self.args.smba
|
||||
self.accs = not self.args.smba
|
||||
|
||||
lg.setLevel(logging.DEBUG if self.args.smbvvv else logging.INFO)
|
||||
for x in ["impacket", "impacket.smbserver"]:
|
||||
@@ -94,6 +96,14 @@ class SMB(object):
|
||||
|
||||
port = int(self.args.smb_port)
|
||||
srv = smbserver.SimpleSMBServer(listenAddress=ip, listenPort=port)
|
||||
try:
|
||||
if self.accs:
|
||||
srv.setAuthCallback(self._auth_cb)
|
||||
except:
|
||||
self.accs = False
|
||||
self.noacc = True
|
||||
t = "impacket too old; access permissions will not work! all accounts are admin!"
|
||||
self.log("smb", t, 1)
|
||||
|
||||
ro = "no" if self.args.smbw else "yes" # (does nothing)
|
||||
srv.addShare("A", "/", readOnly=ro)
|
||||
@@ -119,24 +129,74 @@ class SMB(object):
|
||||
def start(self) -> None:
|
||||
Daemon(self.srv.start)
|
||||
|
||||
def _v2a(self, caller: str, vpath: str, *a: Any) -> tuple[VFS, str]:
|
||||
def _auth_cb(self, *a, **ka):
|
||||
debug("auth-result: %s %s", a, ka)
|
||||
conndata = ka["connData"]
|
||||
auth_ok = conndata["Authenticated"]
|
||||
uname = ka["user_name"] if auth_ok else "*"
|
||||
uname = self.asrv.iacct.get(uname, uname) or "*"
|
||||
oldname = conndata.get("partygoer", "*") or "*"
|
||||
cli_ip = conndata["ClientIP"]
|
||||
cli_hn = ka["host_name"]
|
||||
if uname != "*":
|
||||
conndata["partygoer"] = uname
|
||||
info("client %s [%s] authed as %s", cli_ip, cli_hn, uname)
|
||||
elif oldname != "*":
|
||||
info("client %s [%s] keeping old auth as %s", cli_ip, cli_hn, oldname)
|
||||
elif auth_ok:
|
||||
info("client %s [%s] authed as [*] (anon)", cli_ip, cli_hn)
|
||||
else:
|
||||
info("client %s [%s] rejected", cli_ip, cli_hn)
|
||||
|
||||
def _uname(self) -> str:
|
||||
if self.noacc:
|
||||
return LEELOO_DALLAS
|
||||
|
||||
try:
|
||||
# you found it! my single worst bit of code so far
|
||||
# (if you can think of a better way to track users through impacket i'm all ears)
|
||||
cf0 = inspect.currentframe().f_back.f_back
|
||||
cf = cf0.f_back
|
||||
for n in range(3):
|
||||
cl = cf.f_locals
|
||||
if "connData" in cl:
|
||||
return cl["connData"]["partygoer"]
|
||||
cf = cf.f_back
|
||||
raise Exception()
|
||||
except:
|
||||
warning(
|
||||
"nyoron... %s <<-- %s <<-- %s <<-- %s",
|
||||
cf0.f_code.co_name,
|
||||
cf0.f_back.f_code.co_name,
|
||||
cf0.f_back.f_back.f_code.co_name,
|
||||
cf0.f_back.f_back.f_back.f_code.co_name,
|
||||
)
|
||||
return "*"
|
||||
|
||||
def _v2a(
|
||||
self, caller: str, vpath: str, *a: Any, uname="", perms=None
|
||||
) -> tuple[VFS, str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
# cf = inspect.currentframe().f_back
|
||||
# c1 = cf.f_back.f_code.co_name
|
||||
# c2 = cf.f_code.co_name
|
||||
debug('%s("%s", %s)\033[K\033[0m', caller, vpath, str(a))
|
||||
if not uname:
|
||||
uname = self._uname()
|
||||
if not perms:
|
||||
perms = [True, True]
|
||||
|
||||
# TODO find a way to grab `identity` in smbComSessionSetupAndX and smb2SessionSetup
|
||||
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, True, True)
|
||||
debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, *perms)
|
||||
return vfs, vfs.canonical(rem)
|
||||
|
||||
def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
# caller = inspect.currentframe().f_back.f_code.co_name
|
||||
debug('listdir("%s", %s)\033[K\033[0m', vpath, str(a))
|
||||
vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, False, False)
|
||||
uname = self._uname()
|
||||
# debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname)
|
||||
vfs, rem = self.asrv.vfs.get(vpath, uname, False, False)
|
||||
_, vfs_ls, vfs_virt = vfs.ls(
|
||||
rem, LEELOO_DALLAS, not self.args.no_scandir, [[False, False]]
|
||||
rem, uname, not self.args.no_scandir, [[False, False]]
|
||||
)
|
||||
dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
fils = [x[0] for x in vfs_ls if x[0] not in dirs]
|
||||
@@ -149,8 +209,8 @@ class SMB(object):
|
||||
sz = 112 * 2 # ['.', '..']
|
||||
for n, fn in enumerate(ls):
|
||||
if sz >= 64000:
|
||||
t = "listing only %d of %d files (%d byte); see impacket#1433"
|
||||
warning(t, n, len(ls), sz)
|
||||
t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433"
|
||||
warning(t, n, len(ls), sz, vpath)
|
||||
break
|
||||
|
||||
nsz = len(fn.encode("utf-16", "replace"))
|
||||
@@ -171,10 +231,12 @@ class SMB(object):
|
||||
if wr and not self.args.smbw:
|
||||
yeet("blocked write (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("open", vpath, *a)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("open", vpath, *a, uname=uname, perms=[True, wr])
|
||||
if wr:
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked write (no-write-acc): " + vpath)
|
||||
t = "blocked write (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
xbu = vfs.flags.get("xbu")
|
||||
if xbu and not runhook(
|
||||
@@ -204,7 +266,7 @@ class SMB(object):
|
||||
|
||||
_, vp = self.files.pop(fd)
|
||||
vp, fn = os.path.split(vp)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, LEELOO_DALLAS, False, True)
|
||||
vfs, rem = self.hub.asrv.vfs.get(vp, self._uname(), False, True)
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.hub.up2k.hash_file(
|
||||
vfs.realpath,
|
||||
@@ -224,15 +286,18 @@ class SMB(object):
|
||||
vp1 = vp1.lstrip("/")
|
||||
vp2 = vp2.lstrip("/")
|
||||
|
||||
vfs2, ap2 = self._v2a("rename", vp2, vp1)
|
||||
uname = self._uname()
|
||||
vfs2, ap2 = self._v2a("rename", vp2, vp1, uname=uname)
|
||||
if not vfs2.axs.uwrite:
|
||||
yeet("blocked rename (no-write-acc): " + vp2)
|
||||
t = "blocked write (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs2.axs.uwrite, vp2, uname))
|
||||
|
||||
vfs1, _ = self.asrv.vfs.get(vp1, LEELOO_DALLAS, True, True)
|
||||
vfs1, _ = self.asrv.vfs.get(vp1, uname, True, True, True)
|
||||
if not vfs1.axs.umove:
|
||||
yeet("blocked rename (no-move-acc): " + vp1)
|
||||
t = "blocked rename (no-move-acc %s): /%s @%s"
|
||||
yeet(t % (vfs1.axs.umove, vp1, uname))
|
||||
|
||||
self.hub.up2k.handle_mv(LEELOO_DALLAS, vp1, vp2)
|
||||
self.hub.up2k.handle_mv(uname, vp1, vp2)
|
||||
try:
|
||||
bos.makedirs(ap2)
|
||||
except:
|
||||
@@ -242,52 +307,74 @@ class SMB(object):
|
||||
if not self.args.smbw:
|
||||
yeet("blocked mkdir (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("mkdir", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("mkdir", vpath, uname=uname)
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked mkdir (no-write-acc): " + vpath)
|
||||
t = "blocked mkdir (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
return bos.mkdir(ap)
|
||||
|
||||
def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result:
|
||||
return bos.stat(self._v2a("stat", vpath, *a)[1], *a, **ka)
|
||||
try:
|
||||
ap = self._v2a("stat", vpath, *a, perms=[True, False])[1]
|
||||
ret = bos.stat(ap, *a, **ka)
|
||||
# debug(" `-stat:ok")
|
||||
return ret
|
||||
except:
|
||||
# white lie: windows freaks out if we raise due to an offline volume
|
||||
# debug(" `-stat:NOPE (faking a directory)")
|
||||
ts = int(time.time())
|
||||
return os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, ts, ts, ts))
|
||||
|
||||
def _unlink(self, vpath: str) -> None:
|
||||
if not self.args.smbw:
|
||||
yeet("blocked delete (no --smbw): " + vpath)
|
||||
|
||||
# return bos.unlink(self._v2a("stat", vpath, *a)[1])
|
||||
vfs, ap = self._v2a("delete", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a(
|
||||
"delete", vpath, uname=uname, perms=[True, False, False, True]
|
||||
)
|
||||
if not vfs.axs.udel:
|
||||
yeet("blocked delete (no-del-acc): " + vpath)
|
||||
|
||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||
self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)
|
||||
self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False)
|
||||
|
||||
def _utime(self, vpath: str, times: tuple[float, float]) -> None:
|
||||
if not self.args.smbw:
|
||||
yeet("blocked utime (no --smbw): " + vpath)
|
||||
|
||||
vfs, ap = self._v2a("utime", vpath)
|
||||
uname = self._uname()
|
||||
vfs, ap = self._v2a("utime", vpath, uname=uname)
|
||||
if not vfs.axs.uwrite:
|
||||
yeet("blocked utime (no-write-acc): " + vpath)
|
||||
t = "blocked utime (no-write-acc %s): /%s @%s"
|
||||
yeet(t % (vfs.axs.uwrite, vpath, uname))
|
||||
|
||||
return bos.utime(ap, times)
|
||||
|
||||
def _p_exists(self, vpath: str) -> bool:
|
||||
# ap = "?"
|
||||
try:
|
||||
bos.stat(self._v2a("p.exists", vpath)[1])
|
||||
ap = self._v2a("p.exists", vpath, perms=[True, False])[1]
|
||||
bos.stat(ap)
|
||||
# debug(" `-exists((%s)->(%s)):ok", vpath, ap)
|
||||
return True
|
||||
except:
|
||||
# debug(" `-exists((%s)->(%s)):NOPE", vpath, ap)
|
||||
return False
|
||||
|
||||
def _p_getsize(self, vpath: str) -> int:
|
||||
st = bos.stat(self._v2a("p.getsize", vpath)[1])
|
||||
st = bos.stat(self._v2a("p.getsize", vpath, perms=[True, False])[1])
|
||||
return st.st_size
|
||||
|
||||
def _p_isdir(self, vpath: str) -> bool:
|
||||
try:
|
||||
st = bos.stat(self._v2a("p.isdir", vpath)[1])
|
||||
return stat.S_ISDIR(st.st_mode)
|
||||
st = bos.stat(self._v2a("p.isdir", vpath, perms=[True, False])[1])
|
||||
ret = stat.S_ISDIR(st.st_mode)
|
||||
# debug(" `-isdir:%s:%s", st.st_mode, ret)
|
||||
return ret
|
||||
except:
|
||||
return False
|
||||
|
||||
@@ -319,6 +406,7 @@ class SMB(object):
|
||||
|
||||
smbserver.os.path.abspath = self._hook
|
||||
smbserver.os.path.expanduser = self._hook
|
||||
smbserver.os.path.expandvars = self._hook
|
||||
smbserver.os.path.getatime = self._hook
|
||||
smbserver.os.path.getctime = self._hook
|
||||
smbserver.os.path.getmtime = self._hook
|
||||
|
||||
@@ -81,7 +81,7 @@ class SSDPr(object):
|
||||
ubase = "{}://{}:{}".format(proto, sip, sport)
|
||||
zsl = self.args.zsl
|
||||
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
|
||||
name = "{} @ {}".format(self.args.doctitle, self.args.name)
|
||||
name = self.args.doctitle
|
||||
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
||||
hc.reply(zs.encode("utf-8", "replace"))
|
||||
return False # close connection
|
||||
@@ -214,7 +214,7 @@ CONFIGID.UPNP.ORG: 1
|
||||
srv.sck.sendto(zb, addr[:2])
|
||||
|
||||
if cip not in self.txc.c:
|
||||
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), "6")
|
||||
self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), 6)
|
||||
|
||||
self.txc.add(cip)
|
||||
self.txc.cln()
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import stat
|
||||
import tarfile
|
||||
|
||||
@@ -44,6 +45,7 @@ class StreamTar(StreamArc):
|
||||
self,
|
||||
log: "NamedLogger",
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
cmp: str = "",
|
||||
**kwargs: Any
|
||||
):
|
||||
super(StreamTar, self).__init__(log, fgen)
|
||||
@@ -53,14 +55,41 @@ class StreamTar(StreamArc):
|
||||
self.qfile = QFile()
|
||||
self.errf: dict[str, Any] = {}
|
||||
|
||||
# python 3.8 changed to PAX_FORMAT as default,
|
||||
# waste of space and don't care about the new features
|
||||
# python 3.8 changed to PAX_FORMAT as default;
|
||||
# slower, bigger, and no particular advantage
|
||||
fmt = tarfile.GNU_FORMAT
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) # type: ignore
|
||||
if "pax" in cmp:
|
||||
# unless a client asks for it (currently
|
||||
# gnu-tar has wider support than pax-tar)
|
||||
fmt = tarfile.PAX_FORMAT
|
||||
cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
|
||||
|
||||
try:
|
||||
cmp, lv = cmp.replace(":", ",").split(",")
|
||||
lv = int(lv)
|
||||
except:
|
||||
lv = None
|
||||
|
||||
arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt}
|
||||
if cmp == "gz":
|
||||
fun = tarfile.TarFile.gzopen
|
||||
arg["compresslevel"] = lv if lv is not None else 3
|
||||
elif cmp == "bz2":
|
||||
fun = tarfile.TarFile.bz2open
|
||||
arg["compresslevel"] = lv if lv is not None else 2
|
||||
elif cmp == "xz":
|
||||
fun = tarfile.TarFile.xzopen
|
||||
arg["preset"] = lv if lv is not None else 1
|
||||
else:
|
||||
fun = tarfile.open
|
||||
arg["mode"] = "w|"
|
||||
|
||||
self.tar = fun(**arg)
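The new constructor accepts a client-supplied compression spec such as "gz:9", "xz,1" or "pax" and splits it into the tarfile opener, an optional level, and the archive format. A hedged sketch of just the parsing step; the helper itself is illustrative, the pax/level handling mirrors the diff:

```python
import re
import tarfile

def parse_cmp(cmp: str):
    fmt = tarfile.GNU_FORMAT
    if "pax" in cmp:
        fmt = tarfile.PAX_FORMAT
        cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp)
    try:
        cmp, lv = cmp.replace(":", ",").split(",")
        lv = int(lv)
    except ValueError:
        lv = None  # no level given, keep the per-codec default
    return cmp, lv, fmt

print(parse_cmp("gz:9"))    # gzip at level 9, GNU format
print(parse_cmp("pax,xz"))  # xz at the default level, PAX format
```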
|
||||
|
||||
Daemon(self._gen, "star-gen")
|
||||
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
buf = b""
|
||||
try:
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
@@ -72,6 +101,12 @@ class StreamTar(StreamArc):
|
||||
|
||||
yield None
|
||||
finally:
|
||||
while buf:
|
||||
try:
|
||||
buf = self.qfile.q.get()
|
||||
except:
|
||||
pass
|
||||
|
||||
if self.errf:
|
||||
bos.unlink(self.errf["ap"])
|
||||
|
||||
@@ -101,6 +136,9 @@ class StreamTar(StreamArc):
|
||||
errors.append((f["vp"], f["err"]))
|
||||
continue
|
||||
|
||||
if self.stopped:
|
||||
break
|
||||
|
||||
try:
|
||||
self.ser(f)
|
||||
except:
|
||||
|
||||
@@ -61,7 +61,7 @@ class Adapter(object):
|
||||
)
|
||||
|
||||
|
||||
if True:
|
||||
if True: # pylint: disable=using-constant-test
|
||||
# Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
|
||||
_IPv4Address = str
|
||||
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import CORES
|
||||
from .bos import bos
|
||||
from .th_cli import ThumbCli
|
||||
from .util import UTC, vjoin
|
||||
|
||||
if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Generator, Optional
|
||||
@@ -21,10 +25,78 @@ class StreamArc(object):
|
||||
):
|
||||
self.log = log
|
||||
self.fgen = fgen
|
||||
self.stopped = False
|
||||
|
||||
def gen(self) -> Generator[Optional[bytes], None, None]:
|
||||
raise Exception("override me")
|
||||
|
||||
def stop(self) -> None:
|
||||
self.stopped = True
|
||||
|
||||
|
||||
def gfilter(
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
thumbcli: ThumbCli,
|
||||
uname: str,
|
||||
vtop: str,
|
||||
fmt: str,
|
||||
) -> Generator[dict[str, Any], None, None]:
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
|
||||
pend = []
|
||||
with ThreadPoolExecutor(max_workers=CORES) as tp:
|
||||
try:
|
||||
for f in fgen:
|
||||
task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
|
||||
pend.append((task, f))
|
||||
if pend[0][0].done() or len(pend) > CORES * 4:
|
||||
task, f = pend.pop(0)
|
||||
try:
|
||||
f = task.result(600)
|
||||
except:
|
||||
pass
|
||||
yield f
|
||||
|
||||
for task, f in pend:
|
||||
try:
|
||||
f = task.result(600)
|
||||
except:
|
||||
pass
|
||||
yield f
|
||||
except Exception as ex:
|
||||
thumbcli.log("gfilter flushing ({})".format(ex))
|
||||
for task, f in pend:
|
||||
try:
|
||||
task.result(600)
|
||||
except:
|
||||
pass
|
||||
thumbcli.log("gfilter flushed")
|
||||
|
||||
|
||||
def enthumb(
|
||||
thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
|
||||
) -> dict[str, Any]:
|
||||
rem = f["vp"]
|
||||
ext = rem.rsplit(".", 1)[-1].lower()
|
||||
if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
|
||||
raise Exception()
|
||||
|
||||
vp = vjoin(vtop, rem.split("/", 1)[1])
|
||||
vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
|
||||
dbv, vrem = vn.get_dbv(rem)
|
||||
thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
|
||||
if not thp:
|
||||
raise Exception()
|
||||
|
||||
ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
|
||||
sz = bos.path.getsize(thp)
|
||||
st: os.stat_result = f["st"]
|
||||
ts = st.st_mtime
|
||||
f["ap"] = thp
|
||||
f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
|
||||
f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
|
||||
return f
|
||||
|
||||
|
||||
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
||||
report = ["copyparty failed to add the following files to the archive:", ""]
|
||||
@@ -36,7 +108,7 @@ def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
||||
tf_path = tf.name
|
||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
||||
|
||||
dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
|
||||
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
|
||||
|
||||
bos.chmod(tf_path, 0o444)
|
||||
return {
|
||||
|
||||
@@ -29,22 +29,29 @@ if True: # pylint: disable=using-constant-test
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
|
||||
from .authsrv import AuthSrv
|
||||
from .authsrv import BAD_CFG, AuthSrv
|
||||
from .cert import ensure_cert
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
|
||||
from .tcpsrv import TcpSrv
|
||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||
from .up2k import Up2k
|
||||
from .util import (
|
||||
DEF_EXP,
|
||||
DEF_MTE,
|
||||
DEF_MTH,
|
||||
FFMPEG_URL,
|
||||
UTC,
|
||||
VERSIONS,
|
||||
Daemon,
|
||||
Garda,
|
||||
HLog,
|
||||
HMaccas,
|
||||
ODict,
|
||||
alltrace,
|
||||
ansi_re,
|
||||
min_ex,
|
||||
mp,
|
||||
odfusion,
|
||||
pybin,
|
||||
start_log_thrs,
|
||||
start_stackmon,
|
||||
@@ -99,11 +106,6 @@ class SvcHub(object):
|
||||
|
||||
self.iphash = HMaccas(os.path.join(self.E.cfg, "iphash"), 8)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.no_dav = True
|
||||
@@ -119,7 +121,6 @@ class SvcHub(object):
|
||||
args.no_mv = True
|
||||
args.hardlink = True
|
||||
args.vague_403 = True
|
||||
args.ban_404 = "50,60,1440"
|
||||
args.nih = True
|
||||
|
||||
if args.s:
|
||||
@@ -130,8 +131,20 @@ class SvcHub(object):
|
||||
args.force_js = True
|
||||
|
||||
if not self._process_config():
|
||||
raise Exception("bad config")
|
||||
raise Exception(BAD_CFG)
|
||||
|
||||
# for non-http clients (ftp)
|
||||
self.bans: dict[str, int] = {}
|
||||
self.gpwd = Garda(self.args.ban_pw)
|
||||
self.g404 = Garda(self.args.ban_404)
|
||||
self.g403 = Garda(self.args.ban_403)
|
||||
self.g422 = Garda(self.args.ban_422, False)
|
||||
self.gmal = Garda(self.args.ban_422)
|
||||
self.gurl = Garda(self.args.ban_url)
|
||||
|
||||
self.log_div = 10 ** (6 - args.log_tdec)
|
||||
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
|
||||
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
if args.lo:
|
||||
self._setup_logfile(printed)
|
||||
@@ -161,6 +174,14 @@ class SvcHub(object):
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if args.nih:
|
||||
args.vname = ""
|
||||
args.doctitle = args.doctitle.replace(" @ --name", "")
|
||||
else:
|
||||
args.vname = args.name
|
||||
args.doctitle = args.doctitle.replace("--name", args.vname)
|
||||
args.bname = args.bname.replace("--name", args.vname) or args.vname
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
@@ -182,6 +203,10 @@ class SvcHub(object):
|
||||
self.log("root", "max clients: {}".format(self.args.nc))
|
||||
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
|
||||
if not self.tcpsrv.srv and self.args.ign_ebind_all:
|
||||
self.args.no_fastboot = True
|
||||
|
||||
self.up2k = Up2k(self)
|
||||
|
||||
decs = {k: 1 for k in self.args.th_dec.split(",")}
|
||||
@@ -239,7 +264,8 @@ class SvcHub(object):
|
||||
if args.ftp or args.ftps:
|
||||
from .ftpd import Ftpd
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
self.ftpd: Optional[Ftpd] = None
|
||||
Daemon(self.start_ftpd, "start_ftpd")
|
||||
zms += "f" if args.ftp else "F"
|
||||
|
||||
if args.smb:
|
||||
@@ -269,6 +295,28 @@ class SvcHub(object):
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def start_ftpd(self) -> None:
|
||||
time.sleep(30)
|
||||
if self.ftpd:
|
||||
return
|
||||
|
||||
self.restart_ftpd()
|
||||
|
||||
def restart_ftpd(self) -> None:
|
||||
if not hasattr(self, "ftpd"):
|
||||
return
|
||||
|
||||
from .ftpd import Ftpd
|
||||
|
||||
if self.ftpd:
|
||||
return # todo
|
||||
|
||||
if not os.path.exists(self.args.cert):
|
||||
ensure_cert(self.log, self.args)
|
||||
|
||||
self.ftpd = Ftpd(self)
|
||||
self.log("root", "started FTPd")
|
||||
|
||||
def thr_httpsrv_up(self) -> None:
|
||||
time.sleep(1 if self.args.ign_ebind_all else 5)
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
@@ -300,12 +348,20 @@ class SvcHub(object):
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
|
||||
time.sleep(0.1) # purely cosmetic dw
|
||||
ar = self.args
|
||||
for _ in range(10 if ar.ftp or ar.ftps else 0):
|
||||
time.sleep(0.03)
|
||||
if self.ftpd:
|
||||
break
|
||||
|
||||
if self.tcpsrv.qr:
|
||||
self.log("qr-code", self.tcpsrv.qr)
|
||||
else:
|
||||
self.log("root", "workers OK\n")
|
||||
|
||||
self.after_httpsrv_up()
|
||||
|
||||
def after_httpsrv_up(self) -> None:
|
||||
self.up2k.init_vols()
|
||||
|
||||
Daemon(self.sd_notify, "sd-notify")
|
||||
@@ -349,23 +405,70 @@ class SvcHub(object):
|
||||
if al.rsp_jtr:
|
||||
al.rsp_slp = 0.000001
|
||||
|
||||
al.th_covers = set(al.th_covers.split(","))
|
||||
zsl = al.th_covers.split(",")
|
||||
zsl = [x.strip() for x in zsl]
|
||||
zsl = [x for x in zsl if x]
|
||||
al.th_covers = set(zsl)
|
||||
al.th_coversd = set(zsl + ["." + x for x in zsl])
|
||||
|
||||
for k in "c".split(" "):
|
||||
vl = getattr(al, k)
|
||||
if not vl:
|
||||
continue
|
||||
|
||||
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
|
||||
vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
|
||||
setattr(al, k, vl)
|
||||
|
||||
for k in "lo hist ssl_log".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if vs and vs.startswith("~"):
|
||||
setattr(al, k, os.path.expanduser(vs))
|
||||
if vs:
|
||||
vs = os.path.expandvars(os.path.expanduser(vs))
|
||||
setattr(al, k, vs)
|
||||
|
||||
for k in "sus_urls nonsus_urls".split(" "):
|
||||
vs = getattr(al, k)
|
||||
if not vs or vs == "no":
|
||||
setattr(al, k, None)
|
||||
else:
|
||||
setattr(al, k, re.compile(vs))
|
||||
|
||||
if not al.sus_urls:
|
||||
al.ban_url = "no"
|
||||
elif al.ban_url == "no":
|
||||
al.sus_urls = None
|
||||
|
||||
al.xff_hdr = al.xff_hdr.lower()
|
||||
al.idp_h_usr = al.idp_h_usr.lower()
|
||||
# al.idp_h_grp = al.idp_h_grp.lower()
|
||||
|
||||
al.xff_re = self._ipa2re(al.xff_src)
|
||||
al.ipa_re = self._ipa2re(al.ipa)
|
||||
al.ftp_ipa_re = self._ipa2re(al.ftp_ipa or al.ipa)
|
||||
|
||||
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||
al.mte = odfusion(mte, al.mte)
|
||||
|
||||
mth = ODict.fromkeys(DEF_MTH.split(","), True)
|
||||
al.mth = odfusion(mth, al.mth)
|
||||
|
||||
exp = ODict.fromkeys(DEF_EXP.split(" "), True)
|
||||
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||
|
||||
for k in ["no_hash", "no_idx"]:
|
||||
ptn = getattr(self.args, k)
|
||||
if ptn:
|
||||
setattr(self.args, k, re.compile(ptn))
|
||||
|
||||
return True
|
||||
|
||||
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||
if txt in ("any", "0", ""):
|
||||
return None
|
||||
|
||||
zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
|
||||
return re.compile("^(?:" + zs + ")")
|
||||
|
||||
def _setlimits(self) -> None:
|
||||
try:
|
||||
import resource
|
||||
@@ -407,7 +510,7 @@ class SvcHub(object):
|
||||
self.args.nc = min(self.args.nc, soft // 2)
|
||||
|
||||
def _logname(self) -> str:
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
fn = str(self.args.lo)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
@@ -428,12 +531,17 @@ class SvcHub(object):
|
||||
sel_fn = "{}.{}".format(fn, ctr)
|
||||
|
||||
fn = sel_fn
|
||||
try:
|
||||
os.makedirs(os.path.dirname(fn))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
if do_xz:
|
||||
import lzma
|
||||
|
||||
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
|
||||
self.args.no_logflush = True
|
||||
else:
|
||||
lh = open(fn, "wt", encoding="utf-8", errors="replace")
|
||||
except:
|
||||
@@ -594,19 +702,25 @@ class SvcHub(object):
|
||||
ret = 1
|
||||
try:
|
||||
self.pr("OPYTHAT")
|
||||
tasks = []
|
||||
slp = 0.0
|
||||
|
||||
if self.mdns:
|
||||
Daemon(self.mdns.stop)
|
||||
tasks.append(Daemon(self.mdns.stop, "mdns"))
|
||||
slp = time.time() + 0.5
|
||||
|
||||
if self.ssdp:
|
||||
Daemon(self.ssdp.stop)
|
||||
tasks.append(Daemon(self.ssdp.stop, "ssdp"))
|
||||
slp = time.time() + 0.5
|
||||
|
||||
self.broker.shutdown()
|
||||
self.tcpsrv.shutdown()
|
||||
self.up2k.shutdown()
|
||||
|
||||
if hasattr(self, "smbd"):
|
||||
slp = max(slp, time.time() + 0.5)
|
||||
tasks.append(Daemon(self.smbd.stop, "smbd"))
|
||||
|
||||
if self.thumbsrv:
|
||||
self.thumbsrv.shutdown()
|
||||
|
||||
@@ -616,17 +730,19 @@ class SvcHub(object):
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
self.pr("waiting for thumbsrv (10sec)...")
|
||||
self.log("root", "waiting for thumbsrv (10sec)...")
|
||||
|
||||
if hasattr(self, "smbd"):
|
||||
slp = max(slp, time.time() + 0.5)
|
||||
Daemon(self.kill9, a=(1,))
|
||||
Daemon(self.smbd.stop)
|
||||
zf = max(time.time() - slp, 0)
|
||||
Daemon(self.kill9, a=(zf + 0.5,))
|
||||
|
||||
while time.time() < slp:
|
||||
time.sleep(0.1)
|
||||
if not next((x for x in tasks if x.is_alive), None):
|
||||
break
|
||||
|
||||
self.pr("nailed it", end="")
|
||||
time.sleep(0.05)
|
||||
|
||||
self.log("root", "nailed it")
|
||||
ret = self.retcode
|
||||
except:
|
||||
self.pr("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
|
||||
@@ -636,7 +752,7 @@ class SvcHub(object):
|
||||
print("\033]0;\033\\", file=sys.stderr, end="")
|
||||
sys.stderr.flush()
|
||||
|
||||
self.pr("\033[0m")
|
||||
self.pr("\033[0m", end="")
|
||||
if self.logf:
|
||||
self.logf.close()
|
||||
|
||||
@@ -648,14 +764,31 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
zd = datetime.utcnow()
|
||||
ts = "%04d-%04d-%06d.%03d" % (
|
||||
zd = datetime.now(UTC)
|
||||
ts = self.log_dfmt % (
|
||||
zd.year,
|
||||
zd.month * 100 + zd.day,
|
||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||
zd.microsecond // 1000,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
|
||||
|
||||
if c and not self.args.no_ansi:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3%sm%s\033[0m" % (c, msg)
|
||||
elif "\033" not in c:
|
||||
msg = "\033[%sm%s\033[0m" % (c, msg)
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
if "\033" in src:
|
||||
src += "\033[0m"
|
||||
|
||||
if "\033" in msg:
|
||||
msg += "\033[0m"
|
||||
|
||||
self.logf.write("@%s [%-21s] %s\n" % (ts, src, msg))
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
@@ -666,7 +799,7 @@ class SvcHub(object):
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -674,14 +807,20 @@ class SvcHub(object):
|
||||
dt += timedelta(hours=12)
|
||||
|
||||
dt = dt.replace(hour=0, minute=0, second=0)
|
||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
||||
try:
|
||||
tt = dt.utctimetuple()
|
||||
except:
|
||||
# still makes me hella uncomfortable
|
||||
tt = dt.timetuple()
|
||||
|
||||
self.next_day = calendar.timegm(tt)
|
||||
|
||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
"""handles logging from all components"""
|
||||
with self.log_mutex:
|
||||
now = time.time()
|
||||
if now >= self.next_day:
|
||||
dt = datetime.utcfromtimestamp(now)
|
||||
dt = datetime.fromtimestamp(now, UTC)
|
||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||
print(zs, end="")
|
||||
@@ -704,12 +843,12 @@ class SvcHub(object):
|
||||
else:
|
||||
msg = "%s%s\033[0m" % (c, msg)
|
||||
|
||||
zd = datetime.utcfromtimestamp(now)
|
||||
ts = "%02d:%02d:%02d.%03d" % (
|
||||
zd = datetime.fromtimestamp(now, UTC)
|
||||
ts = self.log_efmt % (
|
||||
zd.hour,
|
||||
zd.minute,
|
||||
zd.second,
|
||||
zd.microsecond // 1000,
|
||||
zd.microsecond // self.log_div,
|
||||
)
|
||||
msg = fmt % (ts, src, msg)
|
||||
try:
|
||||
@@ -725,6 +864,8 @@ class SvcHub(object):
|
||||
|
||||
if self.logf:
|
||||
self.logf.write(msg)
|
||||
if not self.args.no_logflush:
|
||||
self.logf.flush()
|
||||
|
||||
def pr(self, *a: Any, **ka: Any) -> None:
|
||||
try:
|
||||
|
||||
@@ -221,6 +221,7 @@ class StreamZip(StreamArc):
|
||||
fgen: Generator[dict[str, Any], None, None],
|
||||
utf8: bool = False,
|
||||
pre_crc: bool = False,
|
||||
**kwargs: Any
|
||||
) -> None:
|
||||
super(StreamZip, self).__init__(log, fgen)
|
||||
|
||||
@@ -275,6 +276,7 @@ class StreamZip(StreamArc):
|
||||
def gen(self) -> Generator[bytes, None, None]:
|
||||
errf: dict[str, Any] = {}
|
||||
errors = []
|
||||
mbuf = b""
|
||||
try:
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
@@ -283,13 +285,20 @@ class StreamZip(StreamArc):
|
||||
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
yield x
|
||||
mbuf += x
|
||||
if len(mbuf) >= 16384:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
except GeneratorExit:
|
||||
raise
|
||||
except:
|
||||
ex = min_ex(5, True).replace("\n", "\n-- ")
|
||||
errors.append((f["vp"], ex))
|
||||
|
||||
if mbuf:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
|
||||
if errors:
|
||||
errf, txt = errdesc(errors)
|
||||
self.log("\n".join(([repr(errf)] + txt[1:])))
|
||||
@@ -299,20 +308,23 @@ class StreamZip(StreamArc):
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
if len(mbuf) >= 16384:
|
||||
yield mbuf
|
||||
mbuf = b""
|
||||
cdir_end = self.pos
|
||||
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
mbuf += self._ct(buf)
|
||||
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
yield mbuf + self._ct(ecdr)
|
||||
finally:
|
||||
if errf:
|
||||
bos.unlink(errf["ap"])
|
||||
|
||||
@@ -8,13 +8,14 @@ import sys
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, unicode
|
||||
from .stolen.qrcodegen import QrCode
|
||||
from .cert import gencert
|
||||
from .stolen.qrcodegen import QrCode
|
||||
from .util import (
|
||||
E_ACCESS,
|
||||
E_ADDR_IN_USE,
|
||||
E_ADDR_NOT_AVAIL,
|
||||
E_UNREACH,
|
||||
IP6ALL,
|
||||
Netdev,
|
||||
min_ex,
|
||||
sunpack,
|
||||
@@ -240,6 +241,11 @@ class TcpSrv(object):
|
||||
raise OSError(E_ADDR_IN_USE[0], "")
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
try:
|
||||
srv.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||
@@ -254,6 +260,9 @@ class TcpSrv(object):
|
||||
srvs: list[socket.socket] = []
|
||||
for srv in self.srv:
|
||||
ip, port = srv.getsockname()[:2]
|
||||
if ip == IP6ALL:
|
||||
ip = "::" # jython
|
||||
|
||||
try:
|
||||
srv.listen(self.args.nc)
|
||||
try:
|
||||
@@ -275,6 +284,8 @@ class TcpSrv(object):
|
||||
srv.close()
|
||||
continue
|
||||
|
||||
t = "\n\nERROR: could not open listening socket, probably because one of the server ports ({}) is busy on one of the requested interfaces ({}); avoid this issue by specifying a different port (-p 3939) and/or a specific interface to listen on (-i 192.168.56.1)\n"
|
||||
self.log("tcpsrv", t.format(port, ip), 1)
|
||||
raise
|
||||
|
||||
bound.append((ip, port))
|
||||
@@ -297,6 +308,7 @@ class TcpSrv(object):
|
||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
||||
self.hub.start_zeroconf()
|
||||
gencert(self.log, self.args, self.netdevs)
|
||||
self.hub.restart_ftpd()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
|
||||
@@ -31,7 +31,7 @@ class ThumbCli(object):
|
||||
if not c:
|
||||
raise Exception()
|
||||
except:
|
||||
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
c = {k: set() for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||
|
||||
self.thumbable = c["thumbable"]
|
||||
self.fmt_pil = c["pil"]
|
||||
@@ -94,7 +94,7 @@ class ThumbCli(object):
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
tpaths = [tpath]
|
||||
if fmt == "w":
|
||||
# also check for jpg (maybe webp is unavailable)
|
||||
@@ -108,6 +108,7 @@ class ThumbCli(object):
|
||||
if st.st_size:
|
||||
ret = tpath = tp
|
||||
fmt = ret.rsplit(".")[1]
|
||||
break
|
||||
else:
|
||||
abort = True
|
||||
except:
|
||||
|
||||
@@ -13,11 +13,12 @@ import time
|
||||
from queue import Queue
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING
|
||||
from .authsrv import VFS
|
||||
from .bos import bos
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
from .util import (
|
||||
FFMPEG_URL,
|
||||
BytesIO,
|
||||
BytesIO, # type: ignore
|
||||
Cooldown,
|
||||
Daemon,
|
||||
Pebkac,
|
||||
@@ -36,14 +37,21 @@ if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_PILF = False
|
||||
HAVE_HEIF = False
|
||||
HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import ExifTags, Image, ImageOps
|
||||
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
ImageFont.load_default(size=16)
|
||||
HAVE_PILF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
@@ -78,17 +86,23 @@ except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
# base64 = 64 = 4096
|
||||
rd, fn = vsplit(rem)
|
||||
if rd:
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
else:
|
||||
rd = "top"
|
||||
if not rd:
|
||||
rd = "\ntop"
|
||||
|
||||
# spectrograms are never cropped; strip fullsize flag
|
||||
ext = rem.split(".")[-1].lower()
|
||||
if ext in ffa and fmt in ("wf", "jf"):
|
||||
fmt = fmt[:1]
|
||||
|
||||
rd += "\n" + fmt
|
||||
h = hashlib.sha512(afsenc(rd)).digest()
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(afsenc(fn)).digest()
|
||||
@@ -97,7 +111,8 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
||||
if fmt in ("opus", "caf"):
|
||||
cat = "ac"
|
||||
else:
|
||||
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
|
||||
fc = fmt[:1]
|
||||
fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
|
||||
cat = "th"
|
||||
|
||||
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||
@@ -110,8 +125,6 @@ class ThumbSrv(object):
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
res = hub.args.th_size.split("x")
|
||||
self.res = tuple([int(x) for x in res])
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
@@ -119,7 +132,7 @@ class ThumbSrv(object):
|
||||
self.stopping = False
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q: Queue[Optional[tuple[str, str]]] = Queue(self.nthr * 4)
|
||||
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
|
||||
|
||||
@@ -184,13 +197,17 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def getres(self, vn: VFS) -> tuple[int, int]:
|
||||
w, h = vn.flags["thsize"].split("x")
|
||||
return int(w), int(h)
|
||||
|
||||
def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
|
||||
histpath = self.asrv.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
return None
|
||||
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
||||
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition(self.mutex)
|
||||
do_conv = False
|
||||
@@ -211,8 +228,14 @@ class ThumbSrv(object):
|
||||
do_conv = True
|
||||
|
||||
if do_conv:
|
||||
self.q.put((abspath, tpath))
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
allvols = list(self.asrv.vfs.all_vols.values())
|
||||
vn = next((x for x in allvols if x.realpath == ptop), None)
|
||||
if not vn:
|
||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
||||
vn = self.asrv.vfs.all_aps[0][1]
|
||||
|
||||
self.q.put((abspath, tpath, fmt, vn))
|
||||
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
with self.mutex:
|
||||
@@ -248,7 +271,7 @@ class ThumbSrv(object):
|
||||
if not task:
|
||||
break
|
||||
|
||||
abspath, tpath = task
|
||||
abspath, tpath, fmt, vn = task
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
png_ok = False
|
||||
funs = []
|
||||
@@ -281,7 +304,7 @@ class ThumbSrv(object):
|
||||
|
||||
for fun in funs:
|
||||
try:
|
||||
fun(abspath, ttpath)
|
||||
fun(abspath, ttpath, fmt, vn)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
@@ -315,9 +338,10 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def fancy_pillow(self, im: "Image.Image") -> "Image.Image":
|
||||
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
|
||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||
r = max(*self.res) * 2
|
||||
res = self.getres(vn)
|
||||
r = max(*res) * 2
|
||||
im.thumbnail((r, r), resample=Image.LANCZOS)
|
||||
try:
|
||||
k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
|
||||
@@ -331,23 +355,23 @@ class ThumbSrv(object):
|
||||
if rot in rots:
|
||||
im = im.transpose(rots[rot])
|
||||
|
||||
if self.args.th_no_crop:
|
||||
im.thumbnail(self.res, resample=Image.LANCZOS)
|
||||
if fmt.endswith("f"):
|
||||
im.thumbnail(res, resample=Image.LANCZOS)
|
||||
else:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
dw, dh = res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
|
||||
return im
|
||||
|
||||
def conv_pil(self, abspath: str, tpath: str) -> None:
|
||||
def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
try:
|
||||
im = self.fancy_pillow(im)
|
||||
im = self.fancy_pillow(im, fmt, vn)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "90")
|
||||
im.thumbnail(self.res)
|
||||
im.thumbnail(self.getres(vn))
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
args = {"quality": 40}
|
||||
@@ -370,12 +394,12 @@ class ThumbSrv(object):
|
||||
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_vips(self, abspath: str, tpath: str) -> None:
|
||||
def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
crops = ["centre", "none"]
|
||||
if self.args.th_no_crop:
|
||||
if fmt.endswith("f"):
|
||||
crops = ["none"]
|
||||
|
||||
w, h = self.res
|
||||
w, h = self.getres(vn)
|
||||
kw = {"height": h, "size": "down", "intent": "relative"}
|
||||
|
||||
for c in crops:
|
||||
@@ -387,10 +411,11 @@ class ThumbSrv(object):
|
||||
if c == crops[-1]:
|
||||
raise
|
||||
|
||||
assert img # type: ignore
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -402,12 +427,13 @@ class ThumbSrv(object):
|
||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
if fmt.endswith("f"):
|
||||
scale += "decrease,setsar=1:1"
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
bscale = scale.format(*list(self.res)).encode("utf-8")
|
||||
res = self.getres(vn)
|
||||
bscale = scale.format(*list(res)).encode("utf-8")
|
||||
# fmt: off
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
@@ -439,11 +465,11 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def _run_ff(self, cmd: list[bytes]) -> None:
|
||||
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
ret, _, serr = runcmd(cmd, timeout=self.args.th_convt)
|
||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True)
|
||||
if not ret:
|
||||
return
|
||||
|
||||
@@ -486,8 +512,8 @@ class ThumbSrv(object):
|
||||
self.log(t + txt, c=c)
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def conv_waves(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_waves(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -512,10 +538,10 @@ class ThumbSrv(object):
|
||||
# fmt: on
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_spec(self, abspath: str, tpath: str) -> None:
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -555,13 +581,13 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
def conv_opus(self, abspath: str, tpath: str) -> None:
|
||||
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||
if self.args.no_acode:
|
||||
raise Exception("disabled in server config")
|
||||
|
||||
ret, _ = ffprobe(abspath, int(self.args.th_convt / 2))
|
||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||
if "ac" not in ret:
|
||||
raise Exception("not audio")
|
||||
|
||||
@@ -597,7 +623,7 @@ class ThumbSrv(object):
|
||||
fsenc(tmp_opus)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
# iOS fails to play some "insufficiently complex" files
|
||||
# (average file shorter than 8 seconds), so of course we
|
||||
@@ -621,7 +647,7 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
elif want_caf:
|
||||
# simple remux should be safe
|
||||
@@ -639,7 +665,7 @@ class ThumbSrv(object):
|
||||
fsenc(tpath)
|
||||
]
|
||||
# fmt: on
|
||||
self._run_ff(cmd)
|
||||
self._run_ff(cmd, vn)
|
||||
|
||||
if tmp_opus != tpath:
|
||||
try:
|
||||
|
||||
@@ -9,6 +9,7 @@ import time
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||
from .authsrv import LEELOO_DALLAS, VFS
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
from .util import (
|
||||
@@ -20,6 +21,7 @@ from .util import (
|
||||
min_ex,
|
||||
quotep,
|
||||
s3dec,
|
||||
vjoin,
|
||||
)
|
||||
|
||||
if HAVE_SQLITE3:
|
||||
@@ -61,7 +63,7 @@ class U2idx(object):
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
def fsearch(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
||||
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""search by up2k hashlist"""
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -69,13 +71,13 @@ class U2idx(object):
|
||||
|
||||
fsize = body["size"]
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
|
||||
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
||||
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
@@ -101,7 +103,7 @@ class U2idx(object):
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
|
||||
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
|
||||
cur = db.cursor()
|
||||
cur.execute('pragma table_info("up")').fetchone()
|
||||
self.log("ro: {}".format(db_path))
|
||||
@@ -113,14 +115,14 @@ class U2idx(object):
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
|
||||
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
|
||||
self.log("opened {}".format(db_path))
|
||||
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(
|
||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
||||
self, uname: str, vols: list[VFS], uq: str, lim: int
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
@@ -129,7 +131,6 @@ class U2idx(object):
|
||||
q = ""
|
||||
v: Union[str, int] = ""
|
||||
va: list[Union[str, int]] = []
|
||||
have_up = False # query has up.* operands
|
||||
have_mt = False
|
||||
is_key = True
|
||||
is_size = False
|
||||
@@ -174,21 +175,21 @@ class U2idx(object):
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
have_up = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
have_up = True
|
||||
|
||||
elif v == "up_at":
|
||||
v = "up.at"
|
||||
is_date = True
|
||||
|
||||
elif v == "path":
|
||||
v = "trim(?||up.rd,'/')"
|
||||
va.append("\nrd")
|
||||
have_up = True
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
have_up = True
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
have_mt = True
|
||||
@@ -264,19 +265,24 @@ class U2idx(object):
|
||||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(vols, q, va, have_up, have_mt, lim)
|
||||
return self.run_query(uname, vols, q, va, have_mt, lim)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(
|
||||
self,
|
||||
vols: list[tuple[str, str, dict[str, Any]]],
|
||||
uname: str,
|
||||
vols: list[VFS],
|
||||
uq: str,
|
||||
uv: list[Union[str, int]],
|
||||
have_up: bool,
|
||||
have_mt: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
if self.args.srch_dbg:
|
||||
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
|
||||
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
|
||||
self.log(t % (len(vols), zs), 5)
|
||||
|
||||
done_flag: list[bool] = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
@@ -295,13 +301,35 @@ class U2idx(object):
|
||||
|
||||
ret = []
|
||||
seen_rps: set[str] = set()
|
||||
lim = min(lim, int(self.args.srch_hits))
|
||||
clamp = int(self.args.srch_hits)
|
||||
if lim >= clamp:
|
||||
lim = clamp
|
||||
clamped = True
|
||||
else:
|
||||
clamped = False
|
||||
|
||||
taglist = {}
|
||||
for (vtop, ptop, flags) in vols:
|
||||
for vol in vols:
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
vtop = vol.vpath
|
||||
ptop = vol.realpath
|
||||
flags = vol.flags
|
||||
|
||||
cur = self.get_cur(ptop)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
excl = []
|
||||
for vp2 in self.asrv.vfs.all_vols.keys():
|
||||
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
|
||||
excl.append(vp2[len(vtop) :].lstrip("/"))
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "searching in volume /%s (%s), excludelist %s"
|
||||
self.log(t % (vtop, ptop, excl), 5)
|
||||
|
||||
self.active_cur = cur
|
||||
|
||||
vuv = []
|
||||
@@ -313,7 +341,8 @@ class U2idx(object):
|
||||
|
||||
sret = []
|
||||
fk = flags.get("fk")
|
||||
dots = flags.get("dotsrch")
|
||||
dots = flags.get("dotsrch") and uname in vol.axs.udot
|
||||
fk_alg = 2 if "fka" in flags else 1
|
||||
c = cur.execute(uq, tuple(vuv))
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit[:7]
|
||||
@@ -321,6 +350,13 @@ class U2idx(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
|
||||
if self.args.srch_dbg:
|
||||
zs = vjoin(vjoin(vtop, rd), fn)
|
||||
t = "database inconsistency in volume '/%s'; ignoring: %s"
|
||||
self.log(t % (vtop, zs), 1)
|
||||
continue
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
||||
if not dots and "/." in ("/" + rp):
|
||||
continue
|
||||
@@ -333,21 +369,35 @@ class U2idx(object):
|
||||
else:
|
||||
try:
|
||||
ap = absreal(os.path.join(ptop, rd, fn))
|
||||
inf = bos.stat(ap)
|
||||
ino = 0 if ANYWIN or fk_alg == 2 else bos.stat(ap).st_ino
|
||||
except:
|
||||
continue
|
||||
|
||||
suf = (
|
||||
"?k="
|
||||
+ gen_filekey(
|
||||
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||
)[:fk]
|
||||
)
|
||||
suf = "?k=" + gen_filekey(
|
||||
fk_alg,
|
||||
self.args.fk_salt,
|
||||
ap,
|
||||
sz,
|
||||
ino,
|
||||
)[:fk]
|
||||
|
||||
lim -= 1
|
||||
if lim < 0:
|
||||
break
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "in volume '/%s': hit: %s"
|
||||
self.log(t % (vtop, rp), 5)
|
||||
|
||||
zs = vjoin(vtop, rp)
|
||||
chk_vn, _ = self.asrv.vfs.get(zs, LEELOO_DALLAS, True, False)
|
||||
chk_vn = chk_vn.dbv or chk_vn
|
||||
if chk_vn.vpath != vtop:
|
||||
raise Exception(
|
||||
"database inconsistency! in volume '/%s' (%s), found file [%s] which belongs to volume '/%s' (%s)"
|
||||
% (vtop, ptop, zs, chk_vn.vpath, chk_vn.realpath)
|
||||
)
|
||||
|
||||
seen_rps.add(rp)
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
|
||||
|
||||
@@ -365,12 +415,16 @@ class U2idx(object):
|
||||
ret.extend(sret)
|
||||
# print("[{}] {}".format(ptop, sret))
|
||||
|
||||
if self.args.srch_dbg:
|
||||
t = "in volume '/%s': got %d hits, %d total so far"
|
||||
self.log(t % (vtop, len(sret), len(ret)), 5)
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = ""
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys()), lim < 0
|
||||
return ret, list(taglist.keys()), lim < 0 and not clamped
|
||||
|
||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||
for _ in range(self.timeout):
|
||||
|
||||
File diff suppressed because it is too large
@@ -25,7 +25,6 @@ import threading
import time
import traceback
from collections import Counter
from datetime import datetime
from email.utils import formatdate

from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
@@ -35,6 +34,35 @@ from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__version__ import S_BUILD_DT, S_VERSION
from .stolen import surrogateescape

try:
    from datetime import datetime, timezone

    UTC = timezone.utc
except:
    from datetime import datetime, timedelta, tzinfo

    TD_ZERO = timedelta(0)

    class _UTC(tzinfo):
        def utcoffset(self, dt):
            return TD_ZERO

        def tzname(self, dt):
            return "UTC"

        def dst(self, dt):
            return TD_ZERO

    UTC = _UTC()


if sys.version_info >= (3, 7) or (
    sys.version_info >= (3, 6) and platform.python_implementation() == "CPython"
):
    ODict = dict
else:
    from collections import OrderedDict as ODict
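
The try/except shim above yields a single UTC constant whether or not datetime.timezone exists, and it is what the rest of this changeset leans on when swapping the naive datetime.utcnow() / utcfromtimestamp() calls (deprecated in Python 3.12) for timezone-aware ones. A small illustration of that substitution; standalone sketch, not copyparty code:

from datetime import datetime, timezone

UTC = timezone.utc

dt = datetime.now(UTC)                        # replaces datetime.utcnow()
ts = datetime.fromtimestamp(1700000000, UTC)  # replaces datetime.utcfromtimestamp(1700000000)
print(dt.strftime("%Y-%m%d-%H%M%S"), ts.isoformat())
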
|
||||
|
||||
|
||||
def _ens(want: str) -> tuple[int, ...]:
|
||||
ret: list[int] = []
|
||||
@@ -56,6 +84,8 @@ E_ADDR_IN_USE = _ens("EADDRINUSE WSAEADDRINUSE")
|
||||
E_ACCESS = _ens("EACCES WSAEACCES")
|
||||
E_UNREACH = _ens("EHOSTUNREACH WSAEHOSTUNREACH ENETUNREACH WSAENETUNREACH")
|
||||
|
||||
IP6ALL = "0:0:0:0:0:0:0:0"
|
||||
|
||||
|
||||
try:
|
||||
import ctypes
|
||||
@@ -66,7 +96,9 @@ except:
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
import sqlite3 # pylint: disable=unused-import # typechk
|
||||
import sqlite3
|
||||
|
||||
assert hasattr(sqlite3, "connect") # graalpy
|
||||
except:
|
||||
HAVE_SQLITE3 = False
|
||||
|
||||
@@ -83,6 +115,11 @@ if True: # pylint: disable=using-constant-test
|
||||
import typing
|
||||
from typing import Any, Generator, Optional, Pattern, Protocol, Union
|
||||
|
||||
try:
|
||||
from typing import LiteralString
|
||||
except:
|
||||
pass
|
||||
|
||||
class RootLogger(Protocol):
|
||||
def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||
return None
|
||||
@@ -112,15 +149,15 @@ if not PY2:
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
else:
|
||||
from StringIO import StringIO as BytesIO
|
||||
from urllib import quote # pylint: disable=no-name-in-module
|
||||
from urllib import unquote # pylint: disable=no-name-in-module
|
||||
from StringIO import StringIO as BytesIO # type: ignore
|
||||
from urllib import quote # type: ignore # pylint: disable=no-name-in-module
|
||||
from urllib import unquote # type: ignore # pylint: disable=no-name-in-module
|
||||
|
||||
|
||||
try:
|
||||
struct.unpack(b">i", b"idgi")
|
||||
spack = struct.pack
|
||||
sunpack = struct.unpack
|
||||
spack = struct.pack # type: ignore
|
||||
sunpack = struct.unpack # type: ignore
|
||||
except:
|
||||
|
||||
def spack(fmt: bytes, *a: Any) -> bytes:
|
||||
@@ -171,6 +208,7 @@ HTTPCODE = {
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
503: "Service Unavailable",
|
||||
999: "MissingNo",
|
||||
}
|
||||
|
||||
|
||||
@@ -256,6 +294,13 @@ EXTS["vnd.mozilla.apng"] = "png"
|
||||
MAGIC_MAP = {"jpeg": "jpg"}
|
||||
|
||||
|
||||
DEF_EXP = "self.ip self.ua self.uname self.host cfg.name cfg.logout vf.scan vf.thsize hdr.cf_ipcountry srv.itime srv.htime"
|
||||
|
||||
DEF_MTE = "circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash"
|
||||
|
||||
DEF_MTH = ".vq,.aq,vc,ac,fmt,res,.fps"
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
v: ln.split(" ", 1)[0]
|
||||
for ln in """
|
||||
@@ -293,11 +338,19 @@ REKOBO_KEY = {
|
||||
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
|
||||
|
||||
_exestr = "python3 python ffmpeg ffprobe cfssl cfssljson cfssl-certinfo"
|
||||
CMD_EXEB = set(_exestr.encode("utf-8").split())
|
||||
CMD_EXES = set(_exestr.split())
|
||||
|
||||
|
||||
pybin = sys.executable or ""
|
||||
if EXE:
|
||||
pybin = ""
|
||||
for zsg in "python3 python".split():
|
||||
try:
|
||||
if ANYWIN:
|
||||
zsg += ".exe"
|
||||
|
||||
zsg = shutil.which(zsg)
|
||||
if zsg:
|
||||
pybin = zsg
|
||||
@@ -330,6 +383,7 @@ def py_desc() -> str:
|
||||
|
||||
|
||||
def _sqlite_ver() -> str:
|
||||
assert sqlite3 # type: ignore
|
||||
try:
|
||||
co = sqlite3.connect(":memory:")
|
||||
cur = co.cursor()
|
||||
@@ -925,7 +979,8 @@ class Magician(object):
|
||||
class Garda(object):
|
||||
"""ban clients for repeated offenses"""
|
||||
|
||||
def __init__(self, cfg: str) -> None:
|
||||
def __init__(self, cfg: str, uniq: bool = True) -> None:
|
||||
self.uniq = uniq
|
||||
try:
|
||||
a, b, c = cfg.strip().split(",")
|
||||
self.lim = int(a)
|
||||
@@ -971,7 +1026,7 @@ class Garda(object):
|
||||
# assume /64 clients; drop 4 groups
|
||||
ip = IPv6Address(ip).exploded[:-20]
|
||||
|
||||
if prev:
|
||||
if prev and self.uniq:
|
||||
if self.prev.get(ip) == prev:
|
||||
return 0, ip
|
||||
|
||||
@@ -1019,6 +1074,17 @@ def nuprint(msg: str) -> None:
|
||||
uprint("{}\n".format(msg))
|
||||
|
||||
|
||||
def dedent(txt: str) -> str:
|
||||
pad = 64
|
||||
lns = txt.replace("\r", "").split("\n")
|
||||
for ln in lns:
|
||||
zs = ln.lstrip()
|
||||
pad2 = len(ln) - len(zs)
|
||||
if zs and pad > pad2:
|
||||
pad = pad2
|
||||
return "\n".join([ln[pad:] for ln in lns])
|
||||
|
||||
|
||||
def rice_tid() -> str:
|
||||
tid = threading.current_thread().ident
|
||||
c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
|
||||
@@ -1104,7 +1170,7 @@ def stackmon(fp: str, ival: float, suffix: str) -> None:
|
||||
buf = lzma.compress(buf, preset=0)
|
||||
|
||||
if "%" in fp:
|
||||
dt = datetime.utcnow()
|
||||
dt = datetime.now(UTC)
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
if fs in fp:
|
||||
@@ -1227,12 +1293,15 @@ def ren_open(
|
||||
except OSError as ex_:
|
||||
ex = ex_
|
||||
|
||||
if ex.errno == errno.EINVAL and not asciified:
|
||||
# EPERM: android13
|
||||
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
|
||||
asciified = True
|
||||
bname, fname = [
|
||||
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
|
||||
for zs in [bname, fname]
|
||||
]
|
||||
zsl = []
|
||||
for zs in (bname, fname):
|
||||
zs = zs.encode("ascii", "replace").decode("ascii")
|
||||
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
|
||||
zsl.append(zs)
|
||||
bname, fname = zsl
|
||||
continue
|
||||
|
||||
# ENOTSUP: zfs on ubuntu 20.04
|
||||
@@ -1443,7 +1512,7 @@ class MultipartParser(object):
|
||||
for buf in iterable:
|
||||
ret += buf
|
||||
if len(ret) > max_len:
|
||||
raise Pebkac(400, "field length is too long")
|
||||
raise Pebkac(422, "field length is too long")
|
||||
|
||||
return ret
|
||||
|
||||
@@ -1511,8 +1580,8 @@ def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
||||
|
||||
raise Pebkac(
|
||||
400,
|
||||
"protocol error while reading headers:\n"
|
||||
+ ret.decode("utf-8", "replace"),
|
||||
"protocol error while reading headers",
|
||||
log=ret.decode("utf-8", "replace"),
|
||||
)
|
||||
|
||||
ofs = ret.find(b"\r\n\r\n")
|
||||
@@ -1550,15 +1619,18 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
|
||||
return fn
|
||||
|
||||
|
||||
def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
return base64.urlsafe_b64encode(
|
||||
hashlib.sha512(
|
||||
("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
|
||||
).digest()
|
||||
).decode("ascii")
|
||||
def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
|
||||
if alg == 1:
|
||||
zs = "%s %s %s %s" % (salt, fspath, fsize, inode)
|
||||
else:
|
||||
zs = "%s %s" % (salt, fspath)
|
||||
|
||||
zb = zs.encode("utf-8", "replace")
|
||||
return base64.urlsafe_b64encode(hashlib.sha512(zb).digest()).decode("ascii")
|
||||
|
||||
|
||||
def gen_filekey_dbg(
|
||||
alg: int,
|
||||
salt: str,
|
||||
fspath: str,
|
||||
fsize: int,
|
||||
@@ -1566,7 +1638,7 @@ def gen_filekey_dbg(
|
||||
log: "NamedLogger",
|
||||
log_ptn: Optional[Pattern[str]],
|
||||
) -> str:
|
||||
ret = gen_filekey(salt, fspath, fsize, inode)
|
||||
ret = gen_filekey(alg, salt, fspath, fsize, inode)
|
||||
|
||||
assert log_ptn
|
||||
if log_ptn.search(fspath):
|
||||
@@ -1592,16 +1664,15 @@ def gen_filekey_dbg(
|
||||
return ret
|
||||
|
||||
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
|
||||
v = v.replace("%", "%25").replace(";", "%3B")
|
||||
if dur:
|
||||
exp = formatdate(time.time() + dur, usegmt=True)
|
||||
else:
|
||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||
|
||||
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
|
||||
k, v, r, exp, "; Secure" if tls else ""
|
||||
)
|
||||
t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=Lax"
|
||||
return t % (k, v, r, exp, "; Secure" if tls else "", txt)
|
||||
|
||||
|
||||
def humansize(sz: float, terse: bool = False) -> str:
|
||||
@@ -1626,7 +1697,12 @@ def unhumanize(sz: str) -> int:
|
||||
pass
|
||||
|
||||
mc = sz[-1:].lower()
|
||||
mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1)
|
||||
mi = {
|
||||
"k": 1024,
|
||||
"m": 1024 * 1024,
|
||||
"g": 1024 * 1024 * 1024,
|
||||
"t": 1024 * 1024 * 1024 * 1024,
|
||||
}.get(mc, 1)
|
||||
return int(float(sz[:-1]) * mi)
|
||||
|
||||
|
||||
@@ -1713,7 +1789,16 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
|
||||
return fn.strip()
|
||||
|
||||
|
||||
def sanitize_vpath(vp: str, ok: str, bad: list[str]) -> str:
|
||||
parts = vp.replace(os.sep, "/").split("/")
|
||||
ret = [sanitize_fn(x, ok, bad) for x in parts]
|
||||
return "/".join(ret)
|
||||
|
||||
|
||||
def relchk(rp: str) -> str:
|
||||
if "\x00" in rp:
|
||||
return "[nul]"
|
||||
|
||||
if ANYWIN:
|
||||
if "\n" in rp or "\r" in rp:
|
||||
return "x\nx"
|
||||
@@ -1749,6 +1834,26 @@ def exclude_dotfiles(filepaths: list[str]) -> list[str]:
    return [x for x in filepaths if not x.split("/")[-1].startswith(".")]


def odfusion(
    base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str
) -> ODict[str, bool]:
    # merge an "ordered set" (just a dict really) with another list of keys
    words0 = [x for x in oth.split(",") if x]
    words1 = [x for x in oth[1:].split(",") if x]

    ret = base.copy()
    if oth.startswith("+"):
        for k in words1:
            ret[k] = True
    elif oth[:1] in ("-", "/"):
        for k in words1:
            ret.pop(k, None)
    else:
        ret = ODict.fromkeys(words0, True)

    return ret
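
odfusion treats a dict as an ordered set: a leading "+" appends keys, a leading "-" or "/" removes them, and anything else replaces the set wholesale. A quick illustration, assuming ODict is plain dict (as it is on py3.7+ per the shim earlier in this file) and calling the function as defined above; the values are just membership placeholders:

base = ODict.fromkeys(["artist", "title", ".bpm"], True)

print(list(odfusion(base, "+key,.dur")))  # ['artist', 'title', '.bpm', 'key', '.dur']
print(list(odfusion(base, "-.bpm")))      # ['artist', 'title']
print(list(odfusion(base, "vc,ac")))      # ['vc', 'ac']
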
|
||||
|
||||
|
||||
def ipnorm(ip: str) -> str:
|
||||
if ":" in ip:
|
||||
# assume /64 clients; drop 4 groups
|
||||
@@ -1901,10 +2006,10 @@ else:
|
||||
# moonrunes become \x3f with bytestrings,
|
||||
# losing mojibake support is worth
|
||||
def _not_actually_mbcs_enc(txt: str) -> bytes:
|
||||
return txt
|
||||
return txt # type: ignore
|
||||
|
||||
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
||||
return txt
|
||||
return txt # type: ignore
|
||||
|
||||
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
||||
fsdec = _not_actually_mbcs_dec
|
||||
@@ -1963,6 +2068,7 @@ def atomic_move(usrc: str, udst: str) -> None:
|
||||
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
|
||||
try:
|
||||
# some fuses misbehave
|
||||
assert ctypes
|
||||
if ANYWIN:
|
||||
bfree = ctypes.c_ulonglong(0)
|
||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
|
||||
@@ -2115,7 +2221,7 @@ def list_ips() -> list[str]:
|
||||
def yieldfile(fn: str) -> Generator[bytes, None, None]:
|
||||
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
buf = f.read(128 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
@@ -2365,6 +2471,7 @@ def getalive(pids: list[int], pgid: int) -> list[int]:
|
||||
alive.append(pid)
|
||||
else:
|
||||
# windows doesn't have pgroups; assume
|
||||
assert psutil
|
||||
psutil.Process(pid)
|
||||
alive.append(pid)
|
||||
except:
|
||||
@@ -2382,6 +2489,7 @@ def killtree(root: int) -> None:
|
||||
pgid = 0
|
||||
|
||||
if HAVE_PSUTIL:
|
||||
assert psutil
|
||||
pids = [root]
|
||||
parent = psutil.Process(root)
|
||||
for child in parent.children(recursive=True):
|
||||
@@ -2421,9 +2529,34 @@ def killtree(root: int) -> None:
|
||||
pass
|
||||
|
||||
|
||||
def _find_nice() -> str:
|
||||
if WINDOWS:
|
||||
return "" # use creationflags
|
||||
|
||||
try:
|
||||
zs = shutil.which("nice")
|
||||
if zs:
|
||||
return zs
|
||||
except:
|
||||
pass
|
||||
|
||||
# busted PATHs and/or py2
|
||||
for zs in ("/bin", "/sbin", "/usr/bin", "/usr/sbin"):
|
||||
zs += "/nice"
|
||||
if os.path.exists(zs):
|
||||
return zs
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
NICES = _find_nice()
|
||||
NICEB = NICES.encode("utf-8")
|
||||
|
||||
|
||||
def runcmd(
|
||||
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
|
||||
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
|
||||
) -> tuple[int, str, str]:
|
||||
isbytes = isinstance(argv[0], (bytes, bytearray))
|
||||
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
||||
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
||||
|
||||
@@ -2436,6 +2569,23 @@ def runcmd(
|
||||
bout: bytes
|
||||
berr: bytes
|
||||
|
||||
if ANYWIN:
|
||||
if isbytes:
|
||||
if argv[0] in CMD_EXEB:
|
||||
argv[0] += b".exe"
|
||||
else:
|
||||
if argv[0] in CMD_EXES:
|
||||
argv[0] += ".exe"
|
||||
|
||||
if ka.pop("nice", None):
|
||||
if WINDOWS:
|
||||
ka["creationflags"] = 0x4000
|
||||
elif NICEB:
|
||||
if isbytes:
|
||||
argv = [NICEB] + argv
|
||||
else:
|
||||
argv = [NICES] + argv
|
||||
|
||||
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
|
||||
if not timeout or PY2:
|
||||
bout, berr = p.communicate(sin)
|
||||
@@ -2475,7 +2625,7 @@ def chkcmd(argv: Union[list[bytes], list[str]], **ka: Any) -> tuple[str, str]:
|
||||
return sout, serr
|
||||
|
||||
|
||||
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: int = 10) -> None:
|
||||
def mchkcmd(argv: Union[list[bytes], list[str]], timeout: float = 10) -> None:
|
||||
if PY2:
|
||||
with open(os.devnull, "wb") as f:
|
||||
rv = sp.call(argv, stdout=f, stderr=f)
|
||||
@@ -2583,13 +2733,13 @@ def _parsehook(
|
||||
|
||||
sp_ka = {
|
||||
"env": env,
|
||||
"nice": True,
|
||||
"timeout": tout,
|
||||
"kill": kill,
|
||||
"capture": cap,
|
||||
}
|
||||
|
||||
if cmd.startswith("~"):
|
||||
cmd = os.path.expanduser(cmd)
|
||||
cmd = os.path.expandvars(os.path.expanduser(cmd))
|
||||
|
||||
return chk, fork, jtxt, wait, sp_ka, cmd
|
||||
|
||||
@@ -2722,6 +2872,32 @@ def runhook(
|
||||
return True
|
||||
|
||||
|
||||
def loadpy(ap: str, hot: bool) -> Any:
|
||||
"""
|
||||
a nice can of worms capable of causing all sorts of bugs
|
||||
depending on what other inconveniently named files happen
|
||||
to be in the same folder
|
||||
"""
|
||||
ap = os.path.expandvars(os.path.expanduser(ap))
|
||||
mdir, mfile = os.path.split(absreal(ap))
|
||||
mname = mfile.rsplit(".", 1)[0]
|
||||
sys.path.insert(0, mdir)
|
||||
|
||||
if PY2:
|
||||
mod = __import__(mname)
|
||||
if hot:
|
||||
reload(mod) # type: ignore
|
||||
else:
|
||||
import importlib
|
||||
|
||||
mod = importlib.import_module(mname)
|
||||
if hot:
|
||||
importlib.reload(mod)
|
||||
|
||||
sys.path.remove(mdir)
|
||||
return mod
|
||||
|
||||
|
||||
def gzip_orig_sz(fn: str) -> int:
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
f.seek(-4, 2)
|
||||
@@ -2853,6 +3029,7 @@ def termsize() -> tuple[int, int]:
|
||||
def hidedir(dp) -> None:
|
||||
if ANYWIN:
|
||||
try:
|
||||
assert ctypes
|
||||
k32 = ctypes.WinDLL("kernel32")
|
||||
attrs = k32.GetFileAttributesW(dp)
|
||||
if attrs >= 0:
|
||||
@@ -2862,9 +3039,12 @@ def hidedir(dp) -> None:
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
||||
def __init__(
|
||||
self, code: int, msg: Optional[str] = None, log: Optional[str] = None
|
||||
) -> None:
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
self.code = code
|
||||
self.log = log
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||
|
||||
@@ -127,7 +127,7 @@ window.baguetteBox = (function () {
|
||||
var gallery = [];
|
||||
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
|
||||
var imageElementClickHandler = function (e) {
|
||||
if (ctrl(e))
|
||||
if (ctrl(e) || e && e.shiftKey)
|
||||
return true;
|
||||
|
||||
e.preventDefault ? e.preventDefault() : e.returnValue = false;
|
||||
@@ -261,7 +261,7 @@ window.baguetteBox = (function () {
|
||||
setloop(1);
|
||||
else if (k == "BracketRight")
|
||||
setloop(2);
|
||||
else if (e.shiftKey)
|
||||
else if (e.shiftKey && k != 'KeyR')
|
||||
return;
|
||||
else if (k == "ArrowLeft" || k == "KeyJ")
|
||||
showPreviousImage();
|
||||
@@ -310,7 +310,7 @@ window.baguetteBox = (function () {
|
||||
options = {};
|
||||
setOptions(o);
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -356,7 +356,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
setVmode();
|
||||
if (tt.en)
|
||||
tt.show.bind(this)();
|
||||
tt.show.call(this);
|
||||
}
|
||||
|
||||
function findfile() {
|
||||
@@ -376,7 +376,12 @@ window.baguetteBox = (function () {
|
||||
else
|
||||
(vid() || ebi('bbox-overlay')).requestFullscreen();
|
||||
}
|
||||
catch (ex) { alert(ex); }
|
||||
catch (ex) {
|
||||
if (IPHONE)
|
||||
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
|
||||
else
|
||||
alert(ex);
|
||||
}
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
@@ -519,7 +524,7 @@ window.baguetteBox = (function () {
|
||||
options[item] = newOptions[item];
|
||||
}
|
||||
|
||||
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
|
||||
var an = options.animation = sread('ganim', anims) || anims[ANIM ? 0 : 2];
|
||||
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
|
||||
btnAnim.setAttribute('tt', 'animation: ' + an);
|
||||
|
||||
@@ -870,7 +875,7 @@ window.baguetteBox = (function () {
|
||||
|
||||
if (loopB !== null) {
|
||||
timer.add(loopchk);
|
||||
sethash(window.location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
sethash(location.hash.slice(1).split('&')[0] + '&t=' + (loopA || 0) + '-' + loopB);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -968,7 +973,7 @@ window.baguetteBox = (function () {
|
||||
clmod(btnPrev, 'off', 't');
|
||||
clmod(btnNext, 'off', 't');
|
||||
|
||||
if (Date.now() - ctime <= 500)
|
||||
if (Date.now() - ctime <= 500 && !IPHONE)
|
||||
tglfull();
|
||||
|
||||
ctime = Date.now();
|
||||
|
||||
@@ -55,6 +55,7 @@
|
||||
--u2-sbtn-b1: #999;
|
||||
--u2-txt-bg: var(--bg-u5);
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: rgba(128,128,128,0.8);
|
||||
--u2-tab-1-fg: #fd7;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
@@ -270,6 +271,7 @@ html.bz {
|
||||
--btn-1h-fg: #000;
|
||||
--txt-sh: a;
|
||||
|
||||
--u2-tab-b1: var(--bg-u5);
|
||||
--u2-tab-1-fg: var(--fg-max);
|
||||
--u2-tab-1-bg: var(--bg);
|
||||
|
||||
@@ -329,6 +331,7 @@ html.c {
|
||||
html.cz {
|
||||
--bgg: var(--bg-u2);
|
||||
--srv-3: #fff;
|
||||
--u2-tab-b1: var(--bg-d3);
|
||||
}
|
||||
html.cy {
|
||||
--fg: #fff;
|
||||
@@ -411,10 +414,11 @@ html.dz {
|
||||
--op-aa-bg: var(--bg-d2);
|
||||
--op-a-sh: rgba(0,0,0,0.5);
|
||||
|
||||
--u2-btn-b1: #999;
|
||||
--u2-sbtn-b1: #999;
|
||||
--u2-btn-b1: var(--fg-weak);
|
||||
--u2-sbtn-b1: var(--fg-weak);
|
||||
--u2-txt-bg: var(--bg-u5);
|
||||
--u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
|
||||
--u2-tab-b1: var(--fg-weak);
|
||||
--u2-tab-1-fg: #fff;
|
||||
--u2-tab-1-bg: linear-gradient(to bottom, var(#353), var(--bg) 80%);
|
||||
--u2-tab-1-b1: #7c5;
|
||||
@@ -423,6 +427,12 @@ html.dz {
|
||||
--u2-b-fg: #fff;
|
||||
--u2-b1-bg: #3a3;
|
||||
--u2-b2-bg: #3a3;
|
||||
--u2-o-bg: var(--btn-bg);
|
||||
--u2-o-b1: var(--bg-u5);
|
||||
--u2-o-h-bg: var(--fg-weak);
|
||||
--u2-o-1-bg: var(--fg-weak);
|
||||
--u2-o-1-b1: var(--a);
|
||||
--u2-o-1h-bg: var(--a);
|
||||
--u2-inf-bg: #07a;
|
||||
--u2-inf-b1: #0be;
|
||||
--u2-ok-bg: #380;
|
||||
@@ -483,6 +493,7 @@ html.dz {
|
||||
--err-ts: #500;
|
||||
|
||||
text-shadow: none;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
}
|
||||
html.dy {
|
||||
--fg: #000;
|
||||
@@ -717,18 +728,22 @@ a:hover {
|
||||
html.y #files thead th {
|
||||
box-shadow: 0 1px 0 rgba(0,0,0,0.12);
|
||||
}
|
||||
html #files.hhpick thead th {
|
||||
color: #f7d;
|
||||
background: #000;
|
||||
box-shadow: .1em .2em 0 #f6c inset, -.1em -.1em 0 #f6c inset;
|
||||
}
|
||||
#files td {
|
||||
margin: 0;
|
||||
padding: .3em .5em;
|
||||
background: var(--bg);
|
||||
max-width: var(--file-td-w);
|
||||
word-wrap: break-word;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files tr:nth-child(2n) td {
|
||||
background: var(--row-alt);
|
||||
}
|
||||
#files td+td+td {
|
||||
max-width: 30em;
|
||||
overflow: hidden;
|
||||
}
|
||||
#files td+td {
|
||||
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
|
||||
}
|
||||
@@ -851,7 +866,7 @@ html.y #path a:hover {
|
||||
}
|
||||
.mdo,
|
||||
.mdo * {
|
||||
line-height: 1.4em;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
#srv_info,
|
||||
#srv_info2,
|
||||
@@ -1220,7 +1235,8 @@ html.y #widget.open {
|
||||
#wfm a.hide {
|
||||
display: none;
|
||||
}
|
||||
#files tbody tr.fcut td {
|
||||
#files tbody tr.fcut td,
|
||||
#ggrid>a.fcut {
|
||||
animation: fcut .5s ease-out;
|
||||
}
|
||||
@keyframes fcut {
|
||||
@@ -1387,6 +1403,9 @@ input[type="checkbox"]:checked+label {
|
||||
color: #0e0;
|
||||
color: var(--a);
|
||||
}
|
||||
html.dz input {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
}
|
||||
.opwide div>span>input+label {
|
||||
padding: .3em 0 .3em .3em;
|
||||
margin: 0 0 0 -.3em;
|
||||
@@ -1395,14 +1414,17 @@ input[type="checkbox"]:checked+label {
|
||||
.opview input.i {
|
||||
width: calc(100% - 16.2em);
|
||||
}
|
||||
input.drc_v,
|
||||
input.eq_gain {
|
||||
width: 3em;
|
||||
text-align: center;
|
||||
margin: 0 .6em;
|
||||
}
|
||||
#audio_drc table,
|
||||
#audio_eq table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
#audio_drc td,
|
||||
#audio_eq td {
|
||||
text-align: center;
|
||||
}
|
||||
@@ -1411,11 +1433,15 @@ input.eq_gain {
|
||||
display: block;
|
||||
padding: 0;
|
||||
}
|
||||
#au_drc,
|
||||
#au_eq {
|
||||
display: block;
|
||||
margin-top: .5em;
|
||||
padding: 1.3em .3em;
|
||||
}
|
||||
#au_drc {
|
||||
padding: .4em .3em;
|
||||
}
|
||||
#ico1 {
|
||||
cursor: pointer;
|
||||
}
|
||||
@@ -1456,6 +1482,8 @@ input.eq_gain {
|
||||
width: calc(100% - 2em);
|
||||
margin: .3em 0 0 1.4em;
|
||||
}
|
||||
@media (max-width: 130em) { #srch_form.tags #tq_raw { width: calc(100% - 34em) } }
|
||||
@media (max-width: 95em) { #srch_form.tags #tq_raw { width: calc(100% - 2em) } }
|
||||
#tq_raw td+td {
|
||||
width: 100%;
|
||||
}
|
||||
@@ -1567,6 +1595,7 @@ html.cz .btn {
|
||||
border-bottom: .2em solid #709;
|
||||
}
|
||||
html.dz .btn {
|
||||
font-size: 1em;
|
||||
box-shadow: 0 0 0 .1em #080 inset;
|
||||
}
|
||||
html.dz .tgl.btn.on {
|
||||
@@ -1610,6 +1639,12 @@ html.cz .tgl.btn.on {
|
||||
list-style: none;
|
||||
border-top: 1px solid var(--bg-u5);
|
||||
}
|
||||
#tree li.offline>a:first-child:before {
|
||||
content: '❌';
|
||||
position: absolute;
|
||||
margin-left: -.25em;
|
||||
z-index: 3;
|
||||
}
|
||||
#tree ul a.sel {
|
||||
background: #000;
|
||||
background: var(--bg-d3);
|
||||
@@ -1751,6 +1786,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
display: none;
|
||||
}
|
||||
.ghead {
|
||||
background: #fff;
|
||||
background: var(--bg-u2);
|
||||
border-radius: .3em;
|
||||
padding: .2em .5em;
|
||||
@@ -1781,6 +1817,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
padding: 0;
|
||||
}
|
||||
#rui {
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
position: fixed;
|
||||
top: 0;
|
||||
@@ -1837,6 +1874,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
||||
}
|
||||
#doc {
|
||||
overflow: visible;
|
||||
background: #fff;
|
||||
background: var(--bg);
|
||||
margin: -1em 0 .5em 0;
|
||||
padding: 1em 0 1em 0;
|
||||
@@ -1853,6 +1891,10 @@ html.y #doc {
|
||||
text-align: center;
|
||||
padding: .5em;
|
||||
}
|
||||
#docul li.bn span {
|
||||
font-weight: bold;
|
||||
color: var(--fg-max);
|
||||
}
|
||||
#doc.prism {
|
||||
padding-left: 3em;
|
||||
}
|
||||
@@ -2091,12 +2133,12 @@ html.y #bbox-overlay figcaption a {
|
||||
}
|
||||
.bbox-btn,
|
||||
#bbox-btns {
|
||||
opacity: 1;
|
||||
opacity: 1;
|
||||
animation: opacity .2s infinite ease-in-out;
|
||||
}
|
||||
.bbox-btn.off,
|
||||
#bbox-btns.off {
|
||||
opacity: 0;
|
||||
opacity: 0;
|
||||
}
|
||||
#bbox-overlay button {
|
||||
cursor: pointer;
|
||||
@@ -2366,7 +2408,7 @@ html.y #bbox-overlay figcaption a {
|
||||
display: block;
|
||||
}
|
||||
#u2bm sup {
|
||||
font-weight: bold;
|
||||
font-weight: bold;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
@@ -2465,7 +2507,7 @@ html.y #bbox-overlay figcaption a {
|
||||
width: 21em;
|
||||
}
|
||||
#u2cards {
|
||||
padding: 1em 1em .3em 1em;
|
||||
padding: 1em 1em .42em 1em;
|
||||
margin: 0 auto;
|
||||
white-space: nowrap;
|
||||
text-align: center;
|
||||
@@ -2473,14 +2515,14 @@ html.y #bbox-overlay figcaption a {
|
||||
min-width: 24em;
|
||||
}
|
||||
#u2cards.w {
|
||||
width: 44em;
|
||||
width: 48em;
|
||||
text-align: left;
|
||||
}
|
||||
#u2cards.ww {
|
||||
display: inline-block;
|
||||
}
|
||||
#u2etaw.w {
|
||||
width: 52em;
|
||||
width: 55em;
|
||||
text-align: right;
|
||||
margin: 2em auto -2.7em auto;
|
||||
}
|
||||
@@ -2490,7 +2532,8 @@ html.y #bbox-overlay figcaption a {
|
||||
#u2cards a {
|
||||
padding: .2em 1em;
|
||||
background: var(--u2-tab-bg);
|
||||
border: 1px solid rgba(128,128,128,0.8);
|
||||
border: 1px solid #999;
|
||||
border-color: var(--u2-tab-b1);
|
||||
border-width: 0 0 1px 0;
|
||||
}
|
||||
#u2cards a:first-child {
|
||||
@@ -2524,10 +2567,10 @@ html.y #bbox-overlay figcaption a {
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.w {
|
||||
width: 48em;
|
||||
width: 51em;
|
||||
}
|
||||
#u2conf.ww {
|
||||
width: 78em;
|
||||
width: 82em;
|
||||
}
|
||||
#u2conf.ww #u2c3w {
|
||||
width: 29em;
|
||||
@@ -2749,6 +2792,9 @@ html.c .opbox,
|
||||
html.a .opbox {
|
||||
margin: 1.5em 0 0 0;
|
||||
}
|
||||
html.dz .opview input.i {
|
||||
width: calc(100% - 18em);
|
||||
}
|
||||
html.c #tree,
|
||||
html.c #treeh,
|
||||
html.a #tree,
|
||||
@@ -2801,6 +2847,9 @@ html.a #u2btn {
|
||||
html.ay #u2btn {
|
||||
box-shadow: .4em .4em 0 #ccc;
|
||||
}
|
||||
html.dz #u2btn {
|
||||
letter-spacing: -.033em;
|
||||
}
|
||||
html.c #u2conf.ww #u2btn,
|
||||
html.a #u2conf.ww #u2btn {
|
||||
margin: -2em .5em -3em 0;
|
||||
@@ -2963,13 +3012,13 @@ html.b .btn {
|
||||
top: -.1em;
|
||||
}
|
||||
html.b #op_up2k.srch sup {
|
||||
color: #fc0;
|
||||
color: #fc0;
|
||||
}
|
||||
html.by #u2btn sup {
|
||||
color: #06b;
|
||||
color: #06b;
|
||||
}
|
||||
html.by #op_up2k.srch sup {
|
||||
color: #b70;
|
||||
color: #b70;
|
||||
}
|
||||
html.bz #u2cards a.act {
|
||||
box-shadow: 0 -.1em .2em var(--bg-d2);
|
||||
@@ -3018,6 +3067,16 @@ html.d #treepar {
|
||||
|
||||
|
||||
|
||||
@media (max-width: 32em) {
|
||||
#u2conf {
|
||||
font-size: .9em;
|
||||
}
|
||||
}
|
||||
@media (max-width: 28em) {
|
||||
#u2conf {
|
||||
font-size: .8em;
|
||||
}
|
||||
}
|
||||
@media (min-width: 70em) {
|
||||
#barpos,
|
||||
#barbuf {
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
|
||||
{%- if css %}
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
<div id="op_player" class="opview opbox opwide"></div>
|
||||
|
||||
<div id="op_bup" class="opview opbox act">
|
||||
<div id="op_bup" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<div id="u2err"></div>
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="bput" />
|
||||
@@ -39,7 +39,7 @@
|
||||
<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
|
||||
</div>
|
||||
|
||||
<div id="op_mkdir" class="opview opbox act">
|
||||
<div id="op_mkdir" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
<input type="hidden" name="act" value="mkdir" />
|
||||
📂<input type="text" name="name" class="i" placeholder="awesome mix vol.1">
|
||||
@@ -55,7 +55,7 @@
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div id="op_msg" class="opview opbox act">
|
||||
<div id="op_msg" class="opview opbox {% if not ls0 %}act{% endif %}">
|
||||
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||
📟<input type="text" name="msg" class="i" placeholder="lorem ipsum dolor sit amet">
|
||||
<input type="submit" value="send msg to srv log">
|
||||
@@ -135,44 +135,27 @@
|
||||
|
||||
<script>
|
||||
var SR = {{ r|tojson }},
|
||||
CGV = {{ cgv|tojson }},
|
||||
TS = "{{ ts }}",
|
||||
acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
dgrid = {{ dgrid|tojson }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
srvinf = "{{ srv_info }}",
|
||||
s_name = "{{ s_name }}",
|
||||
lang = "{{ lang }}",
|
||||
dfavico = "{{ favico }}",
|
||||
def_hcols = {{ def_hcols|tojson }},
|
||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||
have_acode = {{ have_acode|tojson }},
|
||||
have_mv = {{ have_mv|tojson }},
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost }},
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
sb_md = "{{ sb_md }}",
|
||||
sb_lg = "{{ sb_lg }}",
|
||||
lifetime = {{ lifetime }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
idxh = {{ idxh }},
|
||||
frand = {{ frand|tojson }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.className = localStorage.theme || dtheme;
|
||||
document.documentElement.className = localStorage.cpp_thm || dtheme;
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}?_={{ ts }}"></script>
|
||||
<script src="{{ js }}_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body>
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
</head>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
<span id="lno">L#</span>
|
||||
{%- else %}
|
||||
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}edit2" id="edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
|
||||
@@ -52,7 +52,7 @@ var img_load = (function () {
|
||||
var r = {};
|
||||
r.callbacks = [];
|
||||
|
||||
function fire() {
|
||||
var fire = function () {
|
||||
for (var a = 0; a < r.callbacks.length; a++)
|
||||
r.callbacks[a]();
|
||||
}
|
||||
@@ -212,6 +212,8 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
try {
|
||||
var md_html = marked.parse(md_text, marked_opts);
|
||||
if (!have_emp)
|
||||
md_html = DOMPurify.sanitize(md_html);
|
||||
}
|
||||
catch (ex) {
|
||||
if (ext)
|
||||
@@ -470,7 +472,7 @@ img_load.callbacks = [toc.refresh];
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = true;
|
||||
function onresize() {
|
||||
var onresize = function () {
|
||||
if (window.matchMedia)
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
|
||||
@@ -483,7 +485,7 @@ var redraw = (function () {
|
||||
onscroll();
|
||||
}
|
||||
|
||||
function onscroll() {
|
||||
var onscroll = function () {
|
||||
toc.refresh();
|
||||
}
|
||||
|
||||
@@ -505,6 +507,13 @@ dom_navtgl.onclick = function () {
|
||||
redraw();
|
||||
};
|
||||
|
||||
if (!HTTPS && location.hostname != '127.0.0.1') try {
|
||||
ebi('edit2').onclick = function (e) {
|
||||
toast.err(0, "the fancy editor is only available over https");
|
||||
return ev(e);
|
||||
}
|
||||
} catch (ex) { }
|
||||
|
||||
if (sread('hidenav') == 1)
|
||||
dom_navtgl.onclick();
|
||||
|
||||
|
||||
@@ -92,7 +92,7 @@ var action_stack = null;
|
||||
var nlines = 0;
|
||||
var draw_md = (function () {
|
||||
var delay = 1;
|
||||
function draw_md() {
|
||||
var draw_md = function () {
|
||||
var t0 = Date.now();
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
@@ -135,7 +135,7 @@ img_load.callbacks = [function () {
|
||||
|
||||
// resize handler
|
||||
redraw = (function () {
|
||||
function onresize() {
|
||||
var onresize = function () {
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
dom_wrap.style.top = y;
|
||||
dom_swrap.style.top = y;
|
||||
@@ -143,12 +143,12 @@ redraw = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}
|
||||
function setsbs() {
|
||||
var setsbs = function () {
|
||||
dom_wrap.className = '';
|
||||
dom_swrap.className = '';
|
||||
onresize();
|
||||
}
|
||||
function modetoggle() {
|
||||
var modetoggle = function () {
|
||||
var mode = dom_nsbs.innerHTML;
|
||||
dom_nsbs.innerHTML = mode == 'editor' ? 'preview' : 'editor';
|
||||
mode += ' single';
|
||||
@@ -172,7 +172,7 @@ redraw = (function () {
|
||||
(function () {
|
||||
var skip_src = false, skip_pre = false;
|
||||
|
||||
function scroll(src, srcmap, dst, dstmap) {
|
||||
var scroll = function (src, srcmap, dst, dstmap) {
|
||||
var y = src.scrollTop;
|
||||
if (y < 8) {
|
||||
dst.scrollTop = 0;
|
||||
@@ -278,6 +278,7 @@ function Modpoll() {
|
||||
return;
|
||||
|
||||
var new_md = this.responseText,
|
||||
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
|
||||
server_ref = server_md.replace(/\r/g, ''),
|
||||
server_now = new_md.replace(/\r/g, '');
|
||||
|
||||
@@ -285,6 +286,7 @@ function Modpoll() {
|
||||
if (r.initial && server_ref != server_now)
|
||||
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
|
||||
dom_src.value = server_md = new_md;
|
||||
last_modified = new_mt;
|
||||
draw_md();
|
||||
}, null);
|
||||
|
||||
@@ -898,12 +900,12 @@ var set_lno = (function () {
|
||||
pv = null,
|
||||
lno = ebi('lno');
|
||||
|
||||
function poke() {
|
||||
var poke = function () {
|
||||
clearTimeout(t);
|
||||
t = setTimeout(fire, 20);
|
||||
}
|
||||
|
||||
function fire() {
|
||||
var fire = function () {
|
||||
try {
|
||||
clearTimeout(t);
|
||||
|
||||
@@ -928,8 +930,14 @@ var set_lno = (function () {
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
ev = ev || window.event;
|
||||
var keydown = function (ev) {
|
||||
if (!ev && window.event) {
|
||||
ev = window.event;
|
||||
if (localStorage.dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
}
|
||||
var kc = ev.code || ev.keyCode || ev.which,
|
||||
editing = document.activeElement == dom_src;
|
||||
|
||||
@@ -1001,7 +1009,7 @@ var set_lno = (function () {
|
||||
md_home(ev.shiftKey);
|
||||
return false;
|
||||
}
|
||||
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
||||
if (!ev.shiftKey && (ev.code.endsWith("Enter") || kc == 13)) {
|
||||
return md_newline();
|
||||
}
|
||||
if (!ev.shiftKey && kc == 8) {
|
||||
@@ -1056,7 +1064,7 @@ action_stack = (function () {
|
||||
var ignore = false;
|
||||
var ref = dom_src.value;
|
||||
|
||||
function diff(from, to, cpos) {
|
||||
var diff = function (from, to, cpos) {
|
||||
if (from === to)
|
||||
return null;
|
||||
|
||||
@@ -1087,14 +1095,14 @@ action_stack = (function () {
|
||||
};
|
||||
}
|
||||
|
||||
function undiff(from, change) {
|
||||
var undiff = function (from, change) {
|
||||
return {
|
||||
txt: from.substring(0, change.car) + change.txt + from.substring(change.cdr),
|
||||
cpos: change.cpos
|
||||
};
|
||||
}
|
||||
|
||||
function apply(src, dst) {
|
||||
var apply = function (src, dst) {
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
|
||||
if (src.length === 0)
|
||||
@@ -1118,7 +1126,7 @@ action_stack = (function () {
|
||||
return true;
|
||||
}
|
||||
|
||||
function schedule_push() {
|
||||
var schedule_push = function () {
|
||||
if (ignore) {
|
||||
ignore = false;
|
||||
return;
|
||||
@@ -1129,7 +1137,7 @@ action_stack = (function () {
|
||||
sched_timer = setTimeout(push, 500);
|
||||
}
|
||||
|
||||
function undo() {
|
||||
var undo = function () {
|
||||
if (hist.re.length == 0) {
|
||||
clearTimeout(sched_timer);
|
||||
push();
|
||||
@@ -1137,11 +1145,11 @@ action_stack = (function () {
|
||||
return apply(hist.un, hist.re);
|
||||
}
|
||||
|
||||
function redo() {
|
||||
var redo = function () {
|
||||
return apply(hist.re, hist.un);
|
||||
}
|
||||
|
||||
function push() {
|
||||
var push = function () {
|
||||
var newtxt = dom_src.value;
|
||||
var change = diff(ref, newtxt, sched_cpos);
|
||||
if (change !== null)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
@@ -42,7 +42,7 @@
|
||||
{%- if redir %}
|
||||
<script>
|
||||
setTimeout(function() {
|
||||
window.location.replace("{{ redir }}");
|
||||
location.replace("{{ redir }}");
|
||||
}, 1000);
|
||||
</script>
|
||||
{%- endif %}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ svcname }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
@@ -110,7 +110,7 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
|
||||
document.documentElement.className=localStorage.cpp_thm||"{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
|
||||
@@ -35,7 +35,7 @@ var Ls = {
|
||||
"v2": "use this server as a local HDD$N$NWARNING: this will show your password!",
|
||||
}
|
||||
},
|
||||
d = Ls[sread("lang") || lang];
|
||||
d = Ls[sread("cpp_lang", ["eng", "nor"]) || lang] || Ls.eng || Ls.nor;
|
||||
|
||||
for (var k in (d || {})) {
|
||||
var f = k.slice(-1),
|
||||
|
||||
@@ -3,13 +3,14 @@
|
||||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>{{ args.doctitle }} @ {{ args.name }}</title>
|
||||
<title>{{ s_doctitle }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
@@ -48,9 +49,13 @@
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if s %}
|
||||
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<pre>
|
||||
@@ -73,10 +78,13 @@
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
{% endif %}
|
||||
|
||||
<ul>
|
||||
{% if s %}
|
||||
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>or the emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -123,8 +131,14 @@
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if args.ftps %}
|
||||
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||
<pre>
|
||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
@@ -145,8 +159,14 @@
|
||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
||||
{% endif %}
|
||||
<ul>
|
||||
{% if args.ftps %}
|
||||
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||
{% endif %}
|
||||
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||
</ul>
|
||||
<p>emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
@@ -178,7 +198,7 @@
|
||||
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
|
||||
<ul><li>if you are on LAN (or just dont have valid certificates), add <code>-td</code></li></ul>
|
||||
{% endif %}
|
||||
<p>
|
||||
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
|
||||
@@ -218,7 +238,7 @@ var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ args.theme }}";
|
||||
document.documentElement.className=localStorage.cpp_thm||"{{ args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
|
||||
@@ -1,9 +1,26 @@
|
||||
:root {
|
||||
--fg: #ccc;
|
||||
--fg-max: #fff;
|
||||
--bg-u2: #2b2b2b;
|
||||
--bg-u5: #444;
|
||||
}
|
||||
html.y {
|
||||
--fg: #222;
|
||||
--fg-max: #000;
|
||||
--bg-u2: #f7f7f7;
|
||||
--bg-u5: #ccc;
|
||||
}
|
||||
html.bz {
|
||||
--bg-u2: #202231;
|
||||
}
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
font-display: swap;
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
text-size-adjust: 100%;
|
||||
-webkit-text-size-adjust: 100%;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#tt, #toast {
|
||||
@@ -12,7 +29,9 @@ html {
|
||||
max-width: min(34em, 90%);
|
||||
max-width: min(34em, calc(100% - 7em));
|
||||
color: #ddd;
|
||||
color: var(--fg);
|
||||
background: #333;
|
||||
background: var(--bg-u2);
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #111;
|
||||
border-radius: .4em;
|
||||
@@ -73,7 +92,7 @@ html {
|
||||
#toastb {
|
||||
max-height: 70vh;
|
||||
overflow-y: auto;
|
||||
padding: 1px;
|
||||
padding: .1em;
|
||||
}
|
||||
#toast.scroll #toastb {
|
||||
overflow-y: scroll;
|
||||
@@ -157,9 +176,10 @@ html {
|
||||
#modalc code,
|
||||
#tt code {
|
||||
color: #eee;
|
||||
color: var(--fg-max);
|
||||
background: #444;
|
||||
background: var(--bg-u5);
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
@@ -167,22 +187,15 @@ html {
|
||||
color: #f6a;
|
||||
}
|
||||
html.y #tt {
|
||||
color: #333;
|
||||
background: #fff;
|
||||
border-color: #888 #000 #777 #000;
|
||||
}
|
||||
html.bz #tt {
|
||||
background: #202231;
|
||||
border-color: #3b3f58;
|
||||
}
|
||||
html.y #tt,
|
||||
html.y #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
html.y #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
#modalc code {
|
||||
color: #060;
|
||||
background: transparent;
|
||||
@@ -320,6 +333,9 @@ html.y .btn:focus {
|
||||
box-shadow: 0 .1em .2em #037 inset;
|
||||
outline: #037 solid .1em;
|
||||
}
|
||||
input[type="submit"] {
|
||||
cursor: pointer;
|
||||
}
|
||||
input[type="text"]:focus,
|
||||
input:not([type]):focus,
|
||||
textarea:focus {
|
||||
|
||||
@@ -588,7 +588,7 @@ function U2pvis(act, btns, uc, st) {
|
||||
btns[a].onclick = function (e) {
|
||||
ev(e);
|
||||
var newtab = this.getAttribute('act');
|
||||
function go() {
|
||||
var go = function () {
|
||||
for (var b = 0; b < btns.length; b++) {
|
||||
btns[b].className = (
|
||||
btns[b].getAttribute('act') == newtab) ? 'act' : '';
|
||||
@@ -723,7 +723,7 @@ function Donut(uc, st) {
|
||||
|
||||
function strobe() {
|
||||
var txt = strobes.pop();
|
||||
wintitle(txt);
|
||||
wintitle(txt, false);
|
||||
if (!txt)
|
||||
clearInterval(tstrober);
|
||||
}
|
||||
@@ -807,7 +807,7 @@ function up2k_init(subtle) {
|
||||
function init_deps() {
|
||||
if (!loading_deps && !got_deps()) {
|
||||
var fn = 'sha512.' + sha_js + '.js',
|
||||
m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||
m = L.u_https1 + ' <a href="' + (location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||
|
||||
showmodal('<h1>loading ' + fn + '</h1>');
|
||||
import_js(SR + '/.cpr/deps/' + fn, unmodal);
|
||||
@@ -852,7 +852,7 @@ function up2k_init(subtle) {
|
||||
|
||||
setmsg(suggest_up2k, 'msg');
|
||||
|
||||
var parallel_uploads = icfg_get('nthread'),
|
||||
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||
uc = {},
|
||||
fdom_ctr = 0,
|
||||
biggest_file = 0;
|
||||
@@ -861,6 +861,7 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||
bcfg_bind(uc, 'u2ts', 'u2ts', !u2ts.endsWith('u'), set_u2ts, false);
|
||||
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||
|
||||
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||
@@ -971,7 +972,7 @@ function up2k_init(subtle) {
|
||||
if (++nenters <= 0)
|
||||
nenters = 1;
|
||||
|
||||
if (onover.bind(this)(e))
|
||||
if (onover.call(this, e))
|
||||
return true;
|
||||
|
||||
var mup, up = QS('#up_zd');
|
||||
@@ -995,16 +996,29 @@ function up2k_init(subtle) {
|
||||
function onoverb(e) {
|
||||
// zones are alive; disable cuo2duo branch
|
||||
document.body.ondragover = document.body.ondrop = null;
|
||||
return onover.bind(this)(e);
|
||||
return onover.call(this, e);
|
||||
}
|
||||
function onover(e) {
|
||||
return onovercmn(this, e, false);
|
||||
}
|
||||
function onoverbtn(e) {
|
||||
return onovercmn(this, e, true);
|
||||
}
|
||||
function onovercmn(self, e, btn) {
|
||||
try {
|
||||
var ok = false, dt = e.dataTransfer.types;
|
||||
for (var a = 0; a < dt.length; a++)
|
||||
if (dt[a] == 'Files')
|
||||
ok = true;
|
||||
else if (dt[a] == 'text/uri-list')
|
||||
return true;
|
||||
else if (dt[a] == 'text/uri-list') {
|
||||
if (btn) {
|
||||
ok = true;
|
||||
if (toast.txt == L.u_uri)
|
||||
toast.hide();
|
||||
}
|
||||
else
|
||||
return toast.inf(10, L.u_uri) || true;
|
||||
}
|
||||
|
||||
if (!ok)
|
||||
return true;
|
||||
@@ -1020,13 +1034,16 @@ function up2k_init(subtle) {
|
||||
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
|
||||
return modal.alert('your browser does not support drag-and-drop uploading');
|
||||
}
|
||||
if (btn)
|
||||
return;
|
||||
|
||||
clmod(ebi('drops'), 'vis', 1);
|
||||
var v = this.getAttribute('v');
|
||||
var v = self.getAttribute('v');
|
||||
if (v)
|
||||
clmod(ebi(v), 'hl', 1);
|
||||
}
|
||||
function offdrag(e) {
|
||||
ev(e);
|
||||
noope(e);
|
||||
|
||||
var v = this.getAttribute('v');
|
||||
if (v)
|
||||
@@ -1045,6 +1062,8 @@ function up2k_init(subtle) {
|
||||
document.body.ondragleave = offdrag;
|
||||
document.body.ondragover = onover;
|
||||
document.body.ondrop = gotfile;
|
||||
ebi('u2btn').ondrop = gotfile;
|
||||
ebi('u2btn').ondragover = onoverbtn;
|
||||
|
||||
var drops = [ebi('up_dz'), ebi('srch_dz')];
|
||||
for (var a = 0; a < 2; a++) {
|
||||
@@ -1088,7 +1107,7 @@ function up2k_init(subtle) {
|
||||
function gotfile(e) {
|
||||
ev(e);
|
||||
nenters = 0;
|
||||
offdrag.bind(this)();
|
||||
offdrag.call(this);
|
||||
var dz = this && this.getAttribute('id');
|
||||
if (!dz && e && e.clientY)
|
||||
// cuo2duo fallback
|
||||
@@ -1132,7 +1151,7 @@ function up2k_init(subtle) {
|
||||
dst = good_files;
|
||||
|
||||
if (is_itemlist) {
|
||||
if (fobj.kind !== 'file')
|
||||
if (fobj.kind !== 'file' && fobj.type !== 'text/uri-list')
|
||||
continue;
|
||||
|
||||
try {
|
||||
@@ -1144,6 +1163,8 @@ function up2k_init(subtle) {
|
||||
}
|
||||
catch (ex) { }
|
||||
fobj = fobj.getAsFile();
|
||||
if (!fobj)
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
if (fobj.size < 1)
|
||||
@@ -1319,6 +1340,7 @@ function up2k_init(subtle) {
|
||||
|
||||
function up_them(good_files) {
|
||||
start_actx();
|
||||
draw_turbo();
|
||||
var evpath = get_evpath(),
|
||||
draw_each = good_files.length < 50;
|
||||
|
||||
@@ -1341,7 +1363,7 @@ function up2k_init(subtle) {
|
||||
name = good_files[a][1],
|
||||
fdir = evpath,
|
||||
now = Date.now(),
|
||||
lmod = fobj.lastModified || now,
|
||||
lmod = uc.u2ts ? (fobj.lastModified || now) : 0,
|
||||
ofs = name.lastIndexOf('/') + 1;
|
||||
|
||||
if (ofs) {
|
||||
@@ -1385,7 +1407,7 @@ function up2k_init(subtle) {
|
||||
|
||||
pvis.addfile([
|
||||
uc.fsearch ? esc(entry.name) : linksplit(
|
||||
entry.purl + uricom_enc(entry.name)).join(' '),
|
||||
entry.purl + uricom_enc(entry.name)).join(' / '),
|
||||
'📐 ' + L.u_hashing,
|
||||
''
|
||||
], entry.size, draw_each);
|
||||
@@ -1568,7 +1590,7 @@ function up2k_init(subtle) {
|
||||
return;
|
||||
|
||||
st.oserr = true;
|
||||
var msg = HTTPS ? L.u_emtleak3 : L.u_emtleak2.format((window.location + '').replace(':', 's:'));
|
||||
var msg = HTTPS ? L.u_emtleak3 : L.u_emtleak2.format((location + '').replace(':', 's:'));
|
||||
modal.alert(L.u_emtleak1 + msg + (CHROME ? L.u_emtleakc : FIREFOX ? L.u_emtleakf : ''));
|
||||
}
|
||||
|
||||
@@ -1634,11 +1656,11 @@ function up2k_init(subtle) {
|
||||
var running = false,
|
||||
was_busy = false;
|
||||
|
||||
function defer() {
|
||||
var defer = function () {
|
||||
running = false;
|
||||
}
|
||||
|
||||
function taskerd() {
|
||||
var taskerd = function () {
|
||||
if (running)
|
||||
return;
|
||||
|
||||
@@ -1668,7 +1690,7 @@ function up2k_init(subtle) {
|
||||
is_busy = st.todo.handshake.length;
|
||||
try {
|
||||
if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused))
|
||||
treectl.goto(get_evpath());
|
||||
treectl.goto();
|
||||
}
|
||||
catch (ex) { }
|
||||
}
|
||||
@@ -1936,7 +1958,7 @@ function up2k_init(subtle) {
|
||||
st.bytes.hashed += cdr - car;
|
||||
st.etac.h++;
|
||||
|
||||
function orz(e) {
|
||||
var orz = function (e) {
|
||||
bpend--;
|
||||
segm_next();
|
||||
hash_calc(nch, e.target.result);
|
||||
@@ -2140,6 +2162,9 @@ function up2k_init(subtle) {
|
||||
|
||||
function exec_head() {
|
||||
var t = st.todo.head.shift();
|
||||
if (t.done)
|
||||
return console.log('done; skip head1', t.name, t);
|
||||
|
||||
st.busy.head.push(t);
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
@@ -2152,6 +2177,9 @@ function up2k_init(subtle) {
|
||||
st.todo.head.unshift(t);
|
||||
};
|
||||
function orz(e) {
|
||||
if (t.done)
|
||||
return console.log('done; skip head2', t.name, t);
|
||||
|
||||
var ok = false;
|
||||
if (xhr.status == 200) {
|
||||
var srv_sz = xhr.getResponseHeader('Content-Length'),
|
||||
@@ -2198,6 +2226,9 @@ function up2k_init(subtle) {
|
||||
keepalive = t.keepalive,
|
||||
me = Date.now();
|
||||
|
||||
if (t.done)
|
||||
return console.log('done; skip hs', t.name, t);
|
||||
|
||||
st.busy.handshake.push(t);
|
||||
t.keepalive = undefined;
|
||||
t.t_busied = me;
|
||||
@@ -2207,10 +2238,9 @@ function up2k_init(subtle) {
|
||||
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onerror = function () {
|
||||
if (t.t_busied != me) {
|
||||
console.log('zombie handshake onerror,', t.name, t);
|
||||
return;
|
||||
}
|
||||
if (t.t_busied != me) // t.done ok
|
||||
return console.log('zombie handshake onerror', t.name, t);
|
||||
|
||||
if (!toast.visible)
|
||||
toast.warn(9.98, L.u_eneths + "\n\nfile: " + t.name, t);
|
||||
|
||||
@@ -2219,11 +2249,10 @@ function up2k_init(subtle) {
|
||||
st.todo.handshake.unshift(t);
|
||||
t.keepalive = keepalive;
|
||||
};
|
||||
function orz(e) {
|
||||
if (t.t_busied != me) {
|
||||
console.log('zombie handshake onload,', t.name, t);
|
||||
return;
|
||||
}
|
||||
var orz = function (e) {
|
||||
if (t.t_busied != me || t.done)
|
||||
return console.log('zombie handshake onload', t.name, t);
|
||||
|
||||
if (xhr.status == 200) {
|
||||
t.t_handshake = Date.now();
|
||||
if (keepalive) {
|
||||
@@ -2255,7 +2284,7 @@ function up2k_init(subtle) {
|
||||
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
||||
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
||||
|
||||
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||
msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||
}
|
||||
msg = msg.join('<br />\n');
|
||||
}
|
||||
@@ -2289,7 +2318,7 @@ function up2k_init(subtle) {
|
||||
url += '?k=' + fk;
|
||||
}
|
||||
|
||||
pvis.seth(t.n, 0, linksplit(url).join(' '));
|
||||
pvis.seth(t.n, 0, linksplit(url).join(' / '));
|
||||
}
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
@@ -2373,15 +2402,12 @@ function up2k_init(subtle) {
|
||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||
|
||||
var err = "",
|
||||
rsp = (xhr.responseText + ''),
|
||||
rsp = unpre(xhr.responseText),
|
||||
ofs = rsp.lastIndexOf('\nURL: ');
|
||||
|
||||
if (ofs !== -1)
|
||||
rsp = rsp.slice(0, ofs);
|
||||
|
||||
if (rsp.indexOf('<pre>') === 0)
|
||||
rsp = rsp.slice(5);
|
||||
|
||||
if (rsp.indexOf('rate-limit ') !== -1) {
|
||||
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
||||
console.log("rate-limit: " + penalty);
|
||||
@@ -2400,7 +2426,7 @@ function up2k_init(subtle) {
|
||||
err = rsp;
|
||||
ofs = err.indexOf('\n/');
|
||||
if (ofs !== -1) {
|
||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' / ');
|
||||
}
|
||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||
t.rechecks = 0;
|
||||
@@ -2483,14 +2509,17 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function exec_upload() {
|
||||
var upt = st.todo.upload.shift();
|
||||
var upt = st.todo.upload.shift(),
|
||||
t = st.files[upt.nfile],
|
||||
npart = upt.npart,
|
||||
tries = 0;
|
||||
|
||||
if (t.done)
|
||||
return console.log('done; skip chunk', t.name, t);
|
||||
|
||||
st.busy.upload.push(upt);
|
||||
st.nfile.upload = upt.nfile;
|
||||
|
||||
var npart = upt.npart,
|
||||
t = st.files[upt.nfile],
|
||||
tries = 0;
|
||||
|
||||
if (!t.t_uploading)
|
||||
t.t_uploading = Date.now();
|
||||
|
||||
@@ -2503,8 +2532,8 @@ function up2k_init(subtle) {
|
||||
if (cdr >= t.size)
|
||||
cdr = t.size;
|
||||
|
||||
function orz(xhr) {
|
||||
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
|
||||
var orz = function (xhr) {
|
||||
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||
if (txt.indexOf('upload blocked by x') + 1) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
@@ -2532,7 +2561,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
orz2(xhr);
|
||||
}
|
||||
function orz2(xhr) {
|
||||
var orz2 = function (xhr) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
if (!t.postlist.length) {
|
||||
@@ -2590,7 +2619,7 @@ function up2k_init(subtle) {
|
||||
wpx = window.innerWidth,
|
||||
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
||||
wem = wpx * 1.0 / fpx,
|
||||
wide = wem > 54 ? 'w' : '',
|
||||
wide = wem > 57 ? 'w' : '',
|
||||
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
||||
btn = ebi('u2btn');
|
||||
|
||||
@@ -2599,7 +2628,7 @@ function up2k_init(subtle) {
|
||||
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
||||
}
|
||||
|
||||
wide = wem > 82 ? 'ww' : wide;
|
||||
wide = wem > 86 ? 'ww' : wide;
|
||||
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
||||
var its = [ebi('u2etaw'), ebi('u2cards')];
|
||||
if (its[0].parentNode !== parent) {
|
||||
@@ -2610,8 +2639,7 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
}
|
||||
window.addEventListener('resize', onresize);
|
||||
onresize();
|
||||
onresize100.add(onresize, true);
|
||||
|
||||
if (MOBILE) {
|
||||
// android-chrome wobbles for a bit; firefox / iOS-safari are OK
|
||||
@@ -2657,7 +2685,11 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
parallel_uploads = v;
|
||||
swrite('nthread', v);
|
||||
if (v == u2j)
|
||||
localStorage.removeItem('nthread');
|
||||
else
|
||||
swrite('nthread', v);
|
||||
|
||||
clmod(obj, 'err');
|
||||
return;
|
||||
}
|
||||
@@ -2671,7 +2703,7 @@ function up2k_init(subtle) {
|
||||
parallel_uploads = 16;
|
||||
|
||||
obj.value = parallel_uploads;
|
||||
bumpthread({ "target": 1 })
|
||||
bumpthread({ "target": 1 });
|
||||
}
|
||||
|
||||
function tgl_fsearch() {
|
||||
@@ -2679,6 +2711,16 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
function draw_turbo() {
|
||||
if (turbolvl < 0 && uc.turbo) {
|
||||
bcfg_set('u2turbo', uc.turbo = false);
|
||||
toast.err(10, L.u_turbo_c);
|
||||
}
|
||||
|
||||
if (uc.turbo && !has(perms, 'read')) {
|
||||
bcfg_set('u2turbo', uc.turbo = false);
|
||||
toast.warn(30, L.u_turbo_g);
|
||||
}
|
||||
|
||||
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
||||
html = ebi('u2foot').innerHTML;
|
||||
|
||||
@@ -2777,6 +2819,8 @@ function up2k_init(subtle) {
|
||||
|
||||
function set_fsearch(new_state) {
|
||||
var fixed = false,
|
||||
persist = new_state !== undefined,
|
||||
preferred = bcfg_get('fsearch', undefined),
|
||||
can_write = false;
|
||||
|
||||
if (!ebi('fsearch')) {
|
||||
@@ -2793,8 +2837,14 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
|
||||
if (new_state === undefined)
|
||||
new_state = preferred;
|
||||
|
||||
if (new_state !== undefined)
|
||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||
if (persist)
|
||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||
else
|
||||
bcfg_upd_ui('fsearch', uc.fsearch = new_state);
|
||||
|
||||
try {
|
||||
clmod(ebi('u2c3w'), 's', !can_write);
|
||||
@@ -2817,6 +2867,9 @@ function up2k_init(subtle) {
|
||||
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
||||
ebi('u2mu').style.display = potato ? '' : 'none';
|
||||
|
||||
if (u2ts.startsWith('f') || !sread('u2ts'))
|
||||
uc.u2ts = bcfg_upd_ui('u2ts', !u2ts.endsWith('u'));
|
||||
|
||||
draw_turbo();
|
||||
draw_life();
|
||||
onresize();
|
||||
@@ -2841,12 +2894,24 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
|
||||
function set_u2sort() {
|
||||
function set_u2sort(en) {
|
||||
if (u2sort.indexOf('f') < 0)
|
||||
return;
|
||||
|
||||
bcfg_set('u2sort', uc.az = u2sort.indexOf('n') + 1);
|
||||
localStorage.removeItem('u2sort');
|
||||
var fen = uc.az = u2sort.indexOf('n') + 1;
|
||||
bcfg_upd_ui('u2sort', fen);
|
||||
if (en != fen)
|
||||
toast.warn(10, L.ul_btnlk);
|
||||
}
|
||||
|
||||
function set_u2ts(en) {
|
||||
if (u2ts.indexOf('f') < 0)
|
||||
return;
|
||||
|
||||
var fen = !u2ts.endsWith('u');
|
||||
bcfg_upd_ui('u2ts', fen);
|
||||
if (en != fen)
|
||||
toast.warn(10, L.ul_btnlk);
|
||||
}
|
||||
|
||||
function set_hashw() {
|
||||
@@ -2944,7 +3009,7 @@ ebi('ico1').onclick = function () {
|
||||
if (QS('#op_up2k.act'))
|
||||
goto_up2k();
|
||||
|
||||
apply_perms({ "perms": perms, "frand": frand });
|
||||
apply_perms({ "perms": perms, "frand": frand, "u2ts": u2ts });
|
||||
|
||||
|
||||
(function () {
|
||||
|
||||
@@ -6,13 +6,19 @@ if (!window.console || !console.log)
|
||||
};
|
||||
|
||||
|
||||
if (window.CGV)
|
||||
for (var k in CGV)
|
||||
window[k] = CGV[k];
|
||||
|
||||
|
||||
var wah = '',
|
||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||
CB = '?_=' + Date.now(),
|
||||
R = SR.slice(1),
|
||||
RS = R ? "/" + R : "",
|
||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||
HTTPS = (window.location + '').indexOf('https:') === 0,
|
||||
HTTPS = ('' + location).indexOf('https:') === 0,
|
||||
TOUCH = 'ontouchstart' in window,
|
||||
MOBILE = TOUCH,
|
||||
CHROME = !!window.chrome,
|
||||
@@ -139,29 +145,35 @@ catch (ex) {
|
||||
}
|
||||
var crashed = false, ignexd = {}, evalex_fatal = false;
|
||||
function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
if ((msg + '').indexOf('ResizeObserver') + 1)
|
||||
msg = String(msg);
|
||||
url = String(url);
|
||||
|
||||
if (msg.indexOf('ResizeObserver') + 1)
|
||||
return; // chrome issue 809574 (benign, from <video>)
|
||||
|
||||
if ((msg + '').indexOf('l2d.js') + 1)
|
||||
if (msg.indexOf('l2d.js') + 1)
|
||||
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
|
||||
|
||||
if (!/\.js($|\?)/.exec('' + url))
|
||||
if (!/\.js($|\?)/.exec(url))
|
||||
return; // chrome debugger
|
||||
|
||||
if ((url + '').indexOf(' > eval') + 1 && !evalex_fatal)
|
||||
if (url.indexOf(' > eval') + 1 && !evalex_fatal)
|
||||
return; // md timer
|
||||
|
||||
var ekey = url + '\n' + lineNo + '\n' + msg;
|
||||
if (ignexd[ekey] || crashed)
|
||||
return;
|
||||
|
||||
if (url.indexOf('deps/marked.js') + 1 && !window.WebAssembly)
|
||||
return; // ff<52
|
||||
|
||||
crashed = true;
|
||||
window.onerror = undefined;
|
||||
var html = [
|
||||
'<h1>you hit a bug!</h1>',
|
||||
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a></p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
|
||||
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
|
||||
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(msg).replace(/\n/g, '<br />') + '</p>',
|
||||
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
|
||||
];
|
||||
|
||||
@@ -225,7 +237,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
'#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
|
||||
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
|
||||
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
|
||||
'#exbox a{text-decoration:underline;color:#fc0} ' +
|
||||
'#exbox a{text-decoration:underline;color:#fc0;background:#222;border:none} ' +
|
||||
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
|
||||
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
|
||||
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
|
||||
@@ -265,7 +277,13 @@ function anymod(e, shift_ok) {
|
||||
|
||||
|
||||
function ev(e) {
|
||||
e = e || window.event;
|
||||
if (!e && window.event) {
|
||||
e = window.event;
|
||||
if (localStorage.dev_fbw == 1) {
|
||||
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||
console.error('using window.event');
|
||||
}
|
||||
}
|
||||
if (!e)
|
||||
return;
|
||||
|
||||
@@ -284,7 +302,7 @@ function ev(e) {
|
||||
|
||||
|
||||
function noope(e) {
|
||||
ev(e);
|
||||
try { ev(e); } catch (ex) { }
|
||||
}
|
||||
|
||||
|
||||
@@ -353,13 +371,13 @@ catch (ex) {
|
||||
}
|
||||
|
||||
// https://stackoverflow.com/a/950146
|
||||
function import_js(url, cb) {
|
||||
function import_js(url, cb, ecb) {
|
||||
var head = document.head || document.getElementsByTagName('head')[0];
|
||||
var script = mknod('script');
|
||||
script.type = 'text/javascript';
|
||||
script.src = url;
|
||||
script.src = url + '?_=' + (window.TS || 'a');
|
||||
script.onload = cb;
|
||||
script.onerror = function () {
|
||||
script.onerror = ecb || function () {
|
||||
var m = 'Failed to load module:\n' + url;
|
||||
console.log(m);
|
||||
toast.err(0, m);
|
||||
@@ -368,6 +386,15 @@ function import_js(url, cb) {
|
||||
}
|
||||
|
||||
|
||||
function unsmart(txt) {
|
||||
return !IPHONE ? txt : (txt.
|
||||
replace(/[\u2014]/g, "--").
|
||||
replace(/[\u2022]/g, "*").
|
||||
replace(/[\u2018\u2019]/g, "'").
|
||||
replace(/[\u201c\u201d]/g, '"'));
|
||||
}
|
||||
|
||||
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
@@ -462,7 +489,7 @@ function yscroll() {
|
||||
|
||||
function showsort(tab) {
|
||||
var v, vn, v1, v2, th = tab.tHead,
|
||||
sopts = jread('fsort', [["href", 1, ""]]);
|
||||
sopts = jread('fsort', jcp(dsort));
|
||||
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
|
||||
@@ -601,9 +628,8 @@ function linksplit(rp, id) {
|
||||
}
|
||||
var vlink = esc(uricom_dec(link));
|
||||
|
||||
if (link.indexOf('/') !== -1) {
|
||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||
}
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = vlink.slice(0, -1);
|
||||
|
||||
if (!rp) {
|
||||
if (q)
|
||||
@@ -735,17 +761,6 @@ function noq_href(el) {
|
||||
}
|
||||
|
||||
|
||||
function get_pwd() {
|
||||
var k = HTTPS ? 's=' : 'd=',
|
||||
pwd = ('; ' + document.cookie).split('; cppw' + k);
|
||||
|
||||
if (pwd.length < 2)
|
||||
return null;
|
||||
|
||||
return decodeURIComponent(pwd[1].split(';')[0]);
|
||||
}
|
||||
|
||||
|
||||
function unix2iso(ts) {
|
||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||
}
|
||||
@@ -866,9 +881,10 @@ function jcp(obj) {
|
||||
}
|
||||
|
||||
|
||||
function sread(key) {
|
||||
function sread(key, al) {
|
||||
try {
|
||||
return localStorage.getItem(key);
|
||||
var ret = localStorage.getItem(key);
|
||||
return (!al || has(al, ret)) ? ret : null;
|
||||
}
|
||||
catch (e) {
|
||||
return null;
|
||||
@@ -890,7 +906,13 @@ function jread(key, fb) {
|
||||
if (!str)
|
||||
return fb;
|
||||
|
||||
return JSON.parse(str);
|
||||
try {
|
||||
// '' throws, null is ok, sasuga
|
||||
return JSON.parse(str);
|
||||
}
|
||||
catch (e) {
|
||||
return fb;
|
||||
}
|
||||
}
|
||||
|
||||
function jwrite(key, val) {
|
||||
@@ -909,7 +931,7 @@ function fcfg_get(name, defval) {
|
||||
val = parseFloat(sread(name));
|
||||
|
||||
if (!isNum(val))
|
||||
return parseFloat(o ? o.value : defval);
|
||||
return parseFloat(o && o.value !== '' ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
@@ -954,13 +976,14 @@ function bcfg_set(name, val) {
|
||||
function bcfg_upd_ui(name, val) {
|
||||
var o = ebi(name);
|
||||
if (!o)
|
||||
return;
|
||||
return val;
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o) {
|
||||
clmod(o, 'on', val);
|
||||
}
|
||||
return val;
|
||||
}
|
||||
|
||||
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||
@@ -1051,10 +1074,13 @@ function cliptxt(txt, ok) {
|
||||
}
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
function Debounce(delay) {
|
||||
var r = this;
|
||||
r.delay = delay;
|
||||
r.timer = 0;
|
||||
r.t_hit = 0;
|
||||
r.t_run = 0;
|
||||
r.q = [];
|
||||
r.last = 0;
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
@@ -1068,7 +1094,67 @@ var timer = (function () {
|
||||
apop(r.q, fun);
|
||||
};
|
||||
|
||||
function doevents() {
|
||||
r.run = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
r.t_run = Date.now();
|
||||
|
||||
var q = r.q.slice(0);
|
||||
for (var a = 0; a < q.length; a++)
|
||||
q[a]();
|
||||
};
|
||||
|
||||
r.hit = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
var now = Date.now(),
|
||||
td_hit = now - r.t_hit,
|
||||
td_run = now - r.t_run;
|
||||
|
||||
if (td_run >= r.delay * 2)
|
||||
r.t_run = now;
|
||||
|
||||
if (td_run >= r.delay && td_run <= r.delay * 2) {
|
||||
// r.delay is also deadline
|
||||
clearTimeout(r.timer);
|
||||
return r.run();
|
||||
}
|
||||
|
||||
if (td_hit < r.delay / 5)
|
||||
return;
|
||||
|
||||
clearTimeout(r.timer);
|
||||
r.timer = setTimeout(r.run, r.delay);
|
||||
r.t_hit = now;
|
||||
};
|
||||
};
|
||||
|
||||
var onresize100 = new Debounce(100);
|
||||
window.addEventListener('resize', onresize100.hit);
|
||||
|
||||
|
||||
var timer = (function () {
|
||||
var r = {};
|
||||
r.q = [];
|
||||
r.last = 0;
|
||||
r.fs = 0;
|
||||
r.fc = 0;
|
||||
|
||||
r.add = function (fun, run) {
|
||||
r.rm(fun);
|
||||
r.q.push(fun);
|
||||
|
||||
if (run)
|
||||
fun();
|
||||
};
|
||||
|
||||
r.rm = function (fun) {
|
||||
apop(r.q, fun);
|
||||
};
|
||||
|
||||
var doevents = function () {
|
||||
if (crashed)
|
||||
return;
|
||||
|
||||
@@ -1080,6 +1166,7 @@ var timer = (function () {
|
||||
q[a]();
|
||||
|
||||
r.last = Date.now();
|
||||
//r.fc++; if (r.last - r.fs >= 2000) { console.log(r.last - r.fs, r.fc); r.fs = r.last; r.fc = 0; }
|
||||
}
|
||||
setInterval(doevents, 100);
|
||||
|
||||
@@ -1104,7 +1191,7 @@ var tt = (function () {
|
||||
var prev = null;
|
||||
r.cshow = function () {
|
||||
if (this !== prev)
|
||||
r.show.bind(this)();
|
||||
r.show.call(this);
|
||||
|
||||
prev = this;
|
||||
};
|
||||
@@ -1116,7 +1203,7 @@ var tt = (function () {
|
||||
return;
|
||||
|
||||
if (Date.now() - r.lvis < 400)
|
||||
return r.show.bind(this)();
|
||||
return r.show.call(this);
|
||||
|
||||
tev = setTimeout(r.show.bind(this), 800);
|
||||
if (TOUCH)
|
||||
@@ -1264,6 +1351,11 @@ function lf2br(txt) {
|
||||
}
|
||||
|
||||
|
||||
function unpre(txt) {
|
||||
return ('' + txt).replace(/^<pre>/, '');
|
||||
}
|
||||
|
||||
|
||||
var toast = (function () {
|
||||
var r = {},
|
||||
te = null,
|
||||
@@ -1274,8 +1366,11 @@ var toast = (function () {
|
||||
r.visible = false;
|
||||
r.txt = null;
|
||||
r.tag = obj; // filler value (null is scary)
|
||||
r.p_txt = '';
|
||||
r.p_sec = 0;
|
||||
r.p_t = 0;
|
||||
|
||||
function scrollchk() {
|
||||
var scrollchk = function () {
|
||||
if (scrolling)
|
||||
return;
|
||||
|
||||
@@ -1290,7 +1385,7 @@ var toast = (function () {
|
||||
scrolling = true;
|
||||
}
|
||||
|
||||
function unscroll() {
|
||||
var unscroll = function () {
|
||||
timer.rm(scrollchk);
|
||||
clmod(obj, 'scroll');
|
||||
scrolling = false;
|
||||
@@ -1306,10 +1401,23 @@ var toast = (function () {
|
||||
};
|
||||
|
||||
r.show = function (cl, sec, txt, tag) {
|
||||
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
|
||||
delta = Date.now() - r.p_t;
|
||||
|
||||
if (same && delta < 100)
|
||||
return;
|
||||
|
||||
r.p_txt = txt;
|
||||
r.p_sec = sec;
|
||||
r.p_t = Date.now();
|
||||
|
||||
clearTimeout(te);
|
||||
if (sec)
|
||||
te = setTimeout(r.hide, sec * 1000);
|
||||
|
||||
if (same && delta < 1000)
|
||||
return;
|
||||
|
||||
if (txt.indexOf('<body>') + 1)
|
||||
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||
|
||||
@@ -1360,6 +1468,7 @@ var modal = (function () {
|
||||
r.load();
|
||||
|
||||
r.busy = false;
|
||||
r.nofocus = 0;
|
||||
|
||||
r.show = function (html) {
|
||||
o = mknod('div', 'modal');
|
||||
@@ -1373,6 +1482,7 @@ var modal = (function () {
|
||||
a.onclick = ng;
|
||||
|
||||
a = ebi('modal-ok');
|
||||
a.addEventListener('blur', onblur);
|
||||
a.onclick = ok;
|
||||
|
||||
var inp = ebi('modali');
|
||||
@@ -1383,6 +1493,7 @@ var modal = (function () {
|
||||
}, 0);
|
||||
|
||||
document.addEventListener('focus', onfocus);
|
||||
document.addEventListener('selectionchange', onselch);
|
||||
timer.add(onfocus);
|
||||
if (cb_up)
|
||||
setTimeout(cb_up, 1);
|
||||
@@ -1390,13 +1501,18 @@ var modal = (function () {
|
||||
|
||||
r.hide = function () {
|
||||
timer.rm(onfocus);
|
||||
try {
|
||||
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||
}
|
||||
catch (ex) { }
|
||||
document.removeEventListener('selectionchange', onselch);
|
||||
document.removeEventListener('focus', onfocus);
|
||||
document.removeEventListener('keydown', onkey);
|
||||
o.parentNode.removeChild(o);
|
||||
r.busy = false;
|
||||
setTimeout(next, 50);
|
||||
};
|
||||
function ok(e) {
|
||||
var ok = function (e) {
|
||||
ev(e);
|
||||
var v = ebi('modali');
|
||||
v = v ? v.value : true;
|
||||
@@ -1404,26 +1520,44 @@ var modal = (function () {
|
||||
if (cb_ok)
|
||||
cb_ok(v);
|
||||
}
|
||||
function ng(e) {
|
||||
var ng = function (e) {
|
||||
ev(e);
|
||||
r.hide();
|
||||
if (cb_ng)
|
||||
cb_ng(null);
|
||||
}
|
||||
|
||||
function onfocus(e) {
|
||||
var onselch = function () {
|
||||
try {
|
||||
if (window.getSelection() + '')
|
||||
r.nofocus = 15;
|
||||
}
|
||||
catch (ex) { }
|
||||
};
|
||||
|
||||
var onblur = function () {
|
||||
r.nofocus = 3;
|
||||
};
|
||||
|
||||
var onfocus = function (e) {
|
||||
if (MOBILE)
|
||||
return;
|
||||
|
||||
var ctr = ebi('modalc');
|
||||
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
||||
return;
|
||||
|
||||
setTimeout(function () {
|
||||
if (--r.nofocus >= 0)
|
||||
return;
|
||||
|
||||
if (ctr = ebi('modal-ok'))
|
||||
ctr.focus();
|
||||
}, 20);
|
||||
ev(e);
|
||||
}
|
||||
};
|
||||
|
||||
function onkey(e) {
|
||||
var onkey = function (e) {
|
||||
var k = e.code,
|
||||
eok = ebi('modal-ok'),
|
||||
eng = ebi('modal-ng'),
|
||||
@@ -1432,21 +1566,21 @@ var modal = (function () {
|
||||
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
||||
k = 'Enter';
|
||||
|
||||
if (k == 'Enter') {
|
||||
if (k.endsWith('Enter')) {
|
||||
if (ae && ae == eng)
|
||||
return ng();
|
||||
return ng(e);
|
||||
|
||||
return ok();
|
||||
return ok(e);
|
||||
}
|
||||
|
||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||
|
||||
if (k == 'Escape')
|
||||
return ng();
|
||||
return ng(e);
|
||||
}
|
||||
|
||||
function next() {
|
||||
var next = function () {
|
||||
if (!r.busy && q.length)
|
||||
q.shift()();
|
||||
}
|
||||
@@ -1457,7 +1591,7 @@ var modal = (function () {
|
||||
});
|
||||
next();
|
||||
};
|
||||
function _alert(html, cb, fun) {
|
||||
var _alert = function (html, cb, fun) {
|
||||
cb_ok = cb_ng = cb;
|
||||
cb_up = fun;
|
||||
html += '<div id="modalb"><a href="#" id="modal-ok">OK</a></div>';
|
||||
@@ -1470,7 +1604,7 @@ var modal = (function () {
|
||||
});
|
||||
next();
|
||||
}
|
||||
function _confirm(html, cok, cng, fun, btns) {
|
||||
var _confirm = function (html, cok, cng, fun, btns) {
|
||||
cb_ok = cok;
|
||||
cb_ng = cng === undefined ? cok : cng;
|
||||
cb_up = fun;
|
||||
@@ -1484,11 +1618,11 @@ var modal = (function () {
|
||||
});
|
||||
next();
|
||||
}
|
||||
function _prompt(html, v, cok, cng, fun) {
|
||||
var _prompt = function (html, v, cok, cng, fun) {
|
||||
cb_ok = cok;
|
||||
cb_ng = cng === undefined ? cok : null;
|
||||
cb_up = fun;
|
||||
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
|
||||
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
|
||||
r.show(html);
|
||||
|
||||
ebi('modali').value = v || '';
|
||||
@@ -1520,7 +1654,7 @@ function repl_load() {
|
||||
ret = [
|
||||
'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)',
|
||||
"for (var a of QSA('#files a[id]')) a.setAttribute('download','')",
|
||||
'console.hist.slice(-10).join("\\n")'
|
||||
'console.hist.slice(-50).join("\\n")'
|
||||
];
|
||||
|
||||
ipre.innerHTML = '<option value=""></option>';
|
||||
@@ -1576,6 +1710,8 @@ function repl(e) {
|
||||
if (!cmd)
|
||||
return toast.inf(3, 'eval aborted');
|
||||
|
||||
cmd = unsmart(cmd);
|
||||
|
||||
if (cmd.startsWith(',')) {
|
||||
evalex_fatal = true;
|
||||
return modal.alert(esc(eval(cmd.slice(1)) + ''));
|
||||
@@ -1614,7 +1750,7 @@ function load_md_plug(md_text, plug_type, defer) {
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n',
|
||||
md = md_text.replace(/\r/g, ''),
|
||||
md = '\n' + md_text.replace(/\r/g, '') + '\n',
|
||||
ofs = md.indexOf(find),
|
||||
ofs2 = md.indexOf('\n```', ofs + 1);
|
||||
|
||||
@@ -1701,7 +1837,7 @@ var favico = (function () {
|
||||
r.en = true;
|
||||
r.tag = null;
|
||||
|
||||
function gx(txt) {
|
||||
var gx = function (txt) {
|
||||
return (svg_decl +
|
||||
'<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' +
|
||||
(r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') +
|
||||
@@ -1786,16 +1922,17 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
|
||||
if (xhr.status < 400 && xhr.status >= 200)
|
||||
return true;
|
||||
|
||||
if (xhr.status == 403)
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
fun = toast[lvl || 'err'],
|
||||
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
|
||||
|
||||
if (xhr.status == 403 && !is_cf)
|
||||
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
|
||||
|
||||
if (xhr.status == 404)
|
||||
return toast.err(0, prefix + e404, tag);
|
||||
|
||||
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
|
||||
fun = toast[lvl || 'err'];
|
||||
|
||||
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
|
||||
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
|
||||
var now = Date.now(), td = now - cf_cha_t;
|
||||
if (td < 15000)
|
||||
return;
|
||||
|
||||
@@ -1,3 +1,757 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1208-2133 `v1.9.27` another dedup bug

so [v1.9.26](https://github.com/9001/copyparty/releases/tag/v1.9.26) fixed how moving a symlink could break other related symlinks, and then it turns out symlinks themselves could also die when moving them to another location, and somehow nobody encountered any of these until now... surely there are no more deduplication-related issues left at this point, yeah?

## bugfixes

* #65 moving deduplicated copies of files (symlinks) from one location to another could make them disappear (break the symlinks)

  * don't worry, we are **not** talking about data loss! but see the [release notes for v1.9.26](https://github.com/9001/copyparty/releases/tag/v1.9.26) which explain how to deal with this issue (how to find, diagnose, and repair broken symlinks)

----

## regarding fedora packages

[copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) (fedora's build service) is not building at the moment; ***if you installed copyparty from copr-pypi,*** you can upgrade to this release by running one of the following:

```bash
dnf install https://ocv.me/copyparty/fedora/37/python3-copyparty.fc37.noarch.rpm
dnf install https://ocv.me/copyparty/fedora/38/python3-copyparty.fc38.noarch.rpm
dnf install https://ocv.me/copyparty/fedora/39/python3-copyparty.fc39.noarch.rpm
```


▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1208-0136 `v1.9.26` dont break symlinks
|
||||
|
||||
## new features
|
||||
* *tumbleweed*
|
||||
|
||||
## bugfixes
|
||||
|
||||
* deleting files from the server could make some duplicates of that file unavailable (by breaking nested symlinks)
|
||||
|
||||
* don't worry, we are **not** talking about data loss! but such broken links would disappear from the directory listing and would need to be remedied by replacing the broken links manually, either by using a file explorer or commandline
|
||||
|
||||
* **only** affected linux/macos, did **not** affect servers with `--hardlink` or `--never-symlink` or `--no-dedup`, and **mainly** affected servers with lots of duplicate files (with some dupes in the same folder and some elsewhere)
|
||||
|
||||
* if you want to check for such broken symlinks, the following unix command will find all of them: `find -L -type l` (a slightly expanded sketch is included below this list)
|
||||
|
||||
* to repair a broken link, first remove it and then replace it: `rm thelink.opus; ln -s /mnt/music/realfile.opus thelink.opus`
|
||||
|
||||
* if you are left with a mystery file and want to know where its duplicates are, you can grep for the filename in the logs and you'll find something like the following line, where the `wark` is the file identifier; grep for that to find all the other copies of that file -- `purl` is the folder/URL which that copy of the file was uploaded to:
|
||||
```json
|
||||
{"name": "04. GHOST.opus", "purl": "/mu/vt/suisei/still-still-stellar/", "size": 4520986, "lmod": 1697091772, "sprs": true, "hash": [], "wark": "SJMASMtWOa0UZnc002nn5unO5iCBMa-krt2CDcq8eJe9"}
|
||||
```
|
||||
|
||||
* the server would throw an error if you tried to delete a broken symlink
|
||||
* prevent warnings about duplicate file entries in the database by preventing that from happening in the first place
|
||||
* `u2c.py` (commandline uploader) would fail to delete files from the server if there's more than ~10'000 files to be deleted
|
||||
* and forgot to bump the version number... `1.11 (2nd season)`
|
||||
|
||||
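a rough sketch combining the find/diagnose/repair steps above (the volume path is hypothetical; `find -L -type l` and `readlink` are plain unix tools, nothing copyparty-specific):

```bash
# list every broken symlink below the volume together with its dangling target,
# so you can see which realfile each one should be re-pointed at
find -L /mnt/music -type l | while read -r lnk; do
    printf '%s -> %s\n' "$lnk" "$(readlink "$lnk")"
done
```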
## other changes
|
||||
* `--help` was slightly improved
|
||||
* docker images are now based on alpine v3.19
|
||||
* `copyparty.exe` is now based on python v3.11.7
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1201-2326 `v1.9.25` focus

## new features
* remember and restore scroll position when leaving the textfile viewer

## bugfixes
* the request-smuggling detector was too strict, blocking access to textfiles with newlines / control-codes in the filename
* focus and text selection in messageboxes was still jank, mainly in firefox and especially phones

## other changes
* the banhammer now applies on attempts at request-smuggling and path traversals
  * these were merely detected and rejected before, might as well bonk them
* reject bad requests with a terse 500 instead of abruptly disconnecting in some cases
  * stops firefox from rapidly spamming additional attempts


▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1201-0210 `v1.9.24` header auth
|
||||
|
||||
## new features
|
||||
* initial work on #62 (support identity providers, oauth/SSO/...); see [readme](https://github.com/9001/copyparty#identity-providers)
|
||||
* only authentication so far; no authorization yet, and users must exist in the copyparty config with bogus passwords
|
||||
* new option `--ipa` rejects connections from clients outside of a given allowlist of IP prefixes (see the sketch below this list)
|
||||
* environment variables can be used almost everywhere that takes a filesystem path; should make it way more comfy to write configs for docker / systemd
|
||||
* #59 added a basic [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose) and an example config
|
||||
* probably much room for improvement on everything docker still
|
||||
|
||||
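a minimal sketch of the `--ipa` allowlist; the prefix value is just an illustration, and the exact syntax (and whether several prefixes can be combined) should be checked against `--help`:

```bash
# hypothetical: only accept connections from clients whose IP starts with 192.168.
python3 copyparty-sfx.py --ipa 192.168. -v /srv/pub:pub:r
```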
## bugfixes
|
||||
* the nftables-based port-forwarding in the [systemd example](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd) was buggy; replaced with CAP_NET_BIND_SERVICE
|
||||
* palemoon-specific js crash if a text selection was dragged
|
||||
* text selection in messageboxes was jank
|
||||
|
||||
## other changes
|
||||
* improved [systemd example](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd) with hardening and a better example config
|
||||
* logfiles are flushed for every line written; can be disabled with `--no-logflush` for ~3% more performance best-case
|
||||
* iphones probably won't broadcast cover-art to car stereos over bluetooth anymore since the thingamajig in iOS that's in charge of that doesn't have cookie-access, and strapping in the auth is too funky so let's stop doing that b7723ac245b8b3e38d6410891ef1aa92d4772114
|
||||
* can be remedied by enabling filekeys and granting unauthenticated people access that way, but that's too much effort for anyone to bother with I'm sure
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1125-1417 `v1.9.21` in a bind
|
||||
|
||||
## new features
|
||||
* #63 the grid-view will open textfiles in the textfile viewer
|
||||
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) now accepts user/group names (in addition to IDs)
|
||||
|
||||
## bugfixes
|
||||
* the `Y` hotkey (which turns all links into download links) didn't affect the grid-view
|
||||
* on some servers with unusual filesystem layouts (especially ubuntu-zfs), [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) would make an unholy mess of recursive bind-mounts, quickly running out of inodes and requiring a server reboot
|
||||
* added several safeguards to avoid anything like this in the future
|
||||
* mutex around jail setup/teardown to prevent racing other instances
|
||||
* verify jail status by inspecting /proc/mounts between each folder to bind
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1121-2325 `v1.9.20` nice
|
||||
|
||||
## new features
|
||||
* expensive subprocesses (ffmpeg, parsers, hooks) will run with `nice` to reduce cpu priority
|
||||
* ...so listening to flacs won't grind everything else to a halt
|
||||
|
||||
## bugfixes
|
||||
* the "load more" search results button didn't disappear if you hit the serverside limit
|
||||
* the "show all" button for huge folders didn't disappear when navigating into a smaller folder
|
||||
* trying to play the previous track when you're already playing the first track in a folder would send you on a wild adventure
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1119-1229 `v1.9.19` shadow filter
|
||||
|
||||
## bugfixes
|
||||
* #61 Mk.II: filter search results to also handle this issue in volumes where reindexing is disabled, or where (spoiler warning) a bug in the directory indexer prevents shadowed files from being forgotten
|
||||
* filekeys didn't always get included in the up2k UI for world-readable folders
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1118-2106 `v1.9.18` cache invalidation
|
||||
|
||||
## bugfixes
|
||||
* #61 search results could contain stale records from overlapping volumes:
|
||||
* if volume `/foo` is indexed and then volume `/foo/bar` is later created, any files inside the `bar` subfolder would not become forgotten in `/foo`'s database until something in `/foo` changes, which could be never
|
||||
* as a result, search results could show stale metadata from `/foo`'s database regarding files in `/foo/bar`
|
||||
* fix this by dropping caches and reindexing if copyparty is started with a different list of volumes than last time
|
||||
* #60 client error when ctrl-clicking search results
|
||||
* icons for the close/more buttons in search results are now pillow-10.x compatible
|
||||
|
||||
## other changes
|
||||
* `u2c.exe`: upgraded certifi to version `2023.11.17`
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1111-1738 `v1.9.17` 11-11
|
||||
|
||||
## new features
|
||||
* `u2c.py` / `u2c.exe` (the commandline uploader):
|
||||
* `-x` is now case-insensitive
|
||||
* if a file fails to upload after 30 attempts, give up (bitflips)
|
||||
* add 5 sec delay before reattempts (configurable with `--cd`)
|
||||
|
||||
## bugfixes
|
||||
* clients could crash the file indexer by uploading and then instantly deleting files (as some webdav clients tend to do)
|
||||
* and fix some upload errorhandling which broke during a refactoring in v1.9.16
|
||||
|
||||
## other changes
|
||||
* upgraded pyftpdlib to v1.5.9
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1104-2158 `v1.9.16` windedup
|
||||
|
||||
## breaking changes
|
||||
* two of the prometheus metrics have changed slightly; see the [breaking changes readme section](https://github.com/9001/copyparty#breaking-changes)
|
||||
* (i'm not familiar with prometheus so i'm not sure if this is a big deal)
|
||||
|
||||
## new features
|
||||
* #58 versioned docker images! no longer just `latest`
|
||||
* browser: the mkdir feature now accepts `foo/bar/qux` and `../foo` and `/bar`
|
||||
* add 14 more prometheus metrics; see [readme](https://github.com/9001/copyparty#prometheus) for details
|
||||
* connections, requests, malicious requests, volume state, file hashing/analysis queues
|
||||
* catch some more malicious requests in the autoban filters
|
||||
* some malicious requests are now answered with HTTP 422, so that they count against `--ban-422`
|
||||
|
||||
## bugfixes
|
||||
* windows: fix symlink-based upload deduplication
|
||||
* MS decided to make symlinks relative to working-directory rather than destination-path...
|
||||
* `--stats` would produce invalid metrics if a volume was offline
|
||||
* minor improvements to password hashing ux:
|
||||
* properly warn if `--ah-cli` or `--ah-gen` is used without `--ah-alg`
|
||||
* support `^D` during `--ah-cli`
|
||||
* browser-ux / cosmetics:
|
||||
* fix toast/tooltip colors on splashpage
|
||||
* easier to do partial text selection inside links (search results, breadcrumbs, uploads)
|
||||
* more rclone-related hints on the connect-page
|
||||
|
||||
## other changes
|
||||
* malformed http headers from clients are no longer included in the client error-message
|
||||
* just in case there are deployments with a reverse-proxy inserting interesting stuff on the way in
|
||||
* the serverlog still contains all the necessary info to debug your own clients
|
||||
* updated [example nginx config](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nginx/copyparty.conf) to recover faster from brief server outages
|
||||
* the default value of `fail_timeout` (10sec) makes nginx cache the outage for longer than necessary
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1024-1643 `v1.9.15` expand placeholder
|
||||
|
||||
[made it just in time!](https://a.ocv.me/pub/g/nerd-stuff/PXL_20231024_170348367.jpg) (EDIT: nevermind, three of the containers didn't finish uploading to ghcr before takeoff ;_; all up now)
|
||||
|
||||
## new features
|
||||
* #56 placeholder variables in markdown documents and prologue/epilogue html files
|
||||
* default-disabled; must be enabled globally with `--exp` or per-volume with volflag `exp`
|
||||
* `{{self.ip}}` becomes the client IP; see [/srv/expand/README.md](https://github.com/9001/copyparty/blob/hovudstraum/srv/expand/README.md) for more examples, and the small sketch below this list
|
||||
* dynamic-range-compressor: reduced volume jumps between songs when enabled
|
||||
|
||||
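a minimal sketch of the placeholder feature, assuming a hypothetical volume at `/srv/pub`:

```bash
# the README is rendered below the file listing, and with --exp enabled
# the {{self.ip}} placeholder is replaced with the visitor's IP
printf 'hello {{self.ip}}!\n' > /srv/pub/README.md
python3 copyparty-sfx.py --exp -v /srv/pub:pub:r
```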
## bugfixes
|
||||
* v1.9.14 broke the `scan` volflag, causing volume rescans to happen every 10sec if enabled
|
||||
* its global counterpart `--re-maxage` was not affected
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1021-1443 `v1.9.14` uptime
|
||||
|
||||
## new features
|
||||
* search for files by upload time
|
||||
* option to display upload time in directory listings
|
||||
* enable globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (full command sketched below this list)
|
||||
* has a ~17% performance impact on directory listings
|
||||
* [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression) in the audioplayer settings
|
||||
* `--ban-404` is now default-enabled
|
||||
* the turbo-uploader will now un-turbo when necessary to avoid banning itself
|
||||
* this only affects accounts with permissions `g`, `G`, or `h`
|
||||
* accounts with read-access (which are able to see directory listings anyways) and accounts with write-only access are no longer affected by `--ban-404` or `--ban-url`
|
||||
|
||||
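putting the flags above together, a sketch of a server which indexes uploads and shows the new upload-time column (volume path and permissions are hypothetical):

```bash
# -e2d enables the database; +.up_at appends the upload-time column to listings
python3 copyparty-sfx.py -e2d -mte +.up_at -v /srv/pub:pub:rw
```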
## bugfixes
|
||||
* #55 clients could hit the `--url-ban` filter when uploading over webdav
|
||||
* fixed by limiting `--ban-404` and `--ban-url` to accounts with permission `g`, `G`, or `h`
|
||||
* fixed 20% performance drop in python 3.12 due to utcfromtimestamp deprecation
|
||||
* but 3.12.0 is still 5% slower than 3.11.6 for some reason
|
||||
* volume listing on startup would display some redundant info
|
||||
|
||||
## other changes
|
||||
* timeout for unfinished uploads increased from 6 to 24 hours
|
||||
* and is now configurable with `--snap-drop`
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1015-2006 `v1.9.12` more buttons
|
||||
|
||||
just adding requested features, nothing important
|
||||
|
||||
## new features
|
||||
* button `📅` in the uploader (default-enabled) sends your local last-modified timestamps to the server
|
||||
* when deselected, the files on the server will have the upload time as their timestamps instead
|
||||
* `--u2ts` specifies the default setting, `c` client-last-modified or `u` upload-time, or `fc` and `fu` to force
|
||||
* button `full` in the gridview decides if thumbnails should be center-cropped or not
|
||||
* `--no-crop` and the `nocrop` volflag now sets the default value of this instead of forcing the setting
|
||||
* thumbnail cleanup is now more granular, cleaning full-jpg separately from cropped-webp for example
|
||||
* set default sort order with `--sort` or volflag `sort` (example below this list)
|
||||
* one or more comma-separated values; `tags/Circle,tags/.tn,tags/Artist,tags/Title,href`
|
||||
* see the column header tooltips in the browser to know what names (`id`) to use
|
||||
* prefix a column name with `-` for descending sort
|
||||
* specifying a sort order in the client will override all server-defined ones
|
||||
* when visiting a read-only folder, the upload-or-filesearch toggle will remember its previous state and restore it when leaving the folder
|
||||
* much more intuitive, if anything about this UI can be called that...
|
||||
|
||||
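a sketch of the new sort options; the paths are hypothetical, and the column `id`s come from the column header tooltips as noted above:

```bash
# global default: sort by artist then title; the music volume overrides this
# with a descending sort on the filename column (the leading "-" means descending)
python3 copyparty-sfx.py --sort tags/Artist,tags/Title \
  -v /mnt/music:music:r:c,sort=-href
```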
## bugfixes
|
||||
* iPhone: rare javascript panic when switching between safari and another app
|
||||
* ie9: file-rename ui was borked
|
||||
|
||||
## other changes
|
||||
* copyparty.exe: upgrade to pillow 10.1 (which adds a new font for thumbnails in chrome)
|
||||
* still based on python 3.11.6 because 3.12 is currently slower than 3.11
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1009-0036 `v1.9.11` bustin'
|
||||
|
||||
okay, i swear this is the last version for weeks! probably
|
||||
|
||||
## bugfixes
|
||||
* cachebuster didn't apply to dynamically loaded javascript files
|
||||
* READMEs could fail to render with `ReferenceError: DOMPurify is not defined` after upgrading from a copyparty older than v1.9.2
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1008-2051 `v1.9.10` badpwd
|
||||
|
||||
## new features
|
||||
* argument `--log-badpwd` specifies how to log invalid login attempts;
|
||||
* `0` = just a warning with no further information
|
||||
* `1` = log incorrect password in plaintext (default)
|
||||
* `2` = log sha512 hash of the incorrect password
|
||||
* `1` and `2` are convenient for stuff like setting up autoban triggers for common passwords using fail2ban or similar
|
||||
|
||||
## bugfixes
|
||||
* none!
|
||||
* the formerly mentioned caching-directives bug turned out to be unreachable... oh well, better safe than sorry
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1007-2229 `v1.9.9` fix cross-volume dedup moves
|
||||
|
||||
## bugfixes
|
||||
* v1.6.2 introduced a bug which, when moving files between volumes, could cause the move operation to abort when it encounters a deduplicated file
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-1006-1750 `v1.9.8` static filekeys
|
||||
|
||||
## new features
|
||||
* #52 add alternative filekey generator:
|
||||
* volflag `fka` changes the calculation to ignore filesize and inode-number, only caring about the absolute-path on the filesystem and the `--fk-salt`
|
||||
* good for linking to markdown files which might be edited, but reduces security a tiny bit
|
||||
* add warning on startup if `--fk-salt` is too weak (for example when it was upgraded from before [v1.7.6](https://github.com/9001/copyparty/releases/tag/v1.7.6))
|
||||
* removed the filekey upgrade feature to ensure a weak fk-salt is not selected; a new filekey will be generated from scratch on startup if necessary
|
||||
|
||||
## other changes
|
||||
* pyftpdlib upgraded to 1.5.8
|
||||
* copyparty.exe built on python 3.11.6
|
||||
* the exe in this release will be replaced with a 3.12.0 exe as soon as [pillow adds 3.12 support](https://github.com/python-pillow/Pillow/issues/6941)
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0930-2332 `v1.9.7` better column hider
|
||||
|
||||
## new features
|
||||
* column hiding on phones is much more intuitive
|
||||
* since you usually want to hide multiple columns, the hiding mode must now be manually disengaged
|
||||
* click-handler now covers the entire header cell, preventing a misclick from accidentally sorting the table instead
|
||||
|
||||
## bugfixes
|
||||
* #51 running copyparty with an invalid value for `--lang` made it crash with a confusing error message
|
||||
* also makes it more compatible with other localStorage-using webservices running on the same domain
|
||||
|
||||
## other changes
|
||||
* CVE-2023-5217, a vulnerability in libvpx, was fixed by alpine recently and no longer present in the docker images
|
||||
* unlike the fix in v1.9.6, this is irrelevant since it was impossible to reach in all conceivable setups, but still nice
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0923-1215 `v1.9.6` configurable x-forwarded-for
|
||||
|
||||
## new features
|
||||
* rudimentary support for jython and graalpy, and directory tree sidebar in internet explorer 9 through 11, and firefox 10
|
||||
* all older browsers (ie4, ie6, ie8, Netscape) get basic html instead
|
||||
* #35 adds a [hook](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/msg-log.py) which extends the message-to-serverlog feature so it writes the message to a textfile on the server
|
||||
* could theoretically be extended into a [full instant-messaging feature](https://github.com/9001/copyparty/blob/hovudstraum/srv/chat.md) but that's silly, [nobody would do that](https://ocv.me/stuff/cchat.webm)
|
||||
* [r0c is much better](https://github.com/9001/r0c) than this joke
|
||||
|
||||
## bugfixes
|
||||
* 163e3fce46122d64bf824762b6733ff2c3551ba5 the `x-forwarded-for` header was ignored if the nearest reverse-proxy is not asking from 127.0.0.1, which broke client IPs in containerized deployments
|
||||
* the serverlog will now explain how to trust the reverse-proxy to provide client IPs, but basically,
|
||||
* `--xff-hdr` specifies which header to read the client's real ip from
|
||||
* `--xff-src` is an allowlist of IP-addresses to trust that header from (see the sketch below this list)
|
||||
* a62f744a187bc9f821b540e8bb4e0b9a67bd01c8 if copyparty was started while an external HDD was not connected, and that volume's index was stored elsewhere, then the index would get wiped (since all the files are gone)
|
||||
* 3b8f66c0d5c27a68841814ec06f1758f146a5ff5 javascript could crash while uploading from a very unreliable internet connection
|
||||
|
||||
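a sketch of the two new options; the header name and proxy IP are hypothetical, and the serverlog hint mentioned above shows the exact values to use for your setup:

```bash
# trust the x-forwarded-for header, but only when the request comes from the
# reverse-proxy at 192.168.123.1
python3 copyparty-sfx.py --xff-hdr x-forwarded-for --xff-src 192.168.123.1
```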
## other changes
|
||||
* copyparty.exe: updated pillow to 10.0.1 which fixes the webp cve
|
||||
* alpine, which the docker images are based on, turns out to be fairly slow -- currently working on a new docker image (probably fedora-based) which will be 30% faster at analyzing multimedia files and in general 20% faster on average
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0909-1336 `v1.9.5` webhotell
|
||||
|
||||
[happy 9/9!](https://safebooru.org/index.php?page=post&s=view&id=4027419)
|
||||
|
||||
## new features
|
||||
* new permission `h` disables directory listing (so works like `g`) except it redirects to the folder's index.html instead of 404 -- see the sketch below this list
|
||||
* index.html is accessible by anyone with `h` even if filekeys are enabled
|
||||
* well suited for running a shared-webhosting gig (thx kipu) especially now that the...
|
||||
* markdown editor can now be used on non-markdown files if account has `w`rite and `d`elete
|
||||
* hotkey `e` to edit a textfile while it's open in the textfile viewer
|
||||
* SMB: account permissions now work fully as intended, thanks to impacket 0.11
|
||||
* but enabling `--smb` is still strongly discouraged as it's a massive security hazard
|
||||
* download-as-zip can be 2.5x faster on tiny files, at least 15% faster in general
|
||||
* download folders as pax-format tarfiles with `?tar=pax` or `?tar=pax,xz:9`
|
||||
|
||||
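a sketch of a tiny webhotel using the new `h` permission; the account, paths and exact volume syntax are illustrative:

```bash
# anonymous visitors only get each folder's index.html, while joe has
# full access and can edit the files through the markdown editor
python3 copyparty-sfx.py -a joe:hunter2 -v /srv/www:www:h:rwmda,joe
```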
## bugfixes
|
||||
* 422-autoban accidentally triggered when uploading lots of duplicate files (thx hiem!)
|
||||
* `--css-browser` and `--js-browser` now accepts URLs with cache directives
|
||||
* `--css-browser=/the.css?cache=600` (seconds) or `--js-browser=/.res/the.js?cache=i` (7 days)
|
||||
* SMB: avoid windows freaking out and disconnecting if it hits an offline volume
|
||||
* hotkey shift-r to rotate pictures counter-clockwise didn't do anything
|
||||
* hacker theme wasn't hacker enough (everything is monospace now)
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0902-0018 `v1.9.4` yes symlink times
|
||||
|
||||
hello! it's been a while, an entire day even...
|
||||
|
||||
## new features
|
||||
* download folder as tar.gz, tar.bz2, tar.xz
|
||||
* single-threaded, so extremely slow, but nice for easily compressed data or challenged networks
|
||||
* append `?tar=gz`, `?tar=bz2` or `?tar=xz` to a folder URL to do it
|
||||
* default compression levels are gz:3, bz2:2, xz:1; override with `?tar=gz:9`
|
||||
|
||||
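for example, grabbing a folder from a hypothetical local server as a max-compression xz tarball (`&pw=` is the usual password parameter):

```bash
curl 'http://127.0.0.1:3923/music/?tar=xz:9&pw=hunter2' -o music.tar.xz
```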
## bugfixes
|
||||
* c1efd227b7377144a5760bc6cff64f4e87b626d9 symlink-deduplicated files got indexed with the wrong last-modified timestamp
|
||||
* mostly inconsequential; would cause the dupe's uploader-ip to be forgotten on the next server restart since it would reindex to "fix" the timestamp
|
||||
* when linking [a search query](https://a.ocv.me/pub/#q=tags%20like%20soundsho*) it loads the results faster
|
||||
|
||||
## other changes
|
||||
* update readme to mention that iPhones and iPads dislike the preload feature and respond by glitching the audio a bit when a song is exactly 20 seconds away from ending and yet how it's probably a bad idea to disable preloading since i bet it's load-bearing against other iOS bugs
|
||||
* speaking of iPhones and iPads, the [previous version](https://github.com/9001/copyparty/releases/tag/v1.9.3) should have fixed album playback on those
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0831-2211 `v1.9.3` iOS and http fixes
|
||||
|
||||
## new features
|
||||
* iPhones and iPads are now able to...
|
||||
* 9986136dfb2364edb35aa9fbb87410641c6d6af3 play entire albums while the screen is off without the music randomly stopping
|
||||
* apple keeps breaking AudioContext in new and interesting ways; time to give up (no more equalizer)
|
||||
* 1c0d978979a703edeb792e552b18d3b7695b2d90 perform search queries and execute js code
|
||||
* by translating [smart-quotes](https://stackoverflow.com/questions/48678359/ios-11-safari-html-disable-smart-punctuation) into regular `'` and `"` characters
|
||||
* python 3.12 support
|
||||
* technically a bugfix since it was added [a year ago](https://github.com/9001/copyparty/commit/32e22dfe84d5e0b13914b4d0e15c1b8c9725a76d) way before the first py3.12 alpha was released but turns out i botched it, oh well
|
||||
* filter error messages so they never include the filesystem path where copyparty's python files reside
|
||||
* print more context in server logs if someone hits an unexpected permission-denied
|
||||
|
||||
## bugfixes
|
||||
found some iffy stuff combing over the code but, as far as I can tell, luckily none of these were dangerous:
|
||||
* URL normalization was a bit funky, but it appears everything access-control-related was unaffected
|
||||
* some url parameters were double-decoded, causing the unpost filtering and file renaming to fail if the values contained `%`
|
||||
* clients could cause the server to return an invalid cache-control header, but newlines and control-characters got rejected correctly
|
||||
* minor cosmetics / qol fixes:
|
||||
* reduced flickering on page load in chrome
|
||||
* fixed some console spam in search results
|
||||
* markdown documents now have the same line-height in directory listings and the editor
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0826-2116 `v1.9.2` bigger hammer
|
||||
|
||||
## new features
|
||||
* more ways to automatically ban users! three new sensors, all default-enabled, giving a 1 day ban after 9 hits in 2 minutes:
|
||||
* `--ban-403`: trying to access volumes that dont exist or require authentication
|
||||
* `--ban-422`: invalid POST messages (from bruteforcing POST parameters and such)
|
||||
* `--ban-url`: URLs which 404 and also match `--sus-urls` (scanners/crawlers)
|
||||
* if you want to run a vulnerability scan on copyparty, please just [download the server](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and do it locally! takes less than 30 seconds to set up, you get lower latency, and you won't be filling up the logfiles on the demo server with junk, thank you 🙏
|
||||
* more ban-related stuff,
|
||||
* new global option `--nonsus-urls` specifies regex of URLs which are OK to 404 and shouldn't ban people
|
||||
* `--turbo` now accepts the value `-1` which makes it impossible for clients to enable it, making `--ban-404` safe to use
|
||||
* range-selecting files in the list-view by shift-pgup/pgdn
|
||||
* volumes which are currently unavailable (dead nfs share, external HDD which is off, ...) are marked with a ❌ in the directory tree sidebar
|
||||
* the toggle-button to see dotfiles is now persisted as a cookie so it also applies on the initial page load
|
||||
* more effort is made to prevent `<script>`s inside markdown documents from running in the markdown editor and the fullpage viewer
|
||||
* anyone who wanted to use markdown files for malicious stuff can still just upload an html file instead, so this doesn't make anything more secure, just less confusing
|
||||
* the safest approach is still the `nohtml` volflag which disables markdown rendering outside sandboxes entirely, or only giving out write-access to trustworthy people
|
||||
* enabling markdown plugins with `-emp` now has the side-effect of cancelling this band-aid too
|
||||
|
||||
## bugfixes
|
||||
* textfile navigation hotkeys broke in the previous version
|
||||
|
||||
## other changes
|
||||
* example [nginx config](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nginx/copyparty.conf) was not compatible with cloudflare (suggest `$http_cf_connecting_ip` instead of `$proxy_add_x_forwarded_for`)
|
||||
* `copyparty.exe` is now built with python 3.11.5 which fixes [CVE-2023-40217](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-40217)
|
||||
* `copyparty32.exe` is not, because python understandably ended win7 support
|
||||
* [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md):
|
||||
* copyparty appears to be 30x faster than nextcloud and seafile at receiving uploads of many small files
|
||||
* seafile has a size limit when zip-downloading folders
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0820-2338 `v1.9.1` prometheable
|
||||
|
||||
## new features
|
||||
* #49 prometheus / grafana / openmetrics integration ([see readme](https://github.com/9001/copyparty#prometheus))
|
||||
* read metrics from http://127.0.0.1:3923/.cpr/metrics after enabling with `--stats`
|
||||
* download a folder with all music transcoded to opus by adding `?tar=opus` or `?zip&opus` to the URL
|
||||
* can also be used to download thumbnails instead of full images; `?tar=w` for webp, `?tar=j` for jpg
|
||||
* so i guess the long-time requested feature of pre-generating thumbnails kind of happened after all, if you schedule a `curl http://127.0.0.1:3923/?tar=w >/dev/null` after server startup
|
||||
* u2c (commandline uploader): argument `-x` to exclude files by regex (compares absolute filesystem paths)
|
||||
* `--zm-spam 30` can be used to improve zeroconf / mDNS reliability on crazy networks
|
||||
* only necessary if there are clients with multiple IPs and some of the IPs are outside the subnets that copyparty is in -- not spec-compliant, not really recommended, but shouldn't cause any issues either
|
||||
* and `--mc-hop` wasn't actually implemented until now
|
||||
* dragging an image from another browser window onto the upload button is now possible
|
||||
* only works on chrome, and only on windows or linux (not macos)
|
||||
* server hostname is prefixed in all window titles
|
||||
* can be adjusted with `--bname` (the file explorer) and `--doctitle` (all other documents)
|
||||
* can be disabled with `--nth` (just window title) or `--nih` (title + header)
|
||||
|
||||
## bugfixes
|
||||
* docker: the autogenerated seeds for filekeys and account passwords now get persisted to the config volume (thx noktuas)
|
||||
* uploading files with fancy filenames could fail if the copyparty server is running on android
|
||||
* improve workarounds for some apple/iphone/ios jank (thx noktuas and spiky)
|
||||
* some ui elements had their font-size selected by fair dice roll
|
||||
* the volume control does nothing because [apple disabled it](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11), so add a warning
|
||||
* the image gallery cannot be fullscreened [as apple intended](https://developer.mozilla.org/en-US/docs/Web/API/Element/requestFullscreen#browser_compatibility) so add a warning
|
||||
|
||||
## other changes
|
||||
* file table columns are now limited to browser window width
|
||||
* readme: mention that nginx-QUIC is currently very slow (thx noktuas)
|
||||
* #50 add a safeguard to the wget plugin in case wget at some point adds support for `file://` or similar
|
||||
* show a suggestion on startup to enable the database
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0725-1550 `v1.8.8` just boring bugfixes
|
||||
|
||||
final release until late august unless something bad happens and i end up building this thing on a shinkansen
|
||||
|
||||
## recent security / vulnerability fixes
|
||||
* there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates
|
||||
* [v1.8.7](https://github.com/9001/copyparty/releases/tag/v1.8.7) (2023-07-23) - [CVE-2023-38501](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-38501) - reflected XSS
|
||||
* [v1.8.2](https://github.com/9001/copyparty/releases/tag/v1.8.2) (2023-07-14) - [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474) - path traversal (first CVE)
|
||||
* all serverlogs reviewed so far (5 public servers) showed no signs of exploitation
|
||||
|
||||
## bugfixes
|
||||
* range-select with shiftclick:
|
||||
* don't crash when entering another folder and shift-clicking some more
|
||||
* remember selection origin when lazy-loading more stuff into the viewport
|
||||
* markdown editor:
|
||||
* fix confusing warnings when the browser cache decides it *really* wants to cache
|
||||
* and when a document starts with a newline
|
||||
* remember intended actions such as `?edit` on login prompts
|
||||
* Windows: TLS-cert generation (triggered by network changes) could occasionally fail
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0723-1543 `v1.8.7` XSS for days
|
||||
|
||||
at the lack of better ideas, there is now a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all future important updates such as this one
|
||||
|
||||
## bugfixes
|
||||
* reflected XSS through `/?k304` and `/?setck`
|
||||
* if someone tricked you into clicking a URL containing a chain of `%0d` and `%0a` they could potentially have moved/deleted existing files on the server, or uploaded new files, using your account
|
||||
* if you use a reverse proxy, you can check if you have been exploited like so:
|
||||
* nginx: grep your logs for URLs containing `%0d%0a%0d%0a`, for example using the following command:
|
||||
```bash
|
||||
(gzip -dc access.log*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -iE '%0[da]%0[da]%0[da]%0[da]'
|
||||
```
|
||||
* if you find any traces of exploitation (or just want to be on the safe side) it's recommended to change the passwords of your copyparty accounts
|
||||
* huge thanks *again* to @TheHackyDog !
|
||||
* the original fix for CVE-2023-37474 broke the download links for u2c.py and partyfuse.py
|
||||
* fix mediaplayer spinlock if the server only has a single audio file
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0721-0036 `v1.8.6` fix reflected XSS
|
||||
|
||||
## bugfixes
|
||||
* reflected XSS through `/?hc` (the optional subfolder parameter to the [connect](https://a.ocv.me/?hc) page)
|
||||
* if someone tricked you into clicking `http://127.0.0.1:3923/?hc=<script>alert(1)</script>` they could potentially have moved/deleted existing files on the server, or uploaded new files, using your account
|
||||
* if you use a reverse proxy, you can check if you have been exploited like so:
|
||||
* nginx: grep your logs for URLs containing `?hc=` with `<` somewhere in its value, for example using the following command:
|
||||
```bash
|
||||
(gzip -dc access.log*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -E '[?&](hc|pw)=.*[<>]'
|
||||
```
|
||||
* if you find any traces of exploitation (or just want to be on the safe side) it's recommended to change the passwords of your copyparty accounts
|
||||
* thanks again to @TheHackyDog !
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0718-0746 `v1.8.4` range-select v2
|
||||
|
||||
**IMPORTANT:** `v1.8.2` (previous release) fixed [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474) ; please see the [1.8.2 release notes](https://github.com/9001/copyparty/releases/tag/v1.8.2) (all serverlogs reviewed so far showed no signs of exploitation)
|
||||
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||
|
||||
## new features
|
||||
* #47 file selection by shift-clicking
|
||||
* in list-view: click a table row to select it, then shift-click another to select all files in-between
|
||||
* in grid-view: either enable the `multiselect` button (mainly for phones/tablets), or the new `sel` button in the `[⚙️] settings` tab (better for mouse+keyboard), then shift-click two files
|
||||
* volflag `fat32` avoids a bug in android's sdcardfs causing excessive reindexing on startup if any files were modified on the sdcard since last reboot
|
||||
|
||||
## bugfixes
|
||||
* minor corrections to the new features from #45
|
||||
* uploader IPs are now visible for `a`dmin accounts in `d2t` volumes as well
|
||||
|
||||
## other changes
|
||||
* the admin-panel is only accessible for accounts which have the `a` (admin) permission-level in one or more volumes; so instead of giving your user `rwmd` access, you'll want `rwmda` instead:
|
||||
```bash
|
||||
python3 copyparty-sfx.py -a joe:hunter2 -v /mnt/nas/pub:pub:rwmda,joe
|
||||
```
|
||||
or in a settings file,
|
||||
```yaml
|
||||
[/pub]
|
||||
/mnt/nas/pub
|
||||
accs:
|
||||
rwmda: joe
|
||||
```
|
||||
* until now, `rw` was enough, however most readwrite users don't need access to those features
|
||||
* grabbing a stacktrace with `?stack` is permitted for both `rw` and `a`
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0714-1558 `v1.8.2` URGENT: fix path traversal vulnerability
|
||||
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||
|
||||
Starting with the bad and important news; this release fixes https://github.com/9001/copyparty/security/advisories/GHSA-pxfv-7rr3-2qjg / [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474) -- so please upgrade!
|
||||
|
||||
Every version until now had a [path traversal vulnerability](https://owasp.org/www-community/attacks/Path_Traversal) which allowed read-access to any file on the server's filesystem. To summarize,
|
||||
* Every file that the copyparty process had the OS-level permissions to read, could be retrieved over HTTP without password authentication
|
||||
* However, an attacker would need to know the full (or copyparty-module-relative) path to the file; it was luckily impossible to list directory contents to discover files on the server
|
||||
* You may have been running copyparty with some mitigations against this:
|
||||
* [prisonparty](https://github.com/9001/copyparty/tree/hovudstraum/bin#prisonpartysh) limited the scope of access to files which were intentionally given to copyparty for sharing; meaning all volumes, as well as the following read-only filesystem locations: `/bin`, `/lib`, `/lib32`, `/lib64`, `/sbin`, `/usr`, `/etc/alternatives`
|
||||
* the [nix package](https://github.com/9001/copyparty#nix-package) has a similar mitigation implemented using systemd concepts
|
||||
* [docker containers](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) would only expose the files which were intentionally mounted into the container, so even better
|
||||
* More conventional setups, such as just running the sfx (python or exe editions), would unfortunately expose all files readable by the current user
|
||||
* The following configurations would have made the impact much worse:
|
||||
* running copyparty as root
|
||||
|
||||
So, three years, and finally a CVE -- which has been there since day one... Not great huh. There is a list of all the copyparty alternatives that I know of in the `similar software` link above.
|
||||
|
||||
Thanks for flying copyparty! And especially if you decide to continue doing so :-)
|
||||
|
||||
## new features
|
||||
* #43 volflags to specify thumbnailer behavior per-volume;
|
||||
* `--th-no-crop` / volflag `nocrop` to specify whether autocrop should be disabled
|
||||
* `--th-size` / volflag `thsize` to set a custom thumbnail resolution
|
||||
* `--th-convt` / volflag `convt` to specify conversion timeout
|
||||
* #45 resulted in a handful of opportunities to tighten security in intentionally-dangerous setups (public folders with anonymous uploads enabled):
|
||||
* a new permission, `a` (in addition to the existing `rwmdgG`), to show the uploader-IP and upload-time for each file in the file listing
|
||||
* accidentally incompatible with the `d2t` volflag (will be fixed in the next ver)
|
||||
* volflag `nohtml` is a good defense against (un)intentional XSS; it returns HTML-files and markdown-files as plaintext instead of rendering them, meaning any malicious `<script>` won't run -- bad idea for regular use since it breaks fundamental functionality, but good when you really need it
|
||||
* the README-previews below the file-listing still renders as usual, as this is fine thanks to the sandbox
|
||||
* a new eventhook `--xban` to run a plugin when copyparty decides to ban someone (for password bruteforcing or excessive 404's), for example to blackhole the IP using fail2ban or similar
|
||||
|
||||
## bugfixes
|
||||
* **fixes a path traversal vulnerability,** https://github.com/9001/copyparty/security/advisories/GHSA-pxfv-7rr3-2qjg / [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474)
|
||||
* HUGE thanks to @TheHackyDog for reporting this !!
|
||||
* if you use a reverse proxy, you can check if you have been exploited like so:
|
||||
* nginx: grep your logs for URLs containing both `.cpr/` and `%2[^0]`, for example using the following command:
|
||||
```bash
|
||||
(gzip -dc access.log.*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -E 'cpr/.*%2[^0]' | grep -vF data:image/svg
|
||||
```
|
||||
* 77f1e5144455eb946db7368792ea11c934f0f6da fixes an extremely unlikely race-condition (see the commit for details)
|
||||
* 8f59afb1593a75b8ce8c91ceee304097a07aea6e fixes another race-condition which is a bit worse:
|
||||
* the unpost feature could collide with other database activity, with the worst-case outcome being aborted batch operations, for example a directory move or a batch-rename which stops halfway
|
||||
|
||||
----
|
||||
|
||||
# 💾 what to download?
|
||||
| download link | is it good? | description |
|
||||
| -- | -- | -- |
|
||||
| **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** | ✅ the best 👍 | runs anywhere! only needs python |
|
||||
| [a docker image](https://github.com/9001/copyparty/blob/hovudstraum/scripts/docker/README.md) | it's ok | good if you prefer docker 🐋 |
|
||||
| [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) | ⚠️ [acceptable](https://github.com/9001/copyparty#copypartyexe) | for [win8](https://user-images.githubusercontent.com/241032/221445946-1e328e56-8c5b-44a9-8b9f-dee84d942535.png) or later; built-in thumbnailer |
|
||||
| [u2c.exe](https://github.com/9001/copyparty/releases/download/v1.7.1/u2c.exe) | ⚠️ acceptable | [CLI uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) as a win7+ exe ([video](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm)) |
|
||||
| [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) | ⛔️ [dangerous](https://github.com/9001/copyparty#copypartyexe) | for [win7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png) -- never expose to the internet! |
|
||||
| [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.8.2/copyparty-winpe64.exe) | ⛔️ dangerous | runs on [64bit WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png), otherwise useless |
|
||||
|
||||
* except for [u2c.exe](https://github.com/9001/copyparty/releases/download/v1.7.1/u2c.exe), all of the options above are equivalent
|
||||
* the zip and tar.gz files below are just source code
|
||||
* python packages are available at [PyPI](https://pypi.org/project/copyparty/#files)
|
||||
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0707-2220 `v1.8.1` in case of 404
|
||||
|
||||
## new features
|
||||
* [handlers](https://github.com/9001/copyparty/tree/hovudstraum/bin/handlers); change the behavior of 404 / 403 with plugins
|
||||
* makes it possible to use copyparty as a [caching proxy](https://github.com/9001/copyparty/blob/hovudstraum/bin/handlers/caching-proxy.py)
|
||||
* #42 add mpv + streamlink support to [very-bad-idea](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag#dangerous-plugins)
|
||||
* add support for Pillow 10
|
||||
* also improved text rendering in icons
|
||||
* mention the [fedora package](https://github.com/9001/copyparty#fedora-package) in the readme
|
||||
|
||||
## bugfixes
|
||||
* theme 6 (hacker) didn't show the state of some toggle-switches
|
||||
* windows: keep quickedit enabled when hashing passwords interactively
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0626-0005 `v1.8.0` argon
|
||||
|
||||
News: if you use rclone as a copyparty webdav client, upgrading to [rclone v1.63](https://github.com/rclone/rclone/releases/tag/v1.63.0) (just released) will give you [a huge speed boost](https://github.com/rclone/rclone/pull/6897) for small files
|
||||
|
||||
## new features
|
||||
* #39 hashed passwords
|
||||
* instead of keeping plaintext account passwords in config files, you can now store hashed ones instead
|
||||
* `--ah-alg` specifies algorithm; best to worst: `argon2`, `scrypt`, `sha2`, or the default `none`
|
||||
* the default settings of each algorithm takes `0.4 sec` to hash a password, and argon2 eats `256 MiB` RAM
|
||||
* can be adjusted with optional comma-separated args after the algorithm name; see `--help-pwhash`
|
||||
* `--ah-salt` is the [static salt](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#hashed-passwords) for all passwords, and is autogenerated-and-persisted if not specified
|
||||
* `--ah-cli` switches copyparty into a shell where you can hash passwords interactively (sketched below this list)
|
||||
* but copyparty will also autoconvert any unhashed passwords on startup and give you the values to insert into the config anyways
|
||||
* #40 volume size limit
|
||||
* volflag `vmaxb` specifies max size of a volume
|
||||
* volflag `vmaxn` specifies max number of files in a volume
|
||||
* example: `-v [...]:c,vmaxb=900g:c,vmaxn=20k` blocks uploads if the volume reaches 900 GiB or a total of 20480 files
|
||||
* good alternative to `--df` since it works per-volume
|
||||
|
||||
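a sketch of how the password-hashing flags combine; `argon2` is one of the algorithms listed above:

```bash
# opens the interactive hasher; paste each plaintext password and copy the
# resulting hash into your config in place of the plaintext
python3 copyparty-sfx.py --ah-alg argon2 --ah-cli
```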
## bugfixes
|
||||
* autogenerated TLS certs didn't include the mDNS name
|
||||
|
||||
## other changes
|
||||
* improved cloudflare challenge detection
|
||||
* markdown edits will now trigger upload hooks
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0611-0814 `v1.7.6` NO_COLOR
|
||||
|
||||
## new features
|
||||
* #31 `--grid` shows thumbnails instead of file-list by default
|
||||
* #28 `--unlist` regex-exclude files from browser listings
|
||||
* for example `--unlist '\.(js|css)$'` hides all `.js` and `.css` files
|
||||
* **purely cosmetic!** the files are still fully accessible, and still appear in API calls
|
||||
* auto-generate TLS certificates on startup / network-change
|
||||
* mostly good for LAN, requires [cfssl](https://github.com/cloudflare/cfssl/releases/latest), can be disabled with `--no-crt`
|
||||
* creates a self-signed CA and certs with SANs of all detected server IPs
|
||||
* so it's still recommended to use a reverse-proxy / letsencrypt for WAN servers
|
||||
* the default `--fk-salt` is now much stronger
|
||||
* all existing installations will keep the previously selected seed -- you can choose to upgrade by deleting `~/.config/copyparty/cert.pem` but this will change all filekeys / per-file passwords
|
||||
* the `NO_COLOR` environment-variable is now supported, removing colors from stdout
|
||||
* see https://no-color.org/ and more importantly https://youtu.be/biW5UVGkPMA?t=150
|
||||
* `--ansi` and `--no-ansi` can also be used to force-enable/disable colored output
|
||||
* #33 disable colors when stdout is redirected to a pipe/file -- by @clach04
|
||||
* #32 simplify building sfx from source
|
||||
* upgraded [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) to [python 3.11.4](https://pythoninsider.blogspot.com/2023/06/python-3114-31012-3917-3817-3717-and.html)
|
||||
|
||||
## bugfixes
|
||||
* #30 `--ftps` didn't work without `--ftp`
|
||||
* tiny css bug in light themes (opaque thumbnail controls)
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0513-0000 `v1.7.2` hard resolve
|
||||
|
||||
|
||||
@@ -4,8 +4,9 @@
|
||||
* [future plans](#future-plans) - some improvement ideas
|
||||
* [design](#design)
|
||||
* [up2k](#up2k) - quick outline of the up2k protocol
|
||||
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
|
||||
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
|
||||
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
|
||||
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
|
||||
* [hashed passwords](#hashed-passwords) - regarding the curious decisions
|
||||
* [http api](#http-api)
|
||||
* [read](#read)
|
||||
* [write](#write)
|
||||
@@ -68,14 +69,14 @@ regarding the frequent server log message during uploads;
|
||||
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
|
||||
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left
|
||||
|
||||
## why not tus
|
||||
### why not tus
|
||||
|
||||
I didn't know about [tus](https://tus.io/) when I made this, but:
|
||||
* up2k has the advantage that it supports parallel uploading of non-contiguous chunks straight into the final file (sketched below) -- [tus does a merge at the end](https://tus.io/protocols/resumable-upload.html#concatenation) which is slow and taxing on the server HDD / filesystem (unless i'm misunderstanding)
|
||||
* up2k has the slight disadvantage of requiring the client to hash the entire file before an upload can begin, but this has the benefit of immediately skipping duplicate files
|
||||
* and the hashing happens in a separate thread anyways so it's usually not a bottleneck
|
||||
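a rough sketch of what writing non-contiguous chunks straight into the final file looks like -- illustrative only, not the actual up2k server code; chunk size and helper names are made up:

```python
# illustrative only -- not the actual up2k server code:
# chunks may arrive in any order and are written at their final offset,
# so no merge step is needed when the upload completes
CHUNK = 4 * 1024 * 1024  # pretend chunk size

def preallocate(path: str, total_size: int) -> None:
    # create the final file once, at its full size
    with open(path, "wb") as f:
        f.truncate(total_size)

def write_chunk(path: str, chunk_idx: int, data: bytes) -> None:
    # chunks 3, 0, 7, ... can land in any order from parallel connections
    with open(path, "r+b") as f:
        f.seek(chunk_idx * CHUNK)
        f.write(data)
```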
|
||||
## why chunk-hashes
|
||||
### why chunk-hashes
|
||||
|
||||
a single sha512 would be better, right?
|
||||
|
||||
@@ -92,6 +93,15 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x
|
||||
* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids
|
||||
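a minimal illustration of the chunk-hash idea described above -- one digest per fixed-size chunk instead of a single whole-file hash; the chunk size and the sha512 choice here are just for illustration:

```python
# minimal illustration of chunk-hashing: one digest per fixed-size chunk
# instead of a single whole-file hash (chunk size chosen arbitrarily here)
import hashlib

def chunk_hashes(path: str, chunk_size: int = 4 * 1024 * 1024) -> list[str]:
    out = []
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunk_size)
            if not buf:
                break
            out.append(hashlib.sha512(buf).hexdigest())
    return out

# a chunk that fails verification can be retried on its own,
# and chunks can be hashed / uploaded in parallel
```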
|
||||
|
||||
# hashed passwords
|
||||
|
||||
regarding the curious decisions
|
||||
|
||||
there is a static salt for all passwords;
|
||||
* because most copyparty APIs allow users to authenticate using only their password; the username may be unknown, which makes per-account salts impossible
|
||||
* the drawback is that an attacker can bruteforce all accounts in parallel; however, most copyparty instances only have a handful of accounts in the first place, and it can be compensated for by increasing the hashing cost anyways (see the sketch below)
|
||||
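a minimal sketch of hashing with one static salt shared by all passwords, using the `argon2-cffi` low-level API; the salt value and cost parameters are illustrative, not copyparty's:

```python
# minimal sketch: one static salt shared by every password
# (argon2-cffi low-level API; parameters are illustrative, not copyparty's)
from argon2.low_level import Type, hash_secret

STATIC_SALT = b"example-static-salt"  # normally autogenerated and persisted

def hash_pw(password: str) -> bytes:
    return hash_secret(
        password.encode("utf-8"),
        STATIC_SALT,
        time_cost=3,
        memory_cost=64 * 1024,  # KiB
        parallelism=4,
        hash_len=24,
        type=Type.ID,
    )

# the same password always maps to the same hash, so lookup by
# password alone works -- the tradeoff described above
print(hash_pw("hunter2"))
```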
|
||||
|
||||
# http api
|
||||
|
||||
* table-column `params` = URL parameters; `?foo=bar&qux=...`
|
||||
@@ -115,8 +125,14 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
| GET | `?b` | list files/folders at URL as simplified HTML |
|
||||
| GET | `?tree=.` | list one level of subdirectories inside URL |
|
||||
| GET | `?tree` | list one level of subdirectories for each level until URL |
|
||||
| GET | `?tar` | download everything below URL as a tar file |
|
||||
| GET | `?zip=utf-8` | download everything below URL as a zip file |
|
||||
| GET | `?tar` | download everything below URL as a gnu-tar file |
|
||||
| GET | `?tar=gz:9` | ...as a gzip-level-9 gnu-tar file |
|
||||
| GET | `?tar=xz:9` | ...as an xz-level-9 gnu-tar file |
|
||||
| GET | `?tar=pax` | ...as a pax-tar file |
|
||||
| GET | `?tar=pax,xz` | ...as an xz-level-1 pax-tar file |
|
||||
| GET | `?zip=utf-8` | ...as a zip file |
|
||||
| GET | `?zip` | ...as a WinXP-compatible zip file |
|
||||
| GET | `?zip=crc` | ...as an MSDOS-compatible zip file |
|
||||
| GET | `?ups` | show recent uploads from your IP |
|
||||
| GET | `?ups&filter=f` | ...where URL contains `f` |
|
||||
| GET | `?mime=foo` | specify return mimetype `foo` |
|
||||
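a quick sketch of hitting the API in the table above from python, authenticating with the `pw` url-param and downloading a folder as a tar file; the server URL and password are placeholders:

```python
# quick sketch: download a folder as tar via the API table above
# (server URL and password are placeholders)
import shutil
import urllib.request

base = "http://127.0.0.1:3923/some/folder/"
pw = "hunter2"

# GET ?tar downloads everything below the URL as a gnu-tar file;
# auth goes in the pw url-param (a Cookie: cppwd= header also works)
with urllib.request.urlopen(base + "?tar&pw=" + pw) as resp:
    with open("folder.tar", "wb") as f:
        shutil.copyfileobj(resp, f)
```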
|
||||
33
docs/examples/docker/basic-docker-compose/copyparty.conf
Normal file
@@ -0,0 +1,33 @@
|
||||
# not actually YAML but lets pretend:
|
||||
# -*- mode: yaml -*-
|
||||
# vim: ft=yaml:
|
||||
|
||||
|
||||
[global]
|
||||
e2dsa # enable file indexing and filesystem scanning
|
||||
e2ts # enable multimedia indexing
|
||||
ansi # enable colors in log messages
|
||||
|
||||
# q, lo: /cfg/log/%Y-%m%d.log # log to file instead of docker
|
||||
|
||||
# ftp: 3921 # enable ftp server on port 3921
|
||||
# p: 3939 # listen on another port
|
||||
# ipa: 10.89. # only allow connections from 10.89.*
|
||||
# df: 16 # stop accepting uploads if less than 16 GB free disk space
|
||||
# ver # show copyparty version in the controlpanel
|
||||
# grid # show thumbnails/grid-view by default
|
||||
# theme: 2 # monokai
|
||||
# name: datasaver # change the server-name that's displayed in the browser
|
||||
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
|
||||
# no-robots, force-js # make it harder for search engines to read your server
|
||||
|
||||
|
||||
[accounts]
|
||||
ed: wark # username: password
|
||||
|
||||
|
||||
[/] # create a volume at "/" (the webroot), which will
|
||||
/w # share /w (the docker data volume)
|
||||
accs:
|
||||
rw: * # everyone gets read-write access, but
|
||||
rwmda: ed # the user "ed" gets read-write-move-delete-admin
|
||||
20
docs/examples/docker/basic-docker-compose/docker-compose.yml
Normal file
@@ -0,0 +1,20 @@
|
||||
version: '3'
|
||||
services:
|
||||
|
||||
copyparty:
|
||||
image: copyparty/ac:latest
|
||||
container_name: copyparty
|
||||
user: "1000:1000"
|
||||
ports:
|
||||
- 3923:3923
|
||||
volumes:
|
||||
- ./:/cfg:z
|
||||
- /path/to/your/fileshare/top/folder:/w:z
|
||||
|
||||
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
|
||||
interval: 1m
|
||||
timeout: 2s
|
||||
retries: 5
|
||||
start_period: 15s
|
||||
72
docs/examples/docker/idp/copyparty.conf
Normal file
@@ -0,0 +1,72 @@
|
||||
# not actually YAML but lets pretend:
|
||||
# -*- mode: yaml -*-
|
||||
# vim: ft=yaml:
|
||||
|
||||
|
||||
# example config for how copyparty can be used with an identity
|
||||
# provider, replacing the built-in authentication/authorization
|
||||
# mechanism, and instead expecting the reverse-proxy to provide
|
||||
# the requester's username (and possibly a group-name, for
|
||||
# optional group-based access control)
|
||||
#
|
||||
# the filesystem-path `/w` is used as the storage location
|
||||
# because that is the data-volume in the docker containers,
|
||||
# because a deployment like this (with an IdP) is more commonly
|
||||
# seen in containerized environments -- but this is not required
|
||||
|
||||
|
||||
[global]
|
||||
e2dsa # enable file indexing and filesystem scanning
|
||||
e2ts # enable multimedia indexing
|
||||
ansi # enable colors in log messages
|
||||
|
||||
# enable IdP support by expecting username/groupname in
|
||||
# http-headers provided by the reverse-proxy; header "X-IdP-User"
|
||||
# will contain the username, "X-IdP-Group" the groupname
|
||||
idp-h-usr: x-idp-user
|
||||
idp-h-grp: x-idp-group
|
||||
|
||||
|
||||
[/] # create a volume at "/" (the webroot), which will
|
||||
/w # share /w (the docker data volume)
|
||||
accs:
|
||||
rw: * # everyone gets read-write access, but
|
||||
rwmda: %su # the group "su" gets read-write-move-delete-admin
|
||||
|
||||
|
||||
[/u/${u}] # each user gets their own home-folder at /u/username
|
||||
/w/u/${u} # which will be "u/username" in the docker data volume
|
||||
accs:
|
||||
r: * # read-access for anyone, and
|
||||
rwmda: ${u}, %su # read-write-move-delete-admin for that username + the "su" group
|
||||
|
||||
|
||||
[/u/${u}/priv] # each user also gets a private area at /u/username/priv
|
||||
/w/u/${u}/priv # stored at DATAVOLUME/u/username/priv
|
||||
accs:
|
||||
rwmda: ${u}, %su # read-write-move-delete-admin for that username + the "su" group
|
||||
|
||||
|
||||
[/lounge/${g}] # each group gets their own shared volume
|
||||
/w/lounge/${g} # stored at DATAVOLUME/lounge/groupname
|
||||
accs:
|
||||
r: * # read-access for anyone, and
|
||||
rwmda: %${g}, %su # read-write-move-delete-admin for that group + the "su" group
|
||||
|
||||
|
||||
[/lounge/${g}/priv] # and a private area for each group too
|
||||
/w/lounge/${g}/priv # stored at DATAVOLUME/lounge/groupname/priv
|
||||
accs:
|
||||
rwmda: %${g}, %su # read-write-move-delete-admin for that group + the "su" group
|
||||
|
||||
|
||||
# and create some strategic volumes to prevent anyone from gaining
|
||||
# unintended access to priv folders if the users/groups db is lost
|
||||
[/u]
|
||||
/w/u
|
||||
accs:
|
||||
rwmda: %su
|
||||
[/lounge]
|
||||
/w/lounge
|
||||
accs:
|
||||
rwmda: %su
|
||||
@@ -28,10 +28,6 @@ https://github.com/nayuki/QR-Code-generator/
|
||||
C: Project Nayuki
|
||||
L: MIT
|
||||
|
||||
https://github.com/python/cpython/blob/3.10/Lib/asyncore.py
|
||||
C: 1996 Sam Rushing
|
||||
L: ISC
|
||||
|
||||
https://github.com/ahupp/python-magic/
|
||||
C: 2001-2014 Adam Hupp
|
||||
L: MIT
|
||||
|
||||
@@ -148,7 +148,7 @@ symbol legend,
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ | █ |
|
||||
| download folder as zip | █ | █ | █ | █ | ╱ | | █ | | █ | █ | ╱ | █ |
|
||||
| download folder as tar | █ | | | | | | | | | █ | | |
|
||||
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ | |
|
||||
@@ -183,6 +183,7 @@ symbol legend,
|
||||
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge
|
||||
|
||||
* `a`/copyparty can reject uploaded files (based on complex conditions), for example [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
|
||||
* `e`/seafile download-as-zip is not streaming; it creates the full zipfile before download can start, and fails on big folders
|
||||
* `j`/filebrowser remarks:
|
||||
* can provide checksums for single files on request
|
||||
* can probably do extension/mimetype rejection similar to copyparty
|
||||
@@ -254,7 +255,7 @@ symbol legend,
|
||||
| per-file permissions | | | | █ | █ | | █ | | █ | | | |
|
||||
| per-file passwords | █ | | | █ | █ | | █ | | █ | | | |
|
||||
| unmap subfolders | █ | | | | | | █ | | | █ | ╱ | • |
|
||||
| index.html blocks list | | | | | | | █ | | | • | | |
|
||||
| index.html blocks list | ╱ | | | | | | █ | | | • | | |
|
||||
| write-only folders | █ | | | | | | | | | | █ | █ |
|
||||
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ | █ |
|
||||
| file versioning | | | | █ | █ | | | | | | | |
|
||||
@@ -290,6 +291,7 @@ symbol legend,
|
||||
* one-way folder sync from local to server can be done efficiently with [u2c.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#u2cpy), or with webdav and conventional rsync
|
||||
* can hot-reload config files (with just a few exceptions)
|
||||
* can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead
|
||||
* `index.html` on its own does not prevent directory listing, but permission `h` (instead of `r`) forces index.html to be returned instead of the folder contents
|
||||
* [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative
|
||||
* upload history can be visualized using [partyjournal](https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py)
|
||||
* `k`/filegator remarks:
|
||||
@@ -432,6 +434,7 @@ symbol legend,
|
||||
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ uploading small files is slow; `2.2` files per sec (copyparty does `87`/sec), tested locally with [linuxserver/nextcloud](https://hub.docker.com/r/linuxserver/nextcloud) (sqlite)
|
||||
* ⚠️ no write-only / upload-only folders
|
||||
* ⚠️ http/webdav only; no ftp, zeroconf
|
||||
* ⚠️ less awesome music player
|
||||
@@ -451,7 +454,9 @@ symbol legend,
|
||||
* *much worse than nextcloud* in that regard
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ uploading small files is slow; `2.7` files per sec (copyparty does `87`/sec), tested locally with [official container](https://manual.seafile.com/docker/deploy_seafile_with_docker/)
|
||||
* ⚠️ no write-only / upload-only folders
|
||||
* ⚠️ big folders cannot be zip-downloaded
|
||||
* ⚠️ http/webdav only; no ftp, zeroconf
|
||||
* ⚠️ less awesome music player
|
||||
* ⚠️ doesn't run on android or ipads
|
||||
|
||||
@@ -48,6 +48,7 @@ thumbnails2 = ["pyvips"]
|
||||
audiotags = ["mutagen"]
|
||||
ftpd = ["pyftpdlib"]
|
||||
ftps = ["pyftpdlib", "pyopenssl"]
|
||||
pwhash = ["argon2-cffi"]
|
||||
|
||||
[project.scripts]
|
||||
copyparty = "copyparty.__main__:main"
|
||||
@@ -99,6 +100,10 @@ include_trailing_comma = true
|
||||
[tool.bandit]
|
||||
skips = ["B104", "B110", "B112"]
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 120
|
||||
ignore = ["E402", "E722"]
|
||||
|
||||
# =====================================================================
|
||||
|
||||
[tool.pylint.MAIN]
|
||||
|
||||
68
scripts/bench/filehash.sh
Executable file
@@ -0,0 +1,68 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# check how fast copyparty is able to hash files during indexing
|
||||
# assuming an infinitely fast HDD to read from (alternatively,
|
||||
# checks whether you will be bottlenecked by CPU or HDD)
|
||||
#
|
||||
# uses copyparty's default config of using, well, it's complicated:
|
||||
# * if you have more than 8 cores, then 5 threads,
|
||||
# * if you have between 4 and 8, then 4 threads,
|
||||
# * anything less and it takes your number of cores
|
||||
#
|
||||
# can be adjusted with --hash-mt (but alpine caps out at 5)
|
||||
|
||||
[ $# -ge 1 ] || {
|
||||
echo 'need arg 1: path to copyparty-sfx.py'
|
||||
echo ' (remaining args will be passed on to copyparty,'
|
||||
echo ' for example to tweak the hasher settings)'
|
||||
exit 1
|
||||
}
|
||||
sfx="$1"
|
||||
shift
|
||||
sfx="$(realpath "$sfx" || readlink -e "$sfx" || echo "$sfx")"
|
||||
awk=$(command -v gawk || command -v awk)
|
||||
|
||||
# try to use /dev/shm to avoid hitting filesystems at all,
|
||||
# otherwise fallback to mktemp which probably uses /tmp
|
||||
td=/dev/shm/cppbenchtmp
|
||||
mkdir $td || td=$(mktemp -d)
|
||||
trap "rm -rf $td" INT TERM EXIT
|
||||
cd $td
|
||||
|
||||
echo creating 256 MiB testfile in $td
|
||||
head -c $((1024*1024*256)) /dev/urandom > 1
|
||||
|
||||
echo creating 127 symlinks to it
|
||||
for n in $(seq 2 128); do ln -s 1 $n; done
|
||||
|
||||
echo warming up cache
|
||||
cat 1 >/dev/null
|
||||
|
||||
echo ok lets go
|
||||
python3 "$sfx" -p39204 -e2dsa --dbd=yolo --exit=idx -lo=t -q "$@"
|
||||
|
||||
echo and the results are...
|
||||
$awk '/1 volumes in / {printf "%s MiB/s\n", 256*128/$(NF-1)}' <t
|
||||
|
||||
echo deleting $td and exiting
|
||||
|
||||
##
|
||||
## some results:
|
||||
|
||||
# MiB/s @ cpu or device (copyparty, pythonver, distro/os) // comment
|
||||
|
||||
# 3608 @ Ryzen 5 4500U (cpp 1.9.5, py 3.11.5, fedora 38) // --hash-mt=6; laptop
|
||||
# 2726 @ Ryzen 5 4500U (cpp 1.9.5, py 3.11.5, fedora 38) // --hash-mt=4 (old-default)
|
||||
# 2202 @ Ryzen 5 4500U (cpp 1.9.5, py 3.11.5, docker-alpine 3.18.3) ??? alpine slow
|
||||
# 2719 @ Ryzen 5 4500U (cpp 1.9.5, py 3.11.2, docker-debian 12.1)
|
||||
|
||||
# 5544 @ Intel i5-12500 (cpp 1.9.5, py 3.11.2, debian 12.0) // --hash-mt=12; desktop
|
||||
# 5197 @ Ryzen 7 3700X (cpp 1.9.5, py 3.9.18, freebsd 13.2) // --hash-mt=8; 2u server
|
||||
# 2606 @ Ryzen 7 3700X (cpp 1.9.5, py 3.9.18, freebsd 13.2) // --hash-mt=4 (old-default)
|
||||
# 1436 @ Ryzen 5 5500U (cpp 1.9.5, py 3.11.4, alpine 3.18.3) // nuc
|
||||
# 1065 @ Pixel 7 (cpp 1.9.5, py 3.11.5, termux 2023-09)
|
||||
|
||||
# notes,
|
||||
# podman run --rm -it --shm-size 512m --entrypoint /bin/ash localhost/copyparty-min
|
||||
# podman <filehash.sh run --rm -i --shm-size 512m --entrypoint /bin/ash localhost/copyparty-min -s - /z/copyparty-sfx.py
|
||||
@@ -3,6 +3,7 @@ WORKDIR /z
|
||||
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=4.3.0 \
|
||||
ver_dompf=3.0.5 \
|
||||
ver_mde=2.18.0 \
|
||||
ver_codemirror=5.65.12 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
@@ -13,6 +14,7 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
|
||||
# https://github.com/markedjs/marked/releases
|
||||
# https://github.com/Ionaru/easy-markdown-editor/tags
|
||||
# https://github.com/codemirror/codemirror5/releases
|
||||
# https://github.com/cure53/DOMPurify/releases
|
||||
# https://github.com/Daninet/hash-wasm/releases
|
||||
# https://github.com/openpgpjs/asmcrypto.js
|
||||
# https://github.com/google/zopfli/tags
|
||||
@@ -23,10 +25,12 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
|
||||
RUN mkdir -p /z/dist/no-pk \
|
||||
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
|
||||
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
|
||||
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
|
||||
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
|
||||
&& wget https://github.com/codemirror/codemirror5/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
|
||||
&& wget https://github.com/cure53/DOMPurify/archive/refs/tags/$ver_dompf.tar.gz -O dompurify.tgz \
|
||||
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
|
||||
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
|
||||
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
|
||||
@@ -48,6 +52,7 @@ RUN mkdir -p /z/dist/no-pk \
|
||||
&& cd easy-markdown-editor* \
|
||||
&& npm install \
|
||||
&& npm i gulp-cli -g ) \
|
||||
&& tar -xf dompurify.tgz \
|
||||
&& tar -xf prism.tgz \
|
||||
&& unzip fontawesome.zip \
|
||||
&& tar -xf zopfli.tgz
|
||||
@@ -120,6 +125,10 @@ RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& cp -pv dist/easymde.min.js /z/dist/easymde.js
|
||||
|
||||
|
||||
# build dompurify
|
||||
RUN (echo; cat DOMPurify-$ver_dompf/dist/purify.min.js) >> /z/dist/marked.js
|
||||
|
||||
|
||||
# build fontawesome and scp
|
||||
COPY mini-fa.sh /z
|
||||
COPY mini-fa.css /z
|
||||
@@ -134,10 +143,11 @@ RUN ./genprism.sh $ver_prism
|
||||
|
||||
|
||||
# compress
|
||||
COPY zopfli.makefile /z/dist/Makefile
|
||||
COPY brotli.makefile zopfli.makefile /z/dist/
|
||||
RUN cd /z/dist \
|
||||
&& make -j$(nproc) \
|
||||
&& rm Makefile \
|
||||
&& make -j$(nproc) -f brotli.makefile \
|
||||
&& make -j$(nproc) -f zopfli.makefile \
|
||||
&& rm *.makefile \
|
||||
&& mv no-pk/* . \
|
||||
&& rmdir no-pk
|
||||
|
||||
|
||||
4
scripts/deps-docker/brotli.makefile
Normal file
@@ -0,0 +1,4 @@
|
||||
all: $(addsuffix .br, $(wildcard easymde*))
|
||||
|
||||
%.br: %
|
||||
brotli -jZ $<
|
||||
@@ -1,10 +1,6 @@
|
||||
all: $(addsuffix .gz, $(wildcard *.*))
|
||||
all: $(addsuffix .gz, $(wildcard *.js *.css))
|
||||
|
||||
%.gz: %
|
||||
#brotli -q 11 $<
|
||||
pigz -11 -I 573 $<
|
||||
|
||||
# pigz -11 -J 34 -I 100 -F < $< > $@.first
|
||||
|
||||
# disabling brotli after all since the gain is meh
|
||||
# and it bloats sfx and wheels by like 70%
|
||||
|
||||
@@ -5,11 +5,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-ac" \
|
||||
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
|
||||
RUN apk --no-cache add !pyc \
|
||||
wget \
|
||||
py3-pillow \
|
||||
py3-argon2-cffi py3-pillow \
|
||||
ffmpeg \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& mkdir /cfg /w \
|
||||
@@ -19,4 +20,4 @@ RUN apk --no-cache add !pyc \
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
@@ -5,14 +5,15 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-dj" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
RUN apk add -U !pyc \
|
||||
wget \
|
||||
py3-pillow py3-pip py3-cffi \
|
||||
py3-argon2-cffi py3-pillow py3-pip py3-cffi \
|
||||
ffmpeg \
|
||||
vips-jxl vips-heif vips-poppler vips-magick \
|
||||
py3-numpy fftw libsndfile \
|
||||
@@ -22,6 +23,7 @@ RUN apk add -U !pyc \
|
||||
python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
|
||||
py3-wheel py3-numpy-dev \
|
||||
vamp-sdk-dev \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& python3 -m pip install pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& apk del py3-pip .bd \
|
||||
@@ -35,4 +37,8 @@ RUN apk add -U !pyc \
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
# size: 286 MB
|
||||
# bpm/key: 529 sec
|
||||
# idx-bench: 2352 MB/s
|
||||
|
||||
78
scripts/docker/Dockerfile.djd
Normal file
@@ -0,0 +1,78 @@
|
||||
FROM debian:12-slim
|
||||
WORKDIR /z
|
||||
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-djd" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection, and higher performance than the other editions"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
|
||||
# Suites: bookworm bookworm-updates
|
||||
# Components: main
|
||||
RUN sed -ri 's/( main)$/\1 contrib non-free non-free-firmware/' /etc/apt/sources.list.d/debian.sources \
|
||||
&& apt update \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt install -y --no-install-recommends \
|
||||
wget \
|
||||
python3-argon2 python3-pillow python3-pip \
|
||||
ffmpeg libvips42 vamp-plugin-sdk \
|
||||
python3-numpy libfftw3-double3 libsndfile1 \
|
||||
gcc g++ make cmake patchelf jq \
|
||||
libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev \
|
||||
libfftw3-dev python3-dev libsndfile1-dev python3-pip \
|
||||
patchelf cmake \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& python3 -m pip install --user pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt purge -y \
|
||||
gcc g++ make cmake patchelf jq \
|
||||
libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev \
|
||||
libfftw3-dev python3-dev libsndfile1-dev python3-pip \
|
||||
patchelf cmake \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get autoremove -y \
|
||||
&& dpkg -r --force-depends libraqm0 libgdbm-compat4 libgdbm6 libperl5.36 perl-modules-5.36 mailcap mime-support \
|
||||
&& DEBIAN_FRONTEND=noninteractive apt-get clean -y \
|
||||
&& find /usr/ -name __pycache__ | xargs rm -rf \
|
||||
&& find /usr/ -type d -name tests | grep packages/numpy | xargs rm -rf \
|
||||
&& rm -rf \
|
||||
/var/lib/apt/lists/* \
|
||||
/tmp/pyc \
|
||||
/usr/lib/python*/dist-packages/pip \
|
||||
/usr/lib/python*/dist-packages/setuptools \
|
||||
/usr/lib/*/dri \
|
||||
/usr/lib/*/mfx \
|
||||
/usr/share/doc \
|
||||
/usr/share/X11 \
|
||||
/usr/share/fonts \
|
||||
/usr/share/libmysofa \
|
||||
/usr/share/libthai \
|
||||
/usr/share/alsa \
|
||||
/usr/share/bash-completion \
|
||||
&& chmod 777 /root \
|
||||
&& ln -s /root/vamp /root/.local / \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
# size: 598 MB
|
||||
# bpm/key: 485 sec
|
||||
# idx-bench: 2751 MB/s
|
||||
|
||||
# notes:
|
||||
# libraqm0 (pillow dep) pulls in the other packages mentioned on the dpkg line; saves 50m
|
||||
|
||||
# advantage: official packages only
|
||||
# advantage: ffmpeg with gme, codec2, radiance-hdr
|
||||
# drawback: ffmpeg bloat; dc1394, flite, mfx, xorg
|
||||
# drawback: python packaging is a bit jank
|
||||
# drawback: they apply exciting patches due to old deps
|
||||
# drawback: dropping perl the hard way might cause issues
|
||||
69
scripts/docker/Dockerfile.djf
Normal file
@@ -0,0 +1,69 @@
|
||||
FROM fedora:38
|
||||
WORKDIR /z
|
||||
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-djf" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection, and higher performance than the other editions"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
RUN dnf install -y \
|
||||
https://mirrors.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm \
|
||||
https://mirrors.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$(rpm -E %fedora).noarch.rpm \
|
||||
&& dnf install -y --setopt=install_weak_deps=False \
|
||||
wget \
|
||||
python3-argon2-cffi python3-pillow python3-pip python3-cffi \
|
||||
ffmpeg \
|
||||
vips vips-jxl vips-poppler vips-magick \
|
||||
python3-numpy fftw libsndfile \
|
||||
gcc gcc-c++ make cmake patchelf jq \
|
||||
python3-devel ffmpeg-devel fftw-devel libsndfile-devel python3-setuptools \
|
||||
vamp-plugin-sdk qm-vamp-plugins \
|
||||
vamp-plugin-sdk-devel vamp-plugin-sdk-static \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& python3 -m pip install --user pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& dnf erase -y \
|
||||
gcc gcc-c++ make cmake patchelf jq \
|
||||
python3-devel ffmpeg-devel fftw-devel libsndfile-devel python3-setuptools \
|
||||
vamp-plugin-sdk-devel vamp-plugin-sdk-static \
|
||||
&& dnf clean all \
|
||||
&& find /usr/ -name __pycache__ | xargs rm -rf \
|
||||
&& find /usr/ -type d -name tests | grep packages/numpy | xargs rm -rf \
|
||||
&& rm -rf \
|
||||
/usr/share/adobe \
|
||||
/usr/share/fonts \
|
||||
/usr/share/graphviz \
|
||||
/usr/share/poppler/cMap \
|
||||
/usr/share/licenses \
|
||||
/usr/share/ghostscript \
|
||||
/usr/share/tesseract \
|
||||
/usr/share/X11 \
|
||||
/usr/share/hwdata \
|
||||
/usr/share/python-wheels \
|
||||
/usr/bin/cyrusbdb2current \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& chmod 777 /root \
|
||||
&& ln -s /root/vamp /root/.local / \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
# size: 648 MB
|
||||
# bpm/key: 410 sec
|
||||
# idx-bench: 2744 MB/s
|
||||
|
||||
# advantage: fairly recent and sane ffmpeg build
|
||||
# drawback: ffmpeg without gme, codec2, radiance-hdr
|
||||
# drawback: ffmpeg from rpmfusion, which is both better and smaller than ffmpeg-free, can occasionally fail to install due to repo desync / conflicts
|
||||
# drawback: ffmpeg bloat; samba, modplug, v4l2
|
||||
# drawback: manual purging (graphviz/poppler/cmap) can break stuff
|
||||
62
scripts/docker/Dockerfile.djff
Normal file
@@ -0,0 +1,62 @@
|
||||
FROM fedora:38
|
||||
WORKDIR /z
|
||||
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
|
||||
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
|
||||
org.opencontainers.image.licenses="MIT" \
|
||||
org.opencontainers.image.title="copyparty-djff" \
|
||||
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection, and higher performance than the other editions"
|
||||
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
|
||||
XDG_CONFIG_HOME=/cfg
|
||||
|
||||
COPY i/bin/mtag/install-deps.sh ./
|
||||
COPY i/bin/mtag/audio-bpm.py /mtag/
|
||||
COPY i/bin/mtag/audio-key.py /mtag/
|
||||
RUN dnf install -y --setopt=install_weak_deps=False \
|
||||
wget \
|
||||
python3-argon2-cffi python3-pillow python3-pip python3-cffi \
|
||||
ffmpeg-free \
|
||||
vips vips-jxl vips-poppler vips-magick \
|
||||
python3-numpy fftw libsndfile \
|
||||
gcc gcc-c++ make cmake patchelf jq \
|
||||
python3-devel ffmpeg-free-devel fftw-devel libsndfile-devel python3-setuptools \
|
||||
vamp-plugin-sdk qm-vamp-plugins \
|
||||
vamp-plugin-sdk-devel vamp-plugin-sdk-static \
|
||||
&& rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
|
||||
&& python3 -m pip install --user pyvips \
|
||||
&& bash install-deps.sh \
|
||||
&& dnf erase -y \
|
||||
gcc gcc-c++ make cmake patchelf jq \
|
||||
python3-devel ffmpeg-free-devel fftw-devel libsndfile-devel python3-setuptools \
|
||||
vamp-plugin-sdk-devel vamp-plugin-sdk-static \
|
||||
&& dnf clean all \
|
||||
&& find /usr/ -name __pycache__ | xargs rm -rf \
|
||||
&& find /usr/ -type d -name tests | grep packages/numpy | xargs rm -rf \
|
||||
&& rm -rf \
|
||||
/usr/share/adobe \
|
||||
/usr/share/fonts \
|
||||
/usr/share/graphviz \
|
||||
/usr/share/poppler/cMap \
|
||||
/usr/share/licenses \
|
||||
/usr/share/ghostscript \
|
||||
/usr/share/tesseract \
|
||||
/usr/share/X11 \
|
||||
/usr/share/hwdata \
|
||||
/usr/share/python-wheels \
|
||||
/usr/bin/cyrusbdb2current \
|
||||
&& rm -rf /tmp/pyc \
|
||||
&& chmod 777 /root \
|
||||
&& ln -s /root/vamp /root/.local / \
|
||||
&& mkdir /cfg /w \
|
||||
&& chmod 777 /cfg /w \
|
||||
&& echo % /cfg > initcfg
|
||||
|
||||
COPY i/dist/copyparty-sfx.py ./
|
||||
WORKDIR /w
|
||||
EXPOSE 3923
|
||||
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]
|
||||
|
||||
# size: 673 MB
|
||||
# bpm/key: 408 sec
|
||||
# idx-bench: 2744 MB/s
|
||||
|
||||
# drawback: fewer ffmpeg features we want, more features we don't (compared to rpmfusion)
|
||||
Some files were not shown because too many files have changed in this diff.