Compare commits
139 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
987caec15d | ||
|
|
ab40ff5051 | ||
|
|
bed133d3dd | ||
|
|
829c8fca96 | ||
|
|
5b26ab0096 | ||
|
|
39554b4bc3 | ||
|
|
97d9c149f1 | ||
|
|
59688bc8d7 | ||
|
|
a18f63895f | ||
|
|
27433d6214 | ||
|
|
374c535cfa | ||
|
|
ac7815a0ae | ||
|
|
0c50ea1757 | ||
|
|
c057c5e8e8 | ||
|
|
46d667716e | ||
|
|
cba2e10d29 | ||
|
|
b1693f95cb | ||
|
|
3f00073256 | ||
|
|
d15000062d | ||
|
|
6cb3b35a54 | ||
|
|
b4031e8d43 | ||
|
|
a3ca0638cb | ||
|
|
a360ac29da | ||
|
|
9672b8c9b3 | ||
|
|
e70ecd98ef | ||
|
|
5f7ce78d7f | ||
|
|
2077dca66f | ||
|
|
91f010290c | ||
|
|
395e3386b7 | ||
|
|
a1dce0f24e | ||
|
|
c7770904e6 | ||
|
|
1690889ed8 | ||
|
|
842817d9e3 | ||
|
|
5fc04152bd | ||
|
|
1be85bdb26 | ||
|
|
2eafaa88a2 | ||
|
|
900cc463c3 | ||
|
|
97b999c463 | ||
|
|
a7cef91b8b | ||
|
|
a4a112c0ee | ||
|
|
e6bcee28d6 | ||
|
|
626b5770a5 | ||
|
|
c2f92cacc1 | ||
|
|
4f8a1f5f6a | ||
|
|
4a98b73915 | ||
|
|
00812cb1da | ||
|
|
16766e702e | ||
|
|
5e932a9504 | ||
|
|
ccab44daf2 | ||
|
|
8c52b88767 | ||
|
|
c9fd26255b | ||
|
|
0b9b8dbe72 | ||
|
|
b7723ac245 | ||
|
|
35b75c3db1 | ||
|
|
f902779050 | ||
|
|
fdddd36a5d | ||
|
|
c4ba123779 | ||
|
|
72e355eb2c | ||
|
|
43d409a5d9 | ||
|
|
b1fffc2246 | ||
|
|
edd3e53ab3 | ||
|
|
aa0b119031 | ||
|
|
eddce00765 | ||
|
|
6f4bde2111 | ||
|
|
f3035e8869 | ||
|
|
a9730499c0 | ||
|
|
b66843efe2 | ||
|
|
cc1aaea300 | ||
|
|
9ccc238799 | ||
|
|
8526ef9368 | ||
|
|
3c36727d07 | ||
|
|
ef33ce94cd | ||
|
|
d500baf5c5 | ||
|
|
deef32335e | ||
|
|
fc4b51ad00 | ||
|
|
fa762754bf | ||
|
|
29bd8f57c4 | ||
|
|
abc37354ef | ||
|
|
ee3333362f | ||
|
|
7c0c6b94a3 | ||
|
|
bac733113c | ||
|
|
32ab65d7cb | ||
|
|
c6744dc483 | ||
|
|
b9997d677d | ||
|
|
10defe6aef | ||
|
|
736aa125a8 | ||
|
|
eb48373b8b | ||
|
|
d4a7b7d84d | ||
|
|
2923a38b87 | ||
|
|
dabdaaee33 | ||
|
|
65e4d67c3e | ||
|
|
4b720f4150 | ||
|
|
2e85a25614 | ||
|
|
713fffcb8e | ||
|
|
8020b11ea0 | ||
|
|
2523d76756 | ||
|
|
7ede509973 | ||
|
|
7c1d97af3b | ||
|
|
95566e8388 | ||
|
|
76afb62b7b | ||
|
|
7dec922c70 | ||
|
|
c07e0110f8 | ||
|
|
2808734047 | ||
|
|
1f75314463 | ||
|
|
063fa3efde | ||
|
|
44693d79ec | ||
|
|
cea746377e | ||
|
|
59a98bd2b5 | ||
|
|
250aa28185 | ||
|
|
5280792cd7 | ||
|
|
2529aa151d | ||
|
|
fc658e5b9e | ||
|
|
a4bad62b60 | ||
|
|
e1d78d8b23 | ||
|
|
c7f826dbbe | ||
|
|
801da8079b | ||
|
|
7d797dba3f | ||
|
|
cda90c285e | ||
|
|
4b5a0787ab | ||
|
|
2048b7538e | ||
|
|
ac40dccc8f | ||
|
|
9ca8154651 | ||
|
|
db668ba491 | ||
|
|
edbafd94c2 | ||
|
|
2df76eb6e1 | ||
|
|
9b77c9ce7d | ||
|
|
dc2b67f155 | ||
|
|
9f32e9e11d | ||
|
|
7086d2a305 | ||
|
|
575615ca2d | ||
|
|
c0da4b09bf | ||
|
|
22880ccc9a | ||
|
|
e4001550c1 | ||
|
|
e9f65be86a | ||
|
|
3b9919a486 | ||
|
|
acc363133f | ||
|
|
8f2d502d4d | ||
|
|
2ae93ad715 | ||
|
|
bb590e364a |
4
.vscode/launch.json
vendored
4
.vscode/launch.json
vendored
@@ -9,6 +9,10 @@
|
|||||||
"console": "integratedTerminal",
|
"console": "integratedTerminal",
|
||||||
"cwd": "${workspaceFolder}",
|
"cwd": "${workspaceFolder}",
|
||||||
"justMyCode": false,
|
"justMyCode": false,
|
||||||
|
"env": {
|
||||||
|
"PYDEVD_DISABLE_FILE_VALIDATION": "1",
|
||||||
|
"PYTHONWARNINGS": "always", //error
|
||||||
|
},
|
||||||
"args": [
|
"args": [
|
||||||
//"-nw",
|
//"-nw",
|
||||||
"-ed",
|
"-ed",
|
||||||
|
|||||||
2
.vscode/launch.py
vendored
2
.vscode/launch.py
vendored
@@ -41,7 +41,7 @@ if sfx:
|
|||||||
argv = [sys.executable, sfx] + argv
|
argv = [sys.executable, sfx] + argv
|
||||||
sp.check_call(argv)
|
sp.check_call(argv)
|
||||||
elif re.search(" -j ?[0-9]", " ".join(argv)):
|
elif re.search(" -j ?[0-9]", " ".join(argv)):
|
||||||
argv = [sys.executable, "-m", "copyparty"] + argv
|
argv = [sys.executable, "-Wa", "-m", "copyparty"] + argv
|
||||||
sp.check_call(argv)
|
sp.check_call(argv)
|
||||||
else:
|
else:
|
||||||
sys.path.insert(0, os.getcwd())
|
sys.path.insert(0, os.getcwd())
|
||||||
|
|||||||
1
.vscode/tasks.json
vendored
1
.vscode/tasks.json
vendored
@@ -11,6 +11,7 @@
|
|||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "${config:python.pythonPath}",
|
"command": "${config:python.pythonPath}",
|
||||||
"args": [
|
"args": [
|
||||||
|
"-Wa", //-We
|
||||||
".vscode/launch.py"
|
".vscode/launch.py"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,43 @@
|
|||||||
* do something cool
|
* do something cool
|
||||||
|
|
||||||
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
|
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍
|
||||||
|
|
||||||
|
but to be more specific,
|
||||||
|
|
||||||
|
|
||||||
|
# contribution ideas
|
||||||
|
|
||||||
|
|
||||||
|
## documentation
|
||||||
|
|
||||||
|
I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.
|
||||||
|
|
||||||
|
|
||||||
|
## crazy ideas & features
|
||||||
|
|
||||||
|
assuming they won't cause too many problems or side-effects :>
|
||||||
|
|
||||||
|
i think someone was working on a way to list directories over DNS for example...
|
||||||
|
|
||||||
|
if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍
|
||||||
|
|
||||||
|
|
||||||
|
## others
|
||||||
|
|
||||||
|
aside from documentation and ideas, some other things that would be cool to have some help with is:
|
||||||
|
|
||||||
|
* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
|
||||||
|
|
||||||
|
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
|
||||||
|
|
||||||
|
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
|
||||||
|
|
||||||
|
* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
|
||||||
|
|
||||||
|
* [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)
|
||||||
|
|
||||||
|
* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
|
||||||
|
|
||||||
|
* [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
|
||||||
|
|
||||||
|
* [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :>
|
||||||
|
|||||||
148
README.md
148
README.md
@@ -20,13 +20,13 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||||||
* [testimonials](#testimonials) - small collection of user feedback
|
* [testimonials](#testimonials) - small collection of user feedback
|
||||||
* [motivations](#motivations) - project goals / philosophy
|
* [motivations](#motivations) - project goals / philosophy
|
||||||
* [notes](#notes) - general notes
|
* [notes](#notes) - general notes
|
||||||
* [bugs](#bugs)
|
* [bugs](#bugs) - roughly sorted by chance of encounter
|
||||||
* [general bugs](#general-bugs)
|
* [not my bugs](#not-my-bugs) - same order here too
|
||||||
* [not my bugs](#not-my-bugs)
|
|
||||||
* [breaking changes](#breaking-changes) - upgrade notes
|
* [breaking changes](#breaking-changes) - upgrade notes
|
||||||
* [FAQ](#FAQ) - "frequently" asked questions
|
* [FAQ](#FAQ) - "frequently" asked questions
|
||||||
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
|
* [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
|
||||||
* [shadowing](#shadowing) - hiding specific subfolders
|
* [shadowing](#shadowing) - hiding specific subfolders
|
||||||
|
* [dotfiles](#dotfiles) - unix-style hidden files/folders
|
||||||
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
|
* [the browser](#the-browser) - accessing a copyparty server using a web-browser
|
||||||
* [tabs](#tabs) - the main tabs in the ui
|
* [tabs](#tabs) - the main tabs in the ui
|
||||||
* [hotkeys](#hotkeys) - the browser has the following hotkeys
|
* [hotkeys](#hotkeys) - the browser has the following hotkeys
|
||||||
@@ -40,7 +40,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||||||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||||
* [media player](#media-player) - plays almost every audio format there is
|
* [media player](#media-player) - plays almost every audio format there is
|
||||||
* [audio equalizer](#audio-equalizer) - bass boosted
|
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||||
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
* [markdown viewer](#markdown-viewer) - and there are *two* editors
|
||||||
* [other tricks](#other-tricks)
|
* [other tricks](#other-tricks)
|
||||||
@@ -54,6 +54,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||||||
* [webdav server](#webdav-server) - with read-write support
|
* [webdav server](#webdav-server) - with read-write support
|
||||||
* [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
|
* [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
|
||||||
* [smb server](#smb-server) - unsafe, slow, not recommended for wan
|
* [smb server](#smb-server) - unsafe, slow, not recommended for wan
|
||||||
|
* [browser ux](#browser-ux) - tweaking the ui
|
||||||
* [file indexing](#file-indexing) - enables dedup and music search ++
|
* [file indexing](#file-indexing) - enables dedup and music search ++
|
||||||
* [exclude-patterns](#exclude-patterns) - to save some time
|
* [exclude-patterns](#exclude-patterns) - to save some time
|
||||||
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
|
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
|
||||||
@@ -67,6 +68,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||||||
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||||
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
||||||
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
|
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
|
||||||
|
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
|
||||||
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
||||||
* [themes](#themes)
|
* [themes](#themes)
|
||||||
* [complete examples](#complete-examples)
|
* [complete examples](#complete-examples)
|
||||||
@@ -74,7 +76,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||||||
* [prometheus](#prometheus) - metrics/stats can be enabled
|
* [prometheus](#prometheus) - metrics/stats can be enabled
|
||||||
* [packages](#packages) - the party might be closer than you think
|
* [packages](#packages) - the party might be closer than you think
|
||||||
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||||
* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
|
* [fedora package](#fedora-package) - currently **NOT** available on [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
|
||||||
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
|
* [nix package](#nix-package) - `nix profile install github:9001/copyparty`
|
||||||
* [nixos module](#nixos-module)
|
* [nixos module](#nixos-module)
|
||||||
* [browser support](#browser-support) - TLDR: yes
|
* [browser support](#browser-support) - TLDR: yes
|
||||||
@@ -111,7 +113,7 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
|
|||||||
|
|
||||||
* or install through pypi: `python3 -m pip install --user -U copyparty`
|
* or install through pypi: `python3 -m pip install --user -U copyparty`
|
||||||
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
|
||||||
* or install [on arch](#arch-package) ╱ [on fedora](#fedora-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
|
* or install [on arch](#arch-package) ╱ [on NixOS](#nixos-module) ╱ [through nix](#nix-package)
|
||||||
* or if you are on android, [install copyparty in termux](#install-on-android)
|
* or if you are on android, [install copyparty in termux](#install-on-android)
|
||||||
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
|
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
|
||||||
* docker has all deps built-in, so skip this step:
|
* docker has all deps built-in, so skip this step:
|
||||||
@@ -119,8 +121,8 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
|
|||||||
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
|
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
|
||||||
|
|
||||||
* **Alpine:** `apk add py3-pillow ffmpeg`
|
* **Alpine:** `apk add py3-pillow ffmpeg`
|
||||||
* **Debian:** `apt install python3-pil ffmpeg`
|
* **Debian:** `apt install --no-install-recommends python3-pil ffmpeg`
|
||||||
* **Fedora:** `dnf install python3-pillow ffmpeg`
|
* **Fedora:** rpmfusion + `dnf install python3-pillow ffmpeg`
|
||||||
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
|
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
|
||||||
* **MacOS:** `port install py-Pillow ffmpeg`
|
* **MacOS:** `port install py-Pillow ffmpeg`
|
||||||
* **MacOS** (alternative): `brew install pillow ffmpeg`
|
* **MacOS** (alternative): `brew install pillow ffmpeg`
|
||||||
@@ -147,9 +149,10 @@ you may also want these, especially on servers:
|
|||||||
|
|
||||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
|
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service (see guide inside)
|
||||||
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||||
|
* [contrib/openrc/copyparty](contrib/openrc/copyparty) to run copyparty on Alpine / Gentoo
|
||||||
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
|
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
|
||||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
|
||||||
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
|
* [nixos module](#nixos-module) to run copyparty on NixOS hosts
|
||||||
|
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||||
|
|
||||||
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
||||||
```
|
```
|
||||||
@@ -263,20 +266,28 @@ server notes:
|
|||||||
|
|
||||||
# bugs
|
# bugs
|
||||||
|
|
||||||
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
|
roughly sorted by chance of encounter
|
||||||
* Windows: python 2.7 cannot handle filenames with mojibake
|
|
||||||
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
|
||||||
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
|
|
||||||
|
|
||||||
## general bugs
|
* general:
|
||||||
|
* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
|
||||||
|
* `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
|
||||||
|
* if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
|
||||||
|
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
|
||||||
|
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
||||||
|
* probably more, pls let me know
|
||||||
|
|
||||||
* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
|
* python 3.4 and older (including 2.7):
|
||||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
|
* many rare and exciting edge-cases because [python didn't handle EINTR yet](https://peps.python.org/pep-0475/)
|
||||||
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
* downloads from copyparty may suddenly fail, but uploads *should* be fine
|
||||||
* probably more, pls let me know
|
|
||||||
|
* python 2.7 on Windows:
|
||||||
|
* cannot index non-ascii filenames with `-e2d`
|
||||||
|
* cannot handle filenames with mojibake
|
||||||
|
|
||||||
## not my bugs
|
## not my bugs
|
||||||
|
|
||||||
|
same order here too
|
||||||
|
|
||||||
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
|
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
|
||||||
|
|
||||||
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
|
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
|
||||||
@@ -310,6 +321,8 @@ server notes:
|
|||||||
|
|
||||||
upgrade notes
|
upgrade notes
|
||||||
|
|
||||||
|
* `1.9.16` (2023-11-04):
|
||||||
|
* `--stats`/prometheus: `cpp_bans` renamed to `cpp_active_bans`, and that + `cpp_uptime` are gauges
|
||||||
* `1.6.0` (2023-01-29):
|
* `1.6.0` (2023-01-29):
|
||||||
* http-api: delete/move is now `POST` instead of `GET`
|
* http-api: delete/move is now `POST` instead of `GET`
|
||||||
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
||||||
@@ -330,7 +343,7 @@ upgrade notes
|
|||||||
* yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
|
* yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
|
||||||
|
|
||||||
* i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
|
* i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
|
||||||
```bash
|
* ```bash
|
||||||
_| _ __ _ _|_
|
_| _ __ _ _|_
|
||||||
(_| (_) | | (_) |_
|
(_| (_) | | (_) |_
|
||||||
```
|
```
|
||||||
@@ -356,10 +369,12 @@ permissions:
|
|||||||
* `w` (write): upload files, move files *into* this folder
|
* `w` (write): upload files, move files *into* this folder
|
||||||
* `m` (move): move files/folders *from* this folder
|
* `m` (move): move files/folders *from* this folder
|
||||||
* `d` (delete): delete files/folders
|
* `d` (delete): delete files/folders
|
||||||
|
* `.` (dots): user can ask to show dotfiles in directory listings
|
||||||
* `g` (get): only download files, cannot see folder contents or zip/tar
|
* `g` (get): only download files, cannot see folder contents or zip/tar
|
||||||
* `G` (upget): same as `g` except uploaders get to see their own [filekeys](#filekeys) (see `fk` in examples below)
|
* `G` (upget): same as `g` except uploaders get to see their own [filekeys](#filekeys) (see `fk` in examples below)
|
||||||
* `h` (html): same as `g` except folders return their index.html, and filekeys are not necessary for index.html
|
* `h` (html): same as `g` except folders return their index.html, and filekeys are not necessary for index.html
|
||||||
* `a` (admin): can see uploader IPs, config-reload
|
* `a` (admin): can see upload time, uploader IPs, config-reload
|
||||||
|
* `A` ("all"): same as `rwmda.` (read/write/move/delete/admin/dotfiles)
|
||||||
|
|
||||||
examples:
|
examples:
|
||||||
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
|
* add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
|
||||||
@@ -387,6 +402,17 @@ hiding specific subfolders by mounting another volume on top of them
|
|||||||
for example `-v /mnt::r -v /var/empty:web/certs:r` mounts the server folder `/mnt` as the webroot, but another volume is mounted at `/web/certs` -- so visitors can only see the contents of `/mnt` and `/mnt/web` (at URLs `/` and `/web`), but not `/mnt/web/certs` because URL `/web/certs` is mapped to `/var/empty`
|
for example `-v /mnt::r -v /var/empty:web/certs:r` mounts the server folder `/mnt` as the webroot, but another volume is mounted at `/web/certs` -- so visitors can only see the contents of `/mnt` and `/mnt/web` (at URLs `/` and `/web`), but not `/mnt/web/certs` because URL `/web/certs` is mapped to `/var/empty`
|
||||||
|
|
||||||
|
|
||||||
|
## dotfiles
|
||||||
|
|
||||||
|
unix-style hidden files/folders by starting the name with a dot
|
||||||
|
|
||||||
|
anyone can access these if they know the name, but they normally don't appear in directory listings
|
||||||
|
|
||||||
|
a client can request to see dotfiles in directory listings if global option `-ed` is specified, or the volume has volflag `dots`, or the user has permission `.`
|
||||||
|
|
||||||
|
dotfiles do not appear in search results unless one of the above is true, **and** the global option / volflag `dotsrch` is set
|
||||||
|
|
||||||
|
|
||||||
# the browser
|
# the browser
|
||||||
|
|
||||||
accessing a copyparty server using a web-browser
|
accessing a copyparty server using a web-browser
|
||||||
@@ -499,7 +525,7 @@ it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video f
|
|||||||
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
|
||||||
|
|
||||||
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
|
||||||
* and, if you enable [file indexing](#file-indexing), all remaining folders will also get thumbnails (as long as they contain any pics at all)
|
* and, if you enable [file indexing](#file-indexing), it will also try those names as dotfiles (`.folder.jpg` and so), and then fallback on the first picture in the folder (if it has any pictures at all)
|
||||||
|
|
||||||
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
|
||||||
* indicated by the audio files having the ▶ icon instead of 💾
|
* indicated by the audio files having the ▶ icon instead of 💾
|
||||||
@@ -527,7 +553,7 @@ select which type of archive you want in the `[⚙️] config` tab:
|
|||||||
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
|
* gzip default level is `3` (0=fast, 9=best), change with `?tar=gz:9`
|
||||||
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
|
* xz default level is `1` (0=fast, 9=best), change with `?tar=xz:9`
|
||||||
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
|
* bz2 default level is `2` (1=fast, 9=best), change with `?tar=bz2:9`
|
||||||
* hidden files (dotfiles) are excluded unless `-ed`
|
* hidden files ([dotfiles](#dotfiles)) are excluded unless account is allowed to list them
|
||||||
* `up2k.db` and `dir.txt` is always excluded
|
* `up2k.db` and `dir.txt` is always excluded
|
||||||
* bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
|
* bsdtar supports streaming unzipping: `curl foo?zip=utf8 | bsdtar -xv`
|
||||||
* good, because copyparty's zip is faster than tar on small files
|
* good, because copyparty's zip is faster than tar on small files
|
||||||
@@ -580,7 +606,8 @@ the up2k UI is the epitome of polished inutitive experiences:
|
|||||||
* "parallel uploads" specifies how many chunks to upload at the same time
|
* "parallel uploads" specifies how many chunks to upload at the same time
|
||||||
* `[🏃]` analysis of other files should continue while one is uploading
|
* `[🏃]` analysis of other files should continue while one is uploading
|
||||||
* `[🥔]` shows a simpler UI for faster uploads from slow devices
|
* `[🥔]` shows a simpler UI for faster uploads from slow devices
|
||||||
* `[💭]` ask for confirmation before files are added to the queue
|
* `[🎲]` generate random filenames during upload
|
||||||
|
* `[📅]` preserve last-modified timestamps; server times will match yours
|
||||||
* `[🔎]` switch between upload and [file-search](#file-search) mode
|
* `[🔎]` switch between upload and [file-search](#file-search) mode
|
||||||
* ignore `[🔎]` if you add files by dragging them into the browser
|
* ignore `[🔎]` if you add files by dragging them into the browser
|
||||||
|
|
||||||
@@ -735,7 +762,7 @@ open the `[🎺]` media-player-settings tab to configure it,
|
|||||||
|
|
||||||
### audio equalizer
|
### audio equalizer
|
||||||
|
|
||||||
bass boosted
|
and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||||
|
|
||||||
can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)
|
can also boost the volume in general, or increase/decrease stereo width (like [crossfeed](https://www.foobar2000.org/components/view/foo_dsp_meiercf) just worse)
|
||||||
|
|
||||||
@@ -781,6 +808,8 @@ other notes,
|
|||||||
|
|
||||||
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
|
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
|
||||||
|
|
||||||
|
* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`
|
||||||
|
|
||||||
|
|
||||||
## searching
|
## searching
|
||||||
|
|
||||||
@@ -809,6 +838,9 @@ using arguments or config files, or a mix of both:
|
|||||||
* or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
|
* or click the `[reload cfg]` button in the control-panel if the user has `a`/admin in any volume
|
||||||
* changes to the `[global]` config section requires a restart to take effect
|
* changes to the `[global]` config section requires a restart to take effect
|
||||||
|
|
||||||
|
**NB:** as humongous as this readme is, there is also a lot of undocumented features. Run copyparty with `--help` to see all available global options; all of those can be used in the `[global]` section of config files, and everything listed in `--help-flags` can be used in volumes as volflags.
|
||||||
|
* if running in docker/podman, try this: `docker run --rm -it copyparty/ac --help`
|
||||||
|
|
||||||
|
|
||||||
## zeroconf
|
## zeroconf
|
||||||
|
|
||||||
@@ -946,6 +978,16 @@ authenticate with one of the following:
|
|||||||
* username `$password`, password `k`
|
* username `$password`, password `k`
|
||||||
|
|
||||||
|
|
||||||
|
## browser ux
|
||||||
|
|
||||||
|
tweaking the ui
|
||||||
|
|
||||||
|
* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort
|
||||||
|
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Cirle tags/.tn tags/Artist tags/Title`
|
||||||
|
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Cirle,tags/.tn,tags/Artist,tags/Title,href`
|
||||||
|
* to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`
|
||||||
|
|
||||||
|
|
||||||
## file indexing
|
## file indexing
|
||||||
|
|
||||||
enables dedup and music search ++
|
enables dedup and music search ++
|
||||||
@@ -972,6 +1014,7 @@ the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`,
|
|||||||
* `-v ~/music::r:c,d2ts` same except only affecting tags
|
* `-v ~/music::r:c,d2ts` same except only affecting tags
|
||||||
|
|
||||||
note:
|
note:
|
||||||
|
* upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
|
||||||
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
|
||||||
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||||
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
* deduplication is possible on windows if you run copyparty as administrator (not saying you should!)
|
||||||
@@ -1169,6 +1212,17 @@ redefine behavior with plugins ([examples](./bin/handlers/))
|
|||||||
replace 404 and 403 errors with something completely different (that's it for now)
|
replace 404 and 403 errors with something completely different (that's it for now)
|
||||||
|
|
||||||
|
|
||||||
|
## identity providers
|
||||||
|
|
||||||
|
replace copyparty passwords with oauth and such
|
||||||
|
|
||||||
|
work is [ongoing](https://github.com/9001/copyparty/issues/62) to support authenticating / authorizing users based on a separate authentication proxy, which makes it possible to support oauth, single-sign-on, etc.
|
||||||
|
|
||||||
|
it is currently possible to specify `--idp-h-usr x-username`; copyparty will then skip password validation and blindly trust the username specified in the `X-Username` request header
|
||||||
|
|
||||||
|
the remaining stuff (accepting user groups through another header, creating volumes on the fly) are still to-do; configuration will probably [look like this](./docs/examples/docker/idp/copyparty.conf)
|
||||||
|
|
||||||
|
|
||||||
## hiding from google
|
## hiding from google
|
||||||
|
|
||||||
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
|
||||||
@@ -1228,8 +1282,8 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
|||||||
* anyone can upload, and receive "secret" links for each upload they do:
|
* anyone can upload, and receive "secret" links for each upload they do:
|
||||||
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
|
`python copyparty-sfx.py -e2dsa -v .::wG:c,fk=8`
|
||||||
|
|
||||||
* anyone can browse, only `kevin` (password `okgo`) can upload/move/delete files:
|
* anyone can browse (`r`), only `kevin` (password `okgo`) can upload/move/delete (`A`) files:
|
||||||
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:rwmd,kevin`
|
`python copyparty-sfx.py -e2dsa -a kevin:okgo -v .::r:A,kevin`
|
||||||
|
|
||||||
* read-only music server:
|
* read-only music server:
|
||||||
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
|
`python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts --no-robots --force-js --theme 2`
|
||||||
@@ -1283,8 +1337,23 @@ scrape_configs:
|
|||||||
```
|
```
|
||||||
|
|
||||||
currently the following metrics are available,
|
currently the following metrics are available,
|
||||||
* `cpp_uptime_seconds`
|
* `cpp_uptime_seconds` time since last copyparty restart
|
||||||
* `cpp_bans` number of banned IPs
|
* `cpp_boot_unixtime_seconds` same but as an absolute timestamp
|
||||||
|
* `cpp_http_conns` number of open http(s) connections
|
||||||
|
* `cpp_http_reqs` number of http(s) requests handled
|
||||||
|
* `cpp_sus_reqs` number of 403/422/malicious requests
|
||||||
|
* `cpp_active_bans` number of currently banned IPs
|
||||||
|
* `cpp_total_bans` number of IPs banned since last restart
|
||||||
|
|
||||||
|
these are available unless `--nos-vst` is specified:
|
||||||
|
* `cpp_db_idle_seconds` time since last database activity (upload/rename/delete)
|
||||||
|
* `cpp_db_act_seconds` same but as an absolute timestamp
|
||||||
|
* `cpp_idle_vols` number of volumes which are idle / ready
|
||||||
|
* `cpp_busy_vols` number of volumes which are busy / indexing
|
||||||
|
* `cpp_offline_vols` number of volumes which are offline / unavailable
|
||||||
|
* `cpp_hashing_files` number of files queued for hashing / indexing
|
||||||
|
* `cpp_tagq_files` number of files queued for metadata scanning
|
||||||
|
* `cpp_mtpq_files` number of files queued for plugin-based analysis
|
||||||
|
|
||||||
and these are available per-volume only:
|
and these are available per-volume only:
|
||||||
* `cpp_disk_size_bytes` total HDD size
|
* `cpp_disk_size_bytes` total HDD size
|
||||||
@@ -1303,31 +1372,40 @@ some of the metrics have additional requirements to function correctly,
|
|||||||
the following options are available to disable some of the metrics:
|
the following options are available to disable some of the metrics:
|
||||||
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
|
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
|
||||||
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
|
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
|
||||||
|
* `--nos-vst` disables volume state, reducing the worst-case prometheus query time by 0.5 sec
|
||||||
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
|
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
|
||||||
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
|
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
|
||||||
|
|
||||||
|
note: the following metrics are counted incorrectly if multiprocessing is enabled with `-j`: `cpp_http_conns`, `cpp_http_reqs`, `cpp_sus_reqs`, `cpp_active_bans`, `cpp_total_bans`
|
||||||
|
|
||||||
|
|
||||||
# packages
|
# packages
|
||||||
|
|
||||||
the party might be closer than you think
|
the party might be closer than you think
|
||||||
|
|
||||||
|
if your distro/OS is not mentioned below, there might be some hints in the [«on servers»](#on-servers) section
|
||||||
|
|
||||||
|
|
||||||
## arch package
|
## arch package
|
||||||
|
|
||||||
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
|
||||||
|
|
||||||
|
it comes with a [systemd service](./contrib/package/arch/copyparty.service) and expects to find one or more [config files](./docs/example.conf) in `/etc/copyparty.d/`
|
||||||
|
|
||||||
|
|
||||||
## fedora package
|
## fedora package
|
||||||
|
|
||||||
now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) , maintained autonomously -- [track record](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/package/python-copyparty/) seems OK
|
currently **NOT** available on [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) , fedora is having issues with their build servers and won't be fixed for several months
|
||||||
|
|
||||||
|
if you previously installed copyparty from copr, you may run one of the following commands to upgrade to a more recent version:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
dnf copr enable @copr/PyPI
|
dnf install https://ocv.me/copyparty/fedora/37/python3-copyparty.fc37.noarch.rpm
|
||||||
dnf install python3-copyparty # just a minimal install, or...
|
dnf install https://ocv.me/copyparty/fedora/38/python3-copyparty.fc38.noarch.rpm
|
||||||
dnf install python3-{copyparty,pillow,argon2-cffi,pyftpdlib,pyOpenSSL} ffmpeg-free # with recommended deps
|
dnf install https://ocv.me/copyparty/fedora/39/python3-copyparty.fc39.noarch.rpm
|
||||||
```
|
```
|
||||||
|
|
||||||
this *may* also work on RHEL but [I'm not paying IBM to verify that](https://www.jeffgeerling.com/blog/2023/dear-red-hat-are-you-dumb)
|
to run copyparty as a service, use the [systemd service scripts](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd), just replace `/usr/bin/python3 /usr/local/bin/copyparty-sfx.py` with `/usr/bin/copyparty`
|
||||||
|
|
||||||
|
|
||||||
## nix package
|
## nix package
|
||||||
@@ -1463,7 +1541,7 @@ TLDR: yes
|
|||||||
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
|
||||||
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
|
||||||
|
|
||||||
* internet explorer 6 to 8 behave the same
|
* internet explorer 6 through 8 behave the same
|
||||||
* firefox 52 and chrome 49 are the final winxp versions
|
* firefox 52 and chrome 49 are the final winxp versions
|
||||||
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
|
||||||
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
|
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
|
||||||
@@ -1635,8 +1713,6 @@ safety profiles:
|
|||||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||||
* however note if you edit one file it will also affect the other copies
|
* however note if you edit one file it will also affect the other copies
|
||||||
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
* `--vague-403` returns a "404 not found" instead of "401 unauthorized" which is a common enterprise meme
|
||||||
* `--ban-404=50,60,1440` ban client for 1440min (24h) if they hit 50 404's in 60min
|
|
||||||
* `--turbo=-1` to force-disable turbo-mode in the uploader which could otherwise hit the 404-ban
|
|
||||||
* `--nih` removes the server hostname from directory listings
|
* `--nih` removes the server hostname from directory listings
|
||||||
|
|
||||||
* option `-sss` is a shortcut for the above plus:
|
* option `-sss` is a shortcut for the above plus:
|
||||||
|
|||||||
@@ -207,7 +207,7 @@ def examples():
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
global NC, BY_PATH
|
global NC, BY_PATH # pylint: disable=global-statement
|
||||||
os.system("")
|
os.system("")
|
||||||
print()
|
print()
|
||||||
|
|
||||||
@@ -282,7 +282,8 @@ def main():
|
|||||||
if ver == "corrupt":
|
if ver == "corrupt":
|
||||||
die("{} database appears to be corrupt, sorry")
|
die("{} database appears to be corrupt, sorry")
|
||||||
|
|
||||||
if ver < DB_VER1 or ver > DB_VER2:
|
iver = int(ver)
|
||||||
|
if iver < DB_VER1 or iver > DB_VER2:
|
||||||
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
|
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
|
||||||
die(m)
|
die(m)
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,11 @@ import json
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from datetime import datetime
|
|
||||||
|
try:
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
except:
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -96,7 +100,11 @@ def main(argv=None):
|
|||||||
msg_info = json.loads(sys.argv[1])
|
msg_info = json.loads(sys.argv[1])
|
||||||
# print(msg_info)
|
# print(msg_info)
|
||||||
|
|
||||||
dt = datetime.utcfromtimestamp(msg_info["at"])
|
try:
|
||||||
|
dt = datetime.fromtimestamp(msg_info["at"], timezone.utc)
|
||||||
|
except:
|
||||||
|
dt = datetime.utcfromtimestamp(msg_info["at"])
|
||||||
|
|
||||||
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
||||||
|
|
||||||
msg_text = TEMPLATE % msg_info
|
msg_text = TEMPLATE % msg_info
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ import json
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from plyer import notification
|
from plyer import notification
|
||||||
|
|
||||||
|
|
||||||
@@ -43,7 +43,8 @@ def main():
|
|||||||
fp = inf["ap"]
|
fp = inf["ap"]
|
||||||
sz = humansize(inf["sz"])
|
sz = humansize(inf["sz"])
|
||||||
dp, fn = os.path.split(fp)
|
dp, fn = os.path.split(fp)
|
||||||
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
|
dt = datetime.fromtimestamp(inf["mt"], timezone.utc)
|
||||||
|
mt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||||
title = "File received"
|
title = "File received"
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
|
||||||
_ = r"""
|
_ = r"""
|
||||||
@@ -43,8 +43,11 @@ except:
|
|||||||
return p
|
return p
|
||||||
|
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
def humantime(ts):
|
def humantime(ts):
|
||||||
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
|
return datetime.fromtimestamp(ts, UTC).strftime("%Y-%m-%d %H:%M:%S")
|
||||||
|
|
||||||
|
|
||||||
def find_files_root(inf):
|
def find_files_root(inf):
|
||||||
@@ -96,7 +99,7 @@ def main():
|
|||||||
|
|
||||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||||
ret.append("")
|
ret.append("")
|
||||||
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
|
ftime = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S.%f")
|
||||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||||
with open(fsenc(fp), "wb") as f:
|
with open(fsenc(fp), "wb") as f:
|
||||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||||
|
|||||||
@@ -46,13 +46,20 @@ import traceback
|
|||||||
import http.client # py2: httplib
|
import http.client # py2: httplib
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import calendar
|
import calendar
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
WINDOWS = sys.platform == "win32"
|
WINDOWS = sys.platform == "win32"
|
||||||
MACOS = platform.system() == "Darwin"
|
MACOS = platform.system() == "Darwin"
|
||||||
info = log = dbg = None
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
def print(*args, **kwargs):
|
||||||
|
try:
|
||||||
|
builtins.print(*list(args), **kwargs)
|
||||||
|
except:
|
||||||
|
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
print(
|
print(
|
||||||
@@ -64,6 +71,13 @@ print(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def null_log(msg):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
info = log = dbg = null_log
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from fuse import FUSE, FuseOSError, Operations
|
from fuse import FUSE, FuseOSError, Operations
|
||||||
except:
|
except:
|
||||||
@@ -83,13 +97,6 @@ except:
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
|
|
||||||
def print(*args, **kwargs):
|
|
||||||
try:
|
|
||||||
builtins.print(*list(args), **kwargs)
|
|
||||||
except:
|
|
||||||
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def termsafe(txt):
|
def termsafe(txt):
|
||||||
try:
|
try:
|
||||||
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
|
||||||
@@ -118,10 +125,6 @@ def fancy_log(msg):
|
|||||||
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
|
||||||
|
|
||||||
|
|
||||||
def null_log(msg):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def hexler(binary):
|
def hexler(binary):
|
||||||
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
return binary.replace("\r", "\\r").replace("\n", "\\n")
|
||||||
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])
|
||||||
@@ -176,7 +179,7 @@ class RecentLog(object):
|
|||||||
def put(self, msg):
|
def put(self, msg):
|
||||||
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
msg = "{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg)
|
||||||
if self.f:
|
if self.f:
|
||||||
fmsg = " ".join([datetime.utcnow().strftime("%H%M%S.%f"), str(msg)])
|
fmsg = " ".join([datetime.now(UTC).strftime("%H%M%S.%f"), str(msg)])
|
||||||
self.f.write(fmsg.encode("utf-8"))
|
self.f.write(fmsg.encode("utf-8"))
|
||||||
|
|
||||||
with self.mtx:
|
with self.mtx:
|
||||||
|
|||||||
@@ -20,12 +20,13 @@ import sys
|
|||||||
import base64
|
import base64
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import argparse
|
import argparse
|
||||||
from datetime import datetime
|
from datetime import datetime, timezone
|
||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
|
|
||||||
FS_ENCODING = sys.getfilesystemencoding()
|
FS_ENCODING = sys.getfilesystemencoding()
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||||
@@ -155,11 +156,10 @@ th {
|
|||||||
link = txt.decode("utf-8")[4:]
|
link = txt.decode("utf-8")[4:]
|
||||||
|
|
||||||
sz = "{:,}".format(sz)
|
sz = "{:,}".format(sz)
|
||||||
|
dt = datetime.fromtimestamp(at if at > 0 else mt, UTC)
|
||||||
v = [
|
v = [
|
||||||
w[:16],
|
w[:16],
|
||||||
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
|
dt.strftime("%Y-%m-%d %H:%M:%S"),
|
||||||
"%Y-%m-%d %H:%M:%S"
|
|
||||||
),
|
|
||||||
sz,
|
sz,
|
||||||
imap.get(ip, ip),
|
imap.get(ip, ip),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -12,13 +12,13 @@ done
|
|||||||
help() { cat <<'EOF'
|
help() { cat <<'EOF'
|
||||||
|
|
||||||
usage:
|
usage:
|
||||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||||
|
|
||||||
example:
|
example:
|
||||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||||
|
|
||||||
example for running straight from source (instead of using an sfx):
|
example for running straight from source (instead of using an sfx):
|
||||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||||
|
|
||||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||||
you should add /home/foo/.local as a VOLDIR
|
you should add /home/foo/.local as a VOLDIR
|
||||||
@@ -28,6 +28,16 @@ exit 1
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
errs=
|
||||||
|
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
|
||||||
|
command -v $c >/dev/null || {
|
||||||
|
echo ERROR: command not found: $c
|
||||||
|
errs=1
|
||||||
|
}
|
||||||
|
done
|
||||||
|
[ $errs ] && exit 1
|
||||||
|
|
||||||
|
|
||||||
# read arguments
|
# read arguments
|
||||||
trap help EXIT
|
trap help EXIT
|
||||||
jail="$(realpath "$1")"; shift
|
jail="$(realpath "$1")"; shift
|
||||||
@@ -58,11 +68,18 @@ cpp="$1"; shift
|
|||||||
}
|
}
|
||||||
trap - EXIT
|
trap - EXIT
|
||||||
|
|
||||||
|
usr="$(getent passwd $uid | cut -d: -f1)"
|
||||||
|
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
|
||||||
|
uid="$(getent passwd $uid | cut -d: -f3)"
|
||||||
|
|
||||||
|
grp="$(getent group $gid | cut -d: -f1)"
|
||||||
|
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
|
||||||
|
gid="$(getent group $gid | cut -d: -f3)"
|
||||||
|
|
||||||
# debug/vis
|
# debug/vis
|
||||||
echo
|
echo
|
||||||
echo "chroot-dir = $jail"
|
echo "chroot-dir = $jail"
|
||||||
echo "user:group = $uid:$gid"
|
echo "user:group = $uid:$gid ($usr:$grp)"
|
||||||
echo " copyparty = $cpp"
|
echo " copyparty = $cpp"
|
||||||
echo
|
echo
|
||||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||||
@@ -80,34 +97,39 @@ jail="${jail%/}"
|
|||||||
|
|
||||||
|
|
||||||
# bind-mount system directories and volumes
|
# bind-mount system directories and volumes
|
||||||
|
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||||
while IFS= read -r v; do
|
while IFS= read -r v; do
|
||||||
[ -e "$v" ] || {
|
[ -e "$v" ] || {
|
||||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
|
||||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
|
||||||
# echo "v [$v] i1 [$i1] i2 [$i2]"
|
|
||||||
[ $i1 = $i2 ] && continue
|
[ $i1 = $i2 ] && continue
|
||||||
|
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
|
||||||
mkdir -p "$jail$v"
|
mkdir -p "$jail$v"
|
||||||
mount --bind "$v" "$jail$v"
|
mount --bind "$v" "$jail$v"
|
||||||
done
|
done
|
||||||
|
rmdir "$jail/.prisonlock" || true
|
||||||
|
|
||||||
|
|
||||||
cln() {
|
cln() {
|
||||||
rv=$?
|
trap - EXIT
|
||||||
wait -f -p rv $p || true
|
wait -f -n $p && rv=0 || rv=$?
|
||||||
cd /
|
cd /
|
||||||
echo "stopping chroot..."
|
echo "stopping chroot..."
|
||||||
lsof "$jail" | grep -F "$jail" &&
|
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||||
|
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
|
||||||
echo "chroot is in use; will not unmount" ||
|
echo "chroot is in use; will not unmount" ||
|
||||||
{
|
{
|
||||||
mount | grep -F " on $jail" |
|
mount | grep -F " on $jail" |
|
||||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
LC_ALL=C sort -r | while IFS= read -r v; do
|
||||||
|
umount "$v" && echo "umount OK: $v"
|
||||||
|
done
|
||||||
}
|
}
|
||||||
|
rmdir "$jail/.prisonlock" || true
|
||||||
exit $rv
|
exit $rv
|
||||||
}
|
}
|
||||||
trap cln EXIT
|
trap cln EXIT
|
||||||
@@ -128,8 +150,8 @@ chmod 777 "$jail/tmp"
|
|||||||
|
|
||||||
|
|
||||||
# run copyparty
|
# run copyparty
|
||||||
export HOME=$(getent passwd $uid | cut -d: -f6)
|
export HOME="$(getent passwd $uid | cut -d: -f6)"
|
||||||
export USER=$(getent passwd $uid | cut -d: -f1)
|
export USER="$usr"
|
||||||
export LOGNAME="$USER"
|
export LOGNAME="$USER"
|
||||||
#echo "pybin [$pybin]"
|
#echo "pybin [$pybin]"
|
||||||
#echo "pyarg [$pyarg]"
|
#echo "pyarg [$pyarg]"
|
||||||
@@ -137,5 +159,5 @@ export LOGNAME="$USER"
|
|||||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||||
p=$!
|
p=$!
|
||||||
trap 'kill -USR1 $p' USR1
|
trap 'kill -USR1 $p' USR1
|
||||||
trap 'kill $p' INT TERM
|
trap 'trap - INT TERM; kill $p' INT TERM
|
||||||
wait
|
wait
|
||||||
|
|||||||
67
bin/u2c.py
67
bin/u2c.py
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
S_VERSION = "1.10"
|
S_VERSION = "1.12"
|
||||||
S_BUILD_DT = "2023-08-15"
|
S_BUILD_DT = "2023-12-08"
|
||||||
|
|
||||||
"""
|
"""
|
||||||
u2c.py: upload to copyparty
|
u2c.py: upload to copyparty
|
||||||
@@ -105,12 +105,14 @@ class File(object):
|
|||||||
# set by handshake
|
# set by handshake
|
||||||
self.recheck = False # duplicate; redo handshake after all files done
|
self.recheck = False # duplicate; redo handshake after all files done
|
||||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||||
self.wark = None # type: str
|
self.wark = "" # type: str
|
||||||
self.url = None # type: str
|
self.url = "" # type: str
|
||||||
|
self.nhs = 0
|
||||||
|
|
||||||
# set by upload
|
# set by upload
|
||||||
self.up_b = 0 # type: int
|
self.up_b = 0 # type: int
|
||||||
self.up_c = 0 # type: int
|
self.up_c = 0 # type: int
|
||||||
|
self.cd = 0
|
||||||
|
|
||||||
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||||
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||||
@@ -221,6 +223,7 @@ class MTHash(object):
|
|||||||
|
|
||||||
def hash_at(self, nch):
|
def hash_at(self, nch):
|
||||||
f = self.f
|
f = self.f
|
||||||
|
assert f
|
||||||
ofs = ofs0 = nch * self.csz
|
ofs = ofs0 = nch * self.csz
|
||||||
hashobj = hashlib.sha512()
|
hashobj = hashlib.sha512()
|
||||||
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
|
||||||
@@ -433,7 +436,7 @@ def walkdirs(err, tops, excl):
|
|||||||
za = [x.replace(b"/", b"\\") for x in za]
|
za = [x.replace(b"/", b"\\") for x in za]
|
||||||
tops = za
|
tops = za
|
||||||
|
|
||||||
ptn = re.compile(excl.encode("utf-8") or b"\n")
|
ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
|
||||||
|
|
||||||
for top in tops:
|
for top in tops:
|
||||||
isdir = os.path.isdir(top)
|
isdir = os.path.isdir(top)
|
||||||
@@ -461,7 +464,7 @@ def quotep(btxt):
|
|||||||
if not PY2:
|
if not PY2:
|
||||||
quot1 = quot1.encode("ascii")
|
quot1 = quot1.encode("ascii")
|
||||||
|
|
||||||
return quot1.replace(b" ", b"+")
|
return quot1.replace(b" ", b"+") # type: ignore
|
||||||
|
|
||||||
|
|
||||||
# from copyparty/util.py
|
# from copyparty/util.py
|
||||||
@@ -498,7 +501,7 @@ def up2k_chunksize(filesize):
|
|||||||
|
|
||||||
# mostly from copyparty/up2k.py
|
# mostly from copyparty/up2k.py
|
||||||
def get_hashlist(file, pcb, mth):
|
def get_hashlist(file, pcb, mth):
|
||||||
# type: (File, any, any) -> None
|
# type: (File, Any, Any) -> None
|
||||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||||
|
|
||||||
chunk_sz = up2k_chunksize(file.size)
|
chunk_sz = up2k_chunksize(file.size)
|
||||||
@@ -598,7 +601,7 @@ def handshake(ar, file, search):
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||||
time.sleep(1)
|
time.sleep(ar.cd)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = r.json()
|
r = r.json()
|
||||||
@@ -689,6 +692,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
def __init__(self, ar, stats=None):
|
def __init__(self, ar, stats=None):
|
||||||
self.ok = False
|
self.ok = False
|
||||||
|
self.errs = 0
|
||||||
self.ar = ar
|
self.ar = ar
|
||||||
self.stats = stats or self._scan()
|
self.stats = stats or self._scan()
|
||||||
if not self.stats:
|
if not self.stats:
|
||||||
@@ -736,7 +740,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
self._fancy()
|
self._fancy()
|
||||||
|
|
||||||
self.ok = True
|
self.ok = not self.errs
|
||||||
|
|
||||||
def _safe(self):
|
def _safe(self):
|
||||||
"""minimal basic slow boring fallback codepath"""
|
"""minimal basic slow boring fallback codepath"""
|
||||||
@@ -903,12 +907,23 @@ class Ctl(object):
|
|||||||
dp = os.path.join(top, rd)
|
dp = os.path.join(top, rd)
|
||||||
lnodes = set(os.listdir(dp))
|
lnodes = set(os.listdir(dp))
|
||||||
bnames = [x for x in ls if x not in lnodes]
|
bnames = [x for x in ls if x not in lnodes]
|
||||||
if bnames:
|
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
locs = [vpath + srd + "/" + x for x in names]
|
||||||
locs = [vpath + srd + "/" + x for x in names]
|
while locs:
|
||||||
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
req = locs
|
||||||
req_ses.post(self.ar.url + "?delete", json=locs)
|
while req:
|
||||||
|
print("DELETING ~%s/#%s" % (srd, len(req)))
|
||||||
|
r = req_ses.post(self.ar.url + "?delete", json=req)
|
||||||
|
if r.status_code == 413 and "json 2big" in r.text:
|
||||||
|
print(" (delete request too big; slicing...)")
|
||||||
|
req = req[: len(req) // 2]
|
||||||
|
continue
|
||||||
|
elif not r:
|
||||||
|
t = "delete request failed: %r %s"
|
||||||
|
raise Exception(t % (r, r.text))
|
||||||
|
break
|
||||||
|
locs = locs[len(req) :]
|
||||||
|
|
||||||
if isdir:
|
if isdir:
|
||||||
continue
|
continue
|
||||||
@@ -961,13 +976,22 @@ class Ctl(object):
|
|||||||
self.q_upload.put(None)
|
self.q_upload.put(None)
|
||||||
break
|
break
|
||||||
|
|
||||||
with self.mutex:
|
|
||||||
self.handshaker_busy += 1
|
|
||||||
|
|
||||||
upath = file.abs.decode("utf-8", "replace")
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
if not VT100:
|
if not VT100:
|
||||||
upath = upath.lstrip("\\?")
|
upath = upath.lstrip("\\?")
|
||||||
|
|
||||||
|
file.nhs += 1
|
||||||
|
if file.nhs > 32:
|
||||||
|
print("ERROR: giving up on file %s" % (upath))
|
||||||
|
self.errs += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.handshaker_busy += 1
|
||||||
|
|
||||||
|
while time.time() < file.cd:
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
hs, sprs = handshake(self.ar, file, search)
|
hs, sprs = handshake(self.ar, file, search)
|
||||||
if search:
|
if search:
|
||||||
if hs:
|
if hs:
|
||||||
@@ -1050,6 +1074,7 @@ class Ctl(object):
|
|||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
t = "upload failed, retrying: {0} #{1} ({2})\n"
|
||||||
eprint(t.format(file.name, cid[:8], ex))
|
eprint(t.format(file.name, cid[:8], ex))
|
||||||
|
file.cd = time.time() + self.ar.cd
|
||||||
# handshake will fix it
|
# handshake will fix it
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -1103,7 +1128,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
|||||||
ap.add_argument("-v", action="store_true", help="verbose")
|
ap.add_argument("-v", action="store_true", help="verbose")
|
||||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||||
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
|
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
|
||||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||||
|
|
||||||
@@ -1121,6 +1146,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
|||||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
|
||||||
|
ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
|
||||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||||
|
|
||||||
@@ -1187,6 +1213,9 @@ source file/folder selection uses rsync syntax, meaning that:
|
|||||||
ar.z = True
|
ar.z = True
|
||||||
ctl = Ctl(ar, ctl.stats)
|
ctl = Ctl(ar, ctl.stats)
|
||||||
|
|
||||||
|
if ctl.errs:
|
||||||
|
print("WARNING: %d errors" % (ctl.errs))
|
||||||
|
|
||||||
sys.exit(0 if ctl.ok else 1)
|
sys.exit(0 if ctl.ok else 1)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ def main():
|
|||||||
ofs = ln.find("{")
|
ofs = ln.find("{")
|
||||||
j = json.loads(ln[ofs:])
|
j = json.loads(ln[ofs:])
|
||||||
except:
|
except:
|
||||||
pass
|
continue
|
||||||
|
|
||||||
w = j["wark"]
|
w = j["wark"]
|
||||||
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
||||||
|
|||||||
@@ -13,7 +13,7 @@
|
|||||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
|
|
||||||
upstream cpp {
|
upstream cpp {
|
||||||
server 127.0.0.1:3923;
|
server 127.0.0.1:3923 fail_timeout=1s;
|
||||||
keepalive 1;
|
keepalive 1;
|
||||||
}
|
}
|
||||||
server {
|
server {
|
||||||
|
|||||||
@@ -1,14 +1,15 @@
|
|||||||
# Maintainer: icxes <dev.null@need.moe>
|
# Maintainer: icxes <dev.null@need.moe>
|
||||||
pkgname=copyparty
|
pkgname=copyparty
|
||||||
pkgver="1.9.8"
|
pkgver="1.9.27"
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
pkgdesc="Portable file sharing hub"
|
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, zeroconf, media indexer, thumbnails++"
|
||||||
arch=("any")
|
arch=("any")
|
||||||
url="https://github.com/9001/${pkgname}"
|
url="https://github.com/9001/${pkgname}"
|
||||||
license=('MIT')
|
license=('MIT')
|
||||||
depends=("python" "lsof" "python-jinja")
|
depends=("python" "lsof" "python-jinja")
|
||||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||||
|
"cfssl: generate TLS certificates on startup (pointless when reverse-proxied)"
|
||||||
"python-mutagen: music tags (alternative)"
|
"python-mutagen: music tags (alternative)"
|
||||||
"python-pillow: thumbnails for images"
|
"python-pillow: thumbnails for images"
|
||||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||||
@@ -20,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
|||||||
)
|
)
|
||||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||||
backup=("etc/${pkgname}.d/init" )
|
backup=("etc/${pkgname}.d/init" )
|
||||||
sha256sums=("ae8510f02f0b52d6fec4a22e95dd739ccffc4c39eb86abfd5c5feb836860c366")
|
sha256sums=("0bcf9362bc1bd9c85c228312c8341fcb9a37e383af6d8bee123e3a84e66394be")
|
||||||
|
|
||||||
build() {
|
build() {
|
||||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
# this will start `/usr/bin/copyparty-sfx.py`
|
# this will start `/usr/bin/copyparty-sfx.py`
|
||||||
# in a chroot, preventing accidental access elsewhere
|
# in a chroot, preventing accidental access elsewhere,
|
||||||
# and read config from `/etc/copyparty.d/*.conf`
|
# and read copyparty config from `/etc/copyparty.d/*.conf`
|
||||||
#
|
#
|
||||||
# expose additional filesystem locations to copyparty
|
# expose additional filesystem locations to copyparty
|
||||||
# by listing them between the last `1000` and `--`
|
# by listing them between the last `cpp` and `--`
|
||||||
#
|
#
|
||||||
# `1000 1000` = what user to run copyparty as
|
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||||
#
|
#
|
||||||
# unless you add -q to disable logging, you may want to remove the
|
# unless you add -q to disable logging, you may want to remove the
|
||||||
# following line to allow buffering (slightly better performance):
|
# following line to allow buffering (slightly better performance):
|
||||||
@@ -24,7 +24,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
|||||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||||
|
|
||||||
# run copyparty
|
# run copyparty
|
||||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
|
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
|
||||||
|
/etc/copyparty.d \
|
||||||
|
-- \
|
||||||
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
|
|||||||
@@ -3,6 +3,9 @@
|
|||||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||||
withHashedPasswords ? true,
|
withHashedPasswords ? true,
|
||||||
|
|
||||||
|
# generate TLS certificates on startup (pointless when reverse-proxied)
|
||||||
|
withCertgen ? false,
|
||||||
|
|
||||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||||
withThumbnails ? true,
|
withThumbnails ? true,
|
||||||
|
|
||||||
@@ -34,6 +37,7 @@ let
|
|||||||
]
|
]
|
||||||
++ lib.optional withSMB impacket
|
++ lib.optional withSMB impacket
|
||||||
++ lib.optional withFTPS pyopenssl
|
++ lib.optional withFTPS pyopenssl
|
||||||
|
++ lib.optional withCertgen cfssl
|
||||||
++ lib.optional withThumbnails pillow
|
++ lib.optional withThumbnails pillow
|
||||||
++ lib.optional withFastThumbnails pyvips
|
++ lib.optional withFastThumbnails pyvips
|
||||||
++ lib.optional withMediaProcessing ffmpeg
|
++ lib.optional withMediaProcessing ffmpeg
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"url": "https://github.com/9001/copyparty/releases/download/v1.9.8/copyparty-sfx.py",
|
"url": "https://github.com/9001/copyparty/releases/download/v1.9.27/copyparty-sfx.py",
|
||||||
"version": "1.9.8",
|
"version": "1.9.27",
|
||||||
"hash": "sha256-j64rMm3znrfN3c+vpFQloEAyp8PVna67kzjpunk0byw="
|
"hash": "sha256-o06o/gUIkQhDw9a9SLjuAyQUoLMfFCqkIeonUFzezds="
|
||||||
}
|
}
|
||||||
@@ -10,7 +10,7 @@ name="copyparty"
|
|||||||
rcvar="copyparty_enable"
|
rcvar="copyparty_enable"
|
||||||
copyparty_user="copyparty"
|
copyparty_user="copyparty"
|
||||||
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
|
||||||
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
|
||||||
pidfile="/var/run/copyparty/${name}.pid"
|
pidfile="/var/run/copyparty/${name}.pid"
|
||||||
command="/usr/sbin/daemon"
|
command="/usr/sbin/daemon"
|
||||||
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
command_args="-P ${pidfile} -r -f ${copyparty_command}"
|
||||||
|
|||||||
42
contrib/systemd/copyparty.conf
Normal file
42
contrib/systemd/copyparty.conf
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
# not actually YAML but lets pretend:
|
||||||
|
# -*- mode: yaml -*-
|
||||||
|
# vim: ft=yaml:
|
||||||
|
|
||||||
|
|
||||||
|
# put this file in /etc/
|
||||||
|
|
||||||
|
|
||||||
|
[global]
|
||||||
|
e2dsa # enable file indexing and filesystem scanning
|
||||||
|
e2ts # and enable multimedia indexing
|
||||||
|
ansi # and colors in log messages
|
||||||
|
|
||||||
|
# disable logging to stdout/journalctl and log to a file instead;
|
||||||
|
# $LOGS_DIRECTORY is usually /var/log/copyparty (comes from systemd)
|
||||||
|
# and copyparty replaces %Y-%m%d with Year-MonthDay, so the
|
||||||
|
# full path will be something like /var/log/copyparty/2023-1130.txt
|
||||||
|
# (note: enable compression by adding .xz at the end)
|
||||||
|
q, lo: $LOGS_DIRECTORY/%Y-%m%d.log
|
||||||
|
|
||||||
|
# p: 80,443,3923 # listen on 80/443 as well (requires CAP_NET_BIND_SERVICE)
|
||||||
|
# i: 127.0.0.1 # only allow connections from localhost (reverse-proxies)
|
||||||
|
# ftp: 3921 # enable ftp server on port 3921
|
||||||
|
# p: 3939 # listen on another port
|
||||||
|
# df: 16 # stop accepting uploads if less than 16 GB free disk space
|
||||||
|
# ver # show copyparty version in the controlpanel
|
||||||
|
# grid # show thumbnails/grid-view by default
|
||||||
|
# theme: 2 # monokai
|
||||||
|
# name: datasaver # change the server-name that's displayed in the browser
|
||||||
|
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
|
||||||
|
# no-robots, force-js # make it harder for search engines to read your server
|
||||||
|
|
||||||
|
|
||||||
|
[accounts]
|
||||||
|
ed: wark # username: password
|
||||||
|
|
||||||
|
|
||||||
|
[/] # create a volume at "/" (the webroot), which will
|
||||||
|
/mnt # share the contents of the "/mnt" folder
|
||||||
|
accs:
|
||||||
|
rw: * # everyone gets read-write access, but
|
||||||
|
rwmda: ed # the user "ed" gets read-write-move-delete-admin
|
||||||
@@ -1,28 +1,27 @@
|
|||||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
# this will start `/usr/local/bin/copyparty-sfx.py` and
|
||||||
# and share '/mnt' with anonymous read+write
|
# read copyparty config from `/etc/copyparty.conf`, for example:
|
||||||
|
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/systemd/copyparty.conf
|
||||||
#
|
#
|
||||||
# installation:
|
# installation:
|
||||||
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
|
||||||
# cp -pv copyparty.service /etc/systemd/system/
|
# useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
|
||||||
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
# firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
|
||||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
|
|
||||||
# firewall-cmd --reload
|
# firewall-cmd --reload
|
||||||
|
# cp -pv copyparty.service /etc/systemd/system/
|
||||||
|
# cp -pv copyparty.conf /etc/
|
||||||
|
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
|
||||||
# systemctl daemon-reload && systemctl enable --now copyparty
|
# systemctl daemon-reload && systemctl enable --now copyparty
|
||||||
#
|
#
|
||||||
# if it fails to start, first check this: systemctl status copyparty
|
# if it fails to start, first check this: systemctl status copyparty
|
||||||
# then try starting it while viewing logs: journalctl -fan 100
|
# then try starting it while viewing logs:
|
||||||
|
# journalctl -fan 100
|
||||||
|
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
|
||||||
#
|
#
|
||||||
# you may want to:
|
# you may want to:
|
||||||
# change "User=cpp" and "/home/cpp/" to another user
|
# - change "User=copyparty" and "/var/lib/copyparty/" to another user
|
||||||
# remove the nft lines to only listen on port 3923
|
# - edit /etc/copyparty.conf to configure copyparty
|
||||||
# and in the ExecStart= line:
|
# and in the ExecStart= line:
|
||||||
# change '/usr/bin/python3' to another interpreter
|
# - change '/usr/bin/python3' to another interpreter
|
||||||
# change '/mnt::rw' to another location or permission-set
|
|
||||||
# add '-q' to disable logging on busy servers
|
|
||||||
# add '-i 127.0.0.1' to only allow local connections
|
|
||||||
# add '-e2dsa' to enable filesystem scanning + indexing
|
|
||||||
# add '-e2ts' to enable metadata indexing
|
|
||||||
# remove '--ansi' to disable colored logs
|
|
||||||
#
|
#
|
||||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||||
# accept connections; correctly delaying units depending on copyparty.
|
# accept connections; correctly delaying units depending on copyparty.
|
||||||
@@ -30,11 +29,9 @@
|
|||||||
# python disabling line-buffering, so messages are out-of-order:
|
# python disabling line-buffering, so messages are out-of-order:
|
||||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||||
#
|
#
|
||||||
# unless you add -q to disable logging, you may want to remove the
|
########################################################################
|
||||||
# following line to allow buffering (slightly better performance):
|
########################################################################
|
||||||
# Environment=PYTHONUNBUFFERED=x
|
|
||||||
#
|
|
||||||
# keep ExecStartPre before ExecStart, at least on rhel8
|
|
||||||
|
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=copyparty file server
|
Description=copyparty file server
|
||||||
@@ -44,23 +41,52 @@ Type=notify
|
|||||||
SyslogIdentifier=copyparty
|
SyslogIdentifier=copyparty
|
||||||
Environment=PYTHONUNBUFFERED=x
|
Environment=PYTHONUNBUFFERED=x
|
||||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||||
|
PermissionsStartOnly=true
|
||||||
|
|
||||||
# user to run as + where the TLS certificate is (if any)
|
## user to run as + where the TLS certificate is (if any)
|
||||||
User=cpp
|
##
|
||||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
User=copyparty
|
||||||
|
Group=copyparty
|
||||||
|
WorkingDirectory=/var/lib/copyparty
|
||||||
|
Environment=XDG_CONFIG_HOME=/var/lib/copyparty/.config
|
||||||
|
|
||||||
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923
|
## OPTIONAL: allow copyparty to listen on low ports (like 80/443);
|
||||||
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true'
|
## you need to uncomment the "p: 80,443,3923" in the config too
|
||||||
ExecStartPre=+nft add table ip nat
|
## ------------------------------------------------------------
|
||||||
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; }
|
## a slightly safer alternative is to enable partyalone.service
|
||||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923
|
## which does portforwarding with nftables instead, but an even
|
||||||
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923
|
## better option is to use a reverse-proxy (nginx/caddy/...)
|
||||||
|
##
|
||||||
|
AmbientCapabilities=CAP_NET_BIND_SERVICE
|
||||||
|
|
||||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
## some quick hardening; TODO port more from the nixos package
|
||||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
##
|
||||||
|
MemoryMax=50%
|
||||||
|
MemorySwapMax=50%
|
||||||
|
ProtectClock=true
|
||||||
|
ProtectControlGroups=true
|
||||||
|
ProtectHostname=true
|
||||||
|
ProtectKernelLogs=true
|
||||||
|
ProtectKernelModules=true
|
||||||
|
ProtectKernelTunables=true
|
||||||
|
ProtectProc=invisible
|
||||||
|
RemoveIPC=true
|
||||||
|
RestrictNamespaces=true
|
||||||
|
RestrictRealtime=true
|
||||||
|
RestrictSUIDSGID=true
|
||||||
|
|
||||||
# copyparty settings
|
## create a directory for logfiles;
|
||||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw
|
## this defines $LOGS_DIRECTORY which is used in copyparty.conf
|
||||||
|
##
|
||||||
|
LogsDirectory=copyparty
|
||||||
|
|
||||||
|
## finally, start copyparty and give it the config file:
|
||||||
|
##
|
||||||
|
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -c /etc/copyparty.conf
|
||||||
|
|
||||||
|
# NOTE: if you installed copyparty from an OS package repo (nice)
|
||||||
|
# then you probably want something like this instead:
|
||||||
|
#ExecStart=/usr/bin/copyparty -c /etc/copyparty.conf
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# this will start `/usr/local/bin/copyparty-sfx.py`
|
# this will start `/usr/local/bin/copyparty-sfx.py`
|
||||||
# in a chroot, preventing accidental access elsewhere
|
# in a chroot, preventing accidental access elsewhere,
|
||||||
# and share '/mnt' with anonymous read+write
|
# and share '/mnt' with anonymous read+write
|
||||||
#
|
#
|
||||||
# installation:
|
# installation:
|
||||||
@@ -7,9 +7,9 @@
|
|||||||
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
|
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
|
||||||
#
|
#
|
||||||
# expose additional filesystem locations to copyparty
|
# expose additional filesystem locations to copyparty
|
||||||
# by listing them between the last `1000` and `--`
|
# by listing them between the last `cpp` and `--`
|
||||||
#
|
#
|
||||||
# `1000 1000` = what user to run copyparty as
|
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||||
#
|
#
|
||||||
# you may want to:
|
# you may want to:
|
||||||
# change '/mnt::rw' to another location or permission-set
|
# change '/mnt::rw' to another location or permission-set
|
||||||
@@ -32,7 +32,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
|
|||||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||||
|
|
||||||
# run copyparty
|
# run copyparty
|
||||||
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
|
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail cpp cpp \
|
||||||
|
/mnt \
|
||||||
|
-- \
|
||||||
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ if not PY2:
|
|||||||
unicode: Callable[[Any], str] = str
|
unicode: Callable[[Any], str] = str
|
||||||
else:
|
else:
|
||||||
sys.dont_write_bytecode = True
|
sys.dont_write_bytecode = True
|
||||||
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable
|
unicode = unicode # type: ignore
|
||||||
|
|
||||||
WINDOWS: Any = (
|
WINDOWS: Any = (
|
||||||
[int(x) for x in platform.version().split(".")]
|
[int(x) for x in platform.version().split(".")]
|
||||||
|
|||||||
@@ -19,14 +19,16 @@ import threading
|
|||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
import uuid
|
import uuid
|
||||||
from textwrap import dedent
|
|
||||||
|
|
||||||
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
|
||||||
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
|
||||||
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
|
from .authsrv import expand_config_file, split_cfg_ln, upgrade_cfg_fmt
|
||||||
from .cfg import flagcats, onedash
|
from .cfg import flagcats, onedash
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
from .util import (
|
from .util import (
|
||||||
|
DEF_EXP,
|
||||||
|
DEF_MTE,
|
||||||
|
DEF_MTH,
|
||||||
IMPLICATIONS,
|
IMPLICATIONS,
|
||||||
JINJA_VER,
|
JINJA_VER,
|
||||||
PYFTPD_VER,
|
PYFTPD_VER,
|
||||||
@@ -34,6 +36,7 @@ from .util import (
|
|||||||
UNPLICATIONS,
|
UNPLICATIONS,
|
||||||
align_tab,
|
align_tab,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
|
dedent,
|
||||||
min_ex,
|
min_ex,
|
||||||
py_desc,
|
py_desc,
|
||||||
pybin,
|
pybin,
|
||||||
@@ -140,9 +143,11 @@ def warn(msg: str) -> None:
|
|||||||
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
|
||||||
|
|
||||||
|
|
||||||
def init_E(E: EnvParams) -> None:
|
def init_E(EE: EnvParams) -> None:
|
||||||
# __init__ runs 18 times when oxidized; do expensive stuff here
|
# __init__ runs 18 times when oxidized; do expensive stuff here
|
||||||
|
|
||||||
|
E = EE # pylint: disable=redefined-outer-name
|
||||||
|
|
||||||
def get_unixdir() -> str:
|
def get_unixdir() -> str:
|
||||||
paths: list[tuple[Callable[..., Any], str]] = [
|
paths: list[tuple[Callable[..., Any], str]] = [
|
||||||
(os.environ.get, "XDG_CONFIG_HOME"),
|
(os.environ.get, "XDG_CONFIG_HOME"),
|
||||||
@@ -184,7 +189,10 @@ def init_E(E: EnvParams) -> None:
|
|||||||
|
|
||||||
with open_binary("copyparty", "z.tar") as tgz:
|
with open_binary("copyparty", "z.tar") as tgz:
|
||||||
with tarfile.open(fileobj=tgz) as tf:
|
with tarfile.open(fileobj=tgz) as tf:
|
||||||
tf.extractall(tdn) # nosec (archive is safe)
|
try:
|
||||||
|
tf.extractall(tdn, filter="tar")
|
||||||
|
except TypeError:
|
||||||
|
tf.extractall(tdn) # nosec (archive is safe)
|
||||||
|
|
||||||
return tdn
|
return tdn
|
||||||
|
|
||||||
@@ -240,7 +248,7 @@ def get_srvname() -> str:
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def get_fk_salt(cert_path) -> str:
|
def get_fk_salt() -> str:
|
||||||
fp = os.path.join(E.cfg, "fk-salt.txt")
|
fp = os.path.join(E.cfg, "fk-salt.txt")
|
||||||
try:
|
try:
|
||||||
with open(fp, "rb") as f:
|
with open(fp, "rb") as f:
|
||||||
@@ -314,6 +322,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
|||||||
# oh man i love openssl
|
# oh man i love openssl
|
||||||
# check this out
|
# check this out
|
||||||
# hold my beer
|
# hold my beer
|
||||||
|
assert ssl
|
||||||
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
|
||||||
sslver = terse_sslver(al.ssl_ver).split(",")
|
sslver = terse_sslver(al.ssl_ver).split(",")
|
||||||
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
flags = [k for k in ssl.__dict__ if ptn.match(k)]
|
||||||
@@ -347,6 +356,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
def configure_ssl_ciphers(al: argparse.Namespace) -> None:
|
||||||
|
assert ssl
|
||||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||||
if al.ssl_ver:
|
if al.ssl_ver:
|
||||||
ctx.options &= ~al.ssl_flags_en
|
ctx.options &= ~al.ssl_flags_en
|
||||||
@@ -426,9 +436,9 @@ def disable_quickedit() -> None:
|
|||||||
if PY2:
|
if PY2:
|
||||||
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
|
||||||
|
|
||||||
k32.GetStdHandle.errcheck = ecb
|
k32.GetStdHandle.errcheck = ecb # type: ignore
|
||||||
k32.GetConsoleMode.errcheck = ecb
|
k32.GetConsoleMode.errcheck = ecb # type: ignore
|
||||||
k32.SetConsoleMode.errcheck = ecb
|
k32.SetConsoleMode.errcheck = ecb # type: ignore
|
||||||
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
|
||||||
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
|
||||||
|
|
||||||
@@ -488,7 +498,9 @@ def get_sects():
|
|||||||
"g" (get): download files, but cannot see folder contents
|
"g" (get): download files, but cannot see folder contents
|
||||||
"G" (upget): "get", but can see filekeys of their own uploads
|
"G" (upget): "get", but can see filekeys of their own uploads
|
||||||
"h" (html): "get", but folders return their index.html
|
"h" (html): "get", but folders return their index.html
|
||||||
|
"." (dots): user can ask to show dotfiles in listings
|
||||||
"a" (admin): can see uploader IPs, config-reload
|
"a" (admin): can see uploader IPs, config-reload
|
||||||
|
"A" ("all"): same as "rwmda." (read/write/move/delete/admin/dotfiles)
|
||||||
|
|
||||||
too many volflags to list here, see --help-flags
|
too many volflags to list here, see --help-flags
|
||||||
|
|
||||||
@@ -641,6 +653,47 @@ def get_sects():
|
|||||||
"""
|
"""
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
|
[
|
||||||
|
"exp",
|
||||||
|
"text expansion",
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
specify --exp or the "exp" volflag to enable placeholder expansions
|
||||||
|
in README.md / .prologue.html / .epilogue.html
|
||||||
|
|
||||||
|
--exp-md (volflag exp_md) holds the list of placeholders which can be
|
||||||
|
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
|
||||||
|
any placeholder not given in those lists will be ignored and shown as-is
|
||||||
|
|
||||||
|
the default list will expand the following placeholders:
|
||||||
|
\033[36m{{self.ip}} \033[35mclient ip
|
||||||
|
\033[36m{{self.ua}} \033[35mclient user-agent
|
||||||
|
\033[36m{{self.uname}} \033[35mclient username
|
||||||
|
\033[36m{{self.host}} \033[35mthe "Host" header, or the server's external IP otherwise
|
||||||
|
\033[36m{{cfg.name}} \033[35mthe --name global-config
|
||||||
|
\033[36m{{cfg.logout}} \033[35mthe --logout global-config
|
||||||
|
\033[36m{{vf.scan}} \033[35mthe "scan" volflag
|
||||||
|
\033[36m{{vf.thsize}} \033[35mthumbnail size
|
||||||
|
\033[36m{{srv.itime}} \033[35mserver time in seconds
|
||||||
|
\033[36m{{srv.htime}} \033[35mserver time as YY-mm-dd, HH:MM:SS (UTC)
|
||||||
|
\033[36m{{hdr.cf_ipcountry}} \033[35mthe "CF-IPCountry" client header (probably blank)
|
||||||
|
\033[0m
|
||||||
|
so the following types of placeholders can be added to the lists:
|
||||||
|
* any client header can be accessed through {{hdr.*}}
|
||||||
|
* any variable in httpcli.py can be accessed through {{self.*}}
|
||||||
|
* any global server setting can be accessed through {{cfg.*}}
|
||||||
|
* any volflag can be accessed through {{vf.*}}
|
||||||
|
|
||||||
|
remove vf.scan from default list using --exp-md /vf.scan
|
||||||
|
add "accept" header to def. list using --exp-md +hdr.accept
|
||||||
|
|
||||||
|
for performance reasons, expansion only happens while embedding
|
||||||
|
documents into directory listings, and when accessing a ?doc=...
|
||||||
|
link, but never otherwise, so if you click a -txt- link you'll
|
||||||
|
have to refresh the page to apply expansion
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
],
|
||||||
[
|
[
|
||||||
"ls",
|
"ls",
|
||||||
"volume inspection",
|
"volume inspection",
|
||||||
@@ -654,6 +707,7 @@ def get_sects():
|
|||||||
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
||||||
\033[36mp\033[0m exits 1 if any such symlinks are found
|
\033[36mp\033[0m exits 1 if any such symlinks are found
|
||||||
\033[36mr\033[0m resumes startup after the listing
|
\033[36mr\033[0m resumes startup after the listing
|
||||||
|
|
||||||
examples:
|
examples:
|
||||||
--ls '**' # list all files which are possible to read
|
--ls '**' # list all files which are possible to read
|
||||||
--ls '**,*,ln' # check for dangerous symlinks
|
--ls '**,*,ln' # check for dangerous symlinks
|
||||||
@@ -687,9 +741,12 @@ def get_sects():
|
|||||||
"""
|
"""
|
||||||
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
|
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
|
||||||
|
|
||||||
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (password which do not start with '+') and then terminate afterwards
|
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but
|
||||||
|
copyparty will also hash and print any passwords that are non-hashed
|
||||||
|
(password which do not start with '+') and then terminate afterwards
|
||||||
|
|
||||||
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
|
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a
|
||||||
|
list of optional comma-separated arguments:
|
||||||
|
|
||||||
\033[36m--ah-alg argon2\033[0m # which is the same as:
|
\033[36m--ah-alg argon2\033[0m # which is the same as:
|
||||||
\033[36m--ah-alg argon2,3,256,4,19\033[0m
|
\033[36m--ah-alg argon2,3,256,4,19\033[0m
|
||||||
@@ -770,11 +827,9 @@ def add_general(ap, nc, srvname):
|
|||||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
|
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
|
||||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
|
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
|
||||||
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
|
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files (volflag=dots)")
|
||||||
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see \033[33m--help-urlform\033[0m")
|
||||||
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="server terminal title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
||||||
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
|
|
||||||
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
|
|
||||||
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
|
||||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||||
@@ -785,7 +840,7 @@ def add_qr(ap, tty):
|
|||||||
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
|
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
|
||||||
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
|
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
|
||||||
ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]")
|
ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]")
|
||||||
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with PREFIX; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with \033[33mPREFIX\033[0m; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
|
||||||
ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable")
|
ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable")
|
||||||
ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)")
|
ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)")
|
||||||
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
|
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
|
||||||
@@ -794,24 +849,28 @@ def add_qr(ap, tty):
|
|||||||
|
|
||||||
def add_upload(ap):
|
def add_upload(ap):
|
||||||
ap2 = ap.add_argument_group('upload options')
|
ap2 = ap.add_argument_group('upload options')
|
||||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
|
||||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled, default=12h")
|
||||||
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
|
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
|
||||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
|
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
|
||||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
|
||||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||||
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
|
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
|
||||||
|
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
|
||||||
|
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
|
||||||
|
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, \033[33m--nrand\033[0m chars long (volflag=rand)")
|
||||||
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
|
||||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||||
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
|
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
|
||||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
|
||||||
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
|
||||||
|
ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16-32 for cross-atlantic (max=64)")
|
||||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||||
|
|
||||||
@@ -820,11 +879,12 @@ def add_network(ap):
|
|||||||
ap2 = ap.add_argument_group('network options')
|
ap2 = ap.add_argument_group('network options')
|
||||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
|
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 in mDNS replies, even if the NIC has routable IPs (breaks some mDNS clients)")
|
||||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to associate clients with; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
|
||||||
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from (argument must be lowercase, but not the actual header)")
|
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from")
|
||||||
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (--xff-hdr) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using --xff-hdr=cf-connecting-ip (or similar)")
|
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (\033[33m--xff-hdr\033[0m) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
|
||||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
|
ap2.add_argument("--ipa", metavar="PREFIX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPREFIX\033[0m; example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||||
|
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||||
else:
|
else:
|
||||||
@@ -834,7 +894,7 @@ def add_network(ap):
|
|||||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||||
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
|
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
|
||||||
|
|
||||||
|
|
||||||
def add_tls(ap, cert_path):
|
def add_tls(ap, cert_path):
|
||||||
@@ -853,7 +913,7 @@ def add_cert(ap, cert_path):
|
|||||||
ap2 = ap.add_argument_group('TLS certificate generator options')
|
ap2 = ap.add_argument_group('TLS certificate generator options')
|
||||||
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
ap2.add_argument("--no-crt", action="store_true", help="disable automatic certificate creation")
|
||||||
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
ap2.add_argument("--crt-ns", metavar="N,N", type=u, default="", help="comma-separated list of FQDNs (domains) to add into the certificate")
|
||||||
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
|
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each \033[33m--crt-ns\033[0m")
|
||||||
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
|
||||||
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
|
||||||
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
|
||||||
@@ -864,7 +924,14 @@ def add_cert(ap, cert_path):
|
|||||||
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
ap2.add_argument("--crt-cnc", metavar="TXT", type=u, default="--crt-cn", help="override CA name")
|
||||||
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
ap2.add_argument("--crt-cns", metavar="TXT", type=u, default="--crt-cn cpp", help="override server-cert name")
|
||||||
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
ap2.add_argument("--crt-back", metavar="HRS", type=float, default=72, help="backdate in hours")
|
||||||
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
|
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: \033[32mecdsa-256 rsa-4096 rsa-2048\033[0m")
|
||||||
|
|
||||||
|
|
||||||
|
def add_auth(ap):
|
||||||
|
ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
|
||||||
|
ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
|
||||||
|
return
|
||||||
|
ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
|
||||||
|
|
||||||
|
|
||||||
def add_zeroconf(ap):
|
def add_zeroconf(ap):
|
||||||
@@ -872,21 +939,21 @@ def add_zeroconf(ap):
|
|||||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||||
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
||||||
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
|
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every \033[33mSEC\033[0m seconds (0=disable)")
|
||||||
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
|
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
|
||||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
|
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every \033[33mSEC\033[0m seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]\n └─note: can be due to firewalls; make sure UDP port 5353 is open in both directions (on clients too)")
|
||||||
|
|
||||||
|
|
||||||
def add_zc_mdns(ap):
|
def add_zc_mdns(ap):
|
||||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
|
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
|
||||||
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
||||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable mDNS ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable mDNS on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
|
ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
|
||||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||||
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
|
||||||
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set)")
|
ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
|
||||||
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
|
||||||
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
|
||||||
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
|
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
|
||||||
@@ -894,14 +961,14 @@ def add_zc_mdns(ap):
|
|||||||
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
|
||||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
|
||||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||||
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server")
|
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server, or to avoid some firewall issues")
|
||||||
|
|
||||||
|
|
||||||
def add_zc_ssdp(ap):
|
def add_zc_ssdp(ap):
|
||||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
||||||
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
||||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable SSDP ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable SSDP on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
||||||
@@ -909,11 +976,12 @@ def add_zc_ssdp(ap):
|
|||||||
|
|
||||||
def add_ftp(ap):
|
def add_ftp(ap):
|
||||||
ap2 = ap.add_argument_group('FTP options')
|
ap2 = ap.add_argument_group('FTP options')
|
||||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
|
||||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
|
||||||
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
ap2.add_argument("--ftpv", action="store_true", help="verbose")
|
||||||
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
|
||||||
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)")
|
ap2.add_argument("--ftp-ipa", metavar="PFX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPFX\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Example: [\033[32m127., 10.89., 192.168.\033[0m]")
|
||||||
|
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
|
||||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||||
|
|
||||||
@@ -929,7 +997,7 @@ def add_webdav(ap):
|
|||||||
|
|
||||||
def add_smb(ap):
|
def add_smb(ap):
|
||||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet!")
|
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless \033[33m--smb-port\033[0m is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is DANGEROUS and buggy! Never expose to the internet!")
|
||||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||||
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
ap2.add_argument("--smb1", action="store_true", help="disable SMBv2, only enable SMBv1 (CIFS)")
|
||||||
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
ap2.add_argument("--smb-port", metavar="PORT", type=int, default=445, help="port to listen on -- if you change this value, you must NAT from TCP:445 to this port using iptables or similar")
|
||||||
@@ -943,22 +1011,22 @@ def add_smb(ap):
|
|||||||
|
|
||||||
def add_handlers(ap):
|
def add_handlers(ap):
|
||||||
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
|
ap2 = ap.add_argument_group('handlers (see --help-handlers)')
|
||||||
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing PY file")
|
ap2.add_argument("--on404", metavar="PY", type=u, action="append", help="handle 404s by executing \033[33mPY\033[0m file")
|
||||||
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing PY file")
|
ap2.add_argument("--on403", metavar="PY", type=u, action="append", help="handle 403s by executing \033[33mPY\033[0m file")
|
||||||
ap2.add_argument("--hot-handlers", action="store_true", help="reload handlers on each request -- expensive but convenient when hacking on stuff")
|
ap2.add_argument("--hot-handlers", action="store_true", help="recompile handlers on each request -- expensive but convenient when hacking on stuff")
|
||||||
|
|
||||||
|
|
||||||
def add_hooks(ap):
|
def add_hooks(ap):
|
||||||
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
||||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
|
||||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
|
||||||
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
|
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
|
||||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
|
||||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
|
||||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
|
||||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
|
||||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
|
||||||
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404/403/url)")
|
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
|
||||||
|
|
||||||
|
|
||||||
def add_stats(ap):
|
def add_stats(ap):
|
||||||
@@ -966,6 +1034,7 @@ def add_stats(ap):
|
|||||||
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
|
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
|
||||||
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
|
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
|
||||||
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
|
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
|
||||||
|
ap2.add_argument("--nos-vst", action="store_true", help="disable volume state metrics (indexing, analyzing, activity)")
|
||||||
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
|
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
|
||||||
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
|
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
|
||||||
|
|
||||||
@@ -979,54 +1048,54 @@ def add_yolo(ap):
|
|||||||
def add_optouts(ap):
|
def add_optouts(ap):
|
||||||
ap2 = ap.add_argument_group('opt-outs')
|
ap2 = ap.add_argument_group('opt-outs')
|
||||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection in the terminal window, as this would pause execution)")
|
||||||
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
|
||||||
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
|
||||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||||
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show --name in <title>")
|
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
|
||||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||||
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
|
||||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
|
ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
|
||||||
|
|
||||||
|
|
||||||
def add_safety(ap):
|
def add_safety(ap):
|
||||||
ap2 = ap.add_argument_group('safety options')
|
ap2 = ap.add_argument_group('safety options')
|
||||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 --turbo=-1 -nih")
|
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 -nih")
|
||||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m (see \033[33m--help-ls\033[0m); example [\033[32m**,*,ln,p,r\033[0m]")
|
||||||
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
|
||||||
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to turn something into a dotfile")
|
||||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||||
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
|
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore \033[33m--no-robots\033[0m")
|
||||||
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
|
||||||
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
|
||||||
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
|
||||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
|
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
|
||||||
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
|
||||||
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (422 is server fuzzing, invalid POSTs and so)")
|
ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (invalid requests, attempted exploits ++)")
|
||||||
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (decent replacement for --ban-404 if that can't be used)")
|
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to permissions g/G/h (decent replacement for \033[33m--ban-404\033[0m if that can't be used)")
|
||||||
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
|
||||||
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
|
||||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
|
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for \033[33mMIN\033[0m minutes (and also kill its active connections) -- disable with 0")
|
||||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
|
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for \033[33mB\033[0m minutes; disable with [\033[32m0\033[0m]")
|
||||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
|
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)")
|
||||||
|
|
||||||
|
|
||||||
def add_salt(ap, fk_salt, ah_salt):
|
def add_salt(ap, fk_salt, ah_salt):
|
||||||
ap2 = ap.add_argument_group('salting options')
|
ap2 = ap.add_argument_group('salting options')
|
||||||
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
|
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)")
|
||||||
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
|
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)")
|
||||||
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
|
||||||
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords")
|
||||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
|
||||||
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
|
||||||
|
|
||||||
@@ -1035,21 +1104,23 @@ def add_shutdown(ap):
|
|||||||
ap2 = ap.add_argument_group('shutdown options')
|
ap2 = ap.add_argument_group('shutdown options')
|
||||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after \033[33mWHEN\033[0m has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
|
||||||
|
|
||||||
|
|
||||||
def add_logging(ap):
|
def add_logging(ap):
|
||||||
ap2 = ap.add_argument_group('logging options')
|
ap2 = ap.add_argument_group('logging options')
|
||||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
ap2.add_argument("-q", action="store_true", help="quiet; disable most STDOUT messages")
|
||||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz\033[0m (NB: some errors may appear on STDOUT only)")
|
||||||
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
|
||||||
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
|
||||||
|
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
|
||||||
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
|
||||||
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
|
||||||
|
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
|
||||||
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
|
||||||
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
|
||||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
|
||||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching")
|
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
|
||||||
|
|
||||||
|
|
||||||
def add_admin(ap):
|
def add_admin(ap):
|
||||||
@@ -1067,22 +1138,22 @@ def add_thumbnail(ap):
|
|||||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
|
||||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
|
||||||
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60, help="conversion timeout in seconds (volflag=convt)")
|
||||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image (volflag=nocrop)")
|
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image by default (client can override in UI) (volflag=nocrop)")
|
||||||
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
|
||||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||||
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
|
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs (avoids issues on some FFmpeg builds)")
|
||||||
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
|
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs (faster, lower accuracy, avoids issues on some FFmpeg builds)")
|
||||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
|
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than \033[33mSEC\033[0m seconds")
|
||||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
|
||||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
|
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than \033[33m--th-poke\033[0m seconds will get deleted every \033[33m--th-clean\033[0m seconds")
|
||||||
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling \033[33m-e2d\033[0m will make these case-insensitive, and try them as dotfiles (.folder.jpg), and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
|
||||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||||
# https://github.com/libvips/libvips
|
# https://github.com/libvips/libvips
|
||||||
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
|
||||||
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
||||||
|
|
||||||
@@ -1091,14 +1162,14 @@ def add_transcoding(ap):
|
|||||||
ap2 = ap.add_argument_group('transcoding options')
|
ap2 = ap.add_argument_group('transcoding options')
|
||||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||||
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
|
||||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||||
|
|
||||||
|
|
||||||
def add_db_general(ap, hcores):
|
def add_db_general(ap, hcores):
|
||||||
ap2 = ap.add_argument_group('general db options')
|
ap2 = ap.add_argument_group('general db options')
|
||||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
|
||||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets \033[33m-e2d\033[0m")
|
||||||
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
|
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets \033[33m-e2ds\033[0m")
|
||||||
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
|
||||||
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
|
||||||
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
|
||||||
@@ -1107,13 +1178,13 @@ def add_db_general(ap, hcores):
|
|||||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
|
||||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
|
||||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
|
||||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||||
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
|
||||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
|
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="rescan filesystem for changes every \033[33mSEC\033[0m seconds; 0=off (volflag=scan)")
|
||||||
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
|
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
|
||||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
|
||||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||||
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
|
||||||
|
|
||||||
@@ -1121,29 +1192,37 @@ def add_db_general(ap, hcores):
|
|||||||
def add_db_metadata(ap):
|
def add_db_metadata(ap):
|
||||||
ap2 = ap.add_argument_group('metadata db options')
|
ap2 = ap.add_argument_group('metadata db options')
|
||||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
ap2.add_argument("-e2ts", action="store_true", help="scan newly discovered files for metadata on startup; sets \033[33m-e2t\033[0m")
|
||||||
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets -e2ts")
|
ap2.add_argument("-e2tsr", action="store_true", help="delete all metadata from DB and do a full rescan; sets \033[33m-e2ts\033[0m")
|
||||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will catch more tags")
|
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead; will detect more tags")
|
||||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader; is probably safer")
|
||||||
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for ffprobe tag-scan")
|
ap2.add_argument("--mtag-to", metavar="SEC", type=int, default=60, help="timeout for FFprobe tag-scan")
|
||||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
|
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for tag scanning")
|
||||||
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
|
||||||
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/ffprobe parsers")
|
ap2.add_argument("--mtag-vv", action="store_true", help="debug mtp settings and mutagen/FFprobe parsers")
|
||||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.); either an entire replacement list, or add/remove stuff on the default-list with +foo or /bar", default=DEF_MTE)
|
||||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash,up_ip,.up_at")
|
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.); assign/add/remove same as \033[33m-mte\033[0m", default=DEF_MTH)
|
||||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag \033[33mM\033[0m using program \033[33mBIN\033[0m to parse the file")
|
||||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
|
||||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
|
||||||
|
def add_txt(ap):
|
||||||
|
ap2 = ap.add_argument_group('textfile options')
|
||||||
|
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
|
||||||
|
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
|
||||||
|
ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
|
||||||
|
ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
|
||||||
|
ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
|
||||||
|
|
||||||
|
|
||||||
def add_ui(ap, retry):
|
def add_ui(ap, retry):
|
||||||
ap2 = ap.add_argument_group('ui options')
|
ap2 = ap.add_argument_group('ui options')
|
||||||
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
ap2.add_argument("--grid", action="store_true", help="show grid/thumbnails by default (volflag=grid)")
|
||||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: eng nor")
|
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language; one of the following: \033[32meng nor\033[0m")
|
||||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use (0..7)")
|
||||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||||
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
|
||||||
|
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
|
||||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||||
@@ -1154,8 +1233,8 @@ def add_ui(ap, retry):
|
|||||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
|
||||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with -nb)")
|
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||||
@@ -1166,17 +1245,18 @@ def add_debug(ap):
|
|||||||
ap2 = ap.add_argument_group('debug options')
|
ap2 = ap.add_argument_group('debug options')
|
||||||
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||||
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
ap2.add_argument("--no-sendfile", action="store_true", help="kernel-bug workaround: disable sendfile; do a safe and slow read-send-loop instead")
|
||||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
ap2.add_argument("--no-scandir", action="store_true", help="kernel-bug workaround: disable scandir; do a listdir + stat on each file instead")
|
||||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
ap2.add_argument("--no-fastboot", action="store_true", help="wait for initial filesystem indexing before accepting client requests")
|
||||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||||
|
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
|
||||||
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
|
||||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
|
||||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
|
||||||
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; [\033[32m.\033[0m] (a single dot) = all files")
|
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
|
||||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
|
||||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
|
||||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
|
||||||
|
|
||||||
|
|
||||||
# fmt: on
|
# fmt: on
|
||||||
@@ -1193,7 +1273,7 @@ def run_argparse(
|
|||||||
|
|
||||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||||
|
|
||||||
fk_salt = get_fk_salt(cert_path)
|
fk_salt = get_fk_salt()
|
||||||
ah_salt = get_ah_salt()
|
ah_salt = get_ah_salt()
|
||||||
|
|
||||||
# alpine peaks at 5 threads for some reason,
|
# alpine peaks at 5 threads for some reason,
|
||||||
@@ -1209,6 +1289,7 @@ def run_argparse(
|
|||||||
add_network(ap)
|
add_network(ap)
|
||||||
add_tls(ap, cert_path)
|
add_tls(ap, cert_path)
|
||||||
add_cert(ap, cert_path)
|
add_cert(ap, cert_path)
|
||||||
|
add_auth(ap)
|
||||||
add_qr(ap, tty)
|
add_qr(ap, tty)
|
||||||
add_zeroconf(ap)
|
add_zeroconf(ap)
|
||||||
add_zc_mdns(ap)
|
add_zc_mdns(ap)
|
||||||
@@ -1229,6 +1310,7 @@ def run_argparse(
|
|||||||
add_handlers(ap)
|
add_handlers(ap)
|
||||||
add_hooks(ap)
|
add_hooks(ap)
|
||||||
add_stats(ap)
|
add_stats(ap)
|
||||||
|
add_txt(ap)
|
||||||
add_ui(ap, retry)
|
add_ui(ap, retry)
|
||||||
add_admin(ap)
|
add_admin(ap)
|
||||||
add_logging(ap)
|
add_logging(ap)
|
||||||
@@ -1256,7 +1338,7 @@ def run_argparse(
|
|||||||
for k, h, t in sects:
|
for k, h, t in sects:
|
||||||
k2 = "help_" + k.replace("-", "_")
|
k2 = "help_" + k.replace("-", "_")
|
||||||
if vars(ret)[k2]:
|
if vars(ret)[k2]:
|
||||||
lprint("# {} help page".format(k))
|
lprint("# %s help page (%s)" % (k, h))
|
||||||
lprint(t + "\033[0m")
|
lprint(t + "\033[0m")
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
@@ -1309,7 +1391,10 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
supp = args_from_cfg(v)
|
supp = args_from_cfg(v)
|
||||||
argv.extend(supp)
|
argv.extend(supp)
|
||||||
|
|
||||||
deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
|
deprecated: list[tuple[str, str]] = [
|
||||||
|
("--salt", "--warksalt"),
|
||||||
|
("--hdr-au-usr", "--idp-h-usr"),
|
||||||
|
]
|
||||||
for dk, nk in deprecated:
|
for dk, nk in deprecated:
|
||||||
idx = -1
|
idx = -1
|
||||||
ov = ""
|
ov = ""
|
||||||
@@ -1384,40 +1469,6 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
if al.ansi:
|
if al.ansi:
|
||||||
al.wintitle = ""
|
al.wintitle = ""
|
||||||
|
|
||||||
nstrs: list[str] = []
|
|
||||||
anymod = False
|
|
||||||
for ostr in al.v or []:
|
|
||||||
m = re_vol.match(ostr)
|
|
||||||
if not m:
|
|
||||||
# not our problem
|
|
||||||
nstrs.append(ostr)
|
|
||||||
continue
|
|
||||||
|
|
||||||
src, dst, perms = m.groups()
|
|
||||||
na = [src, dst]
|
|
||||||
mod = False
|
|
||||||
for opt in perms.split(":"):
|
|
||||||
if re.match("c[^,]", opt):
|
|
||||||
mod = True
|
|
||||||
na.append("c," + opt[1:])
|
|
||||||
elif re.sub("^[rwmdgGha]*", "", opt) and "," not in opt:
|
|
||||||
mod = True
|
|
||||||
perm = opt[0]
|
|
||||||
na.append(perm + "," + opt[1:])
|
|
||||||
else:
|
|
||||||
na.append(opt)
|
|
||||||
|
|
||||||
nstr = ":".join(na)
|
|
||||||
nstrs.append(nstr if mod else ostr)
|
|
||||||
if mod:
|
|
||||||
msg = "\033[1;31mWARNING:\033[0;1m\n -v {} \033[0;33mwas replaced with\033[0;1m\n -v {} \n\033[0m"
|
|
||||||
lprint(msg.format(ostr, nstr))
|
|
||||||
anymod = True
|
|
||||||
|
|
||||||
if anymod:
|
|
||||||
al.v = nstrs
|
|
||||||
time.sleep(2)
|
|
||||||
|
|
||||||
# propagate implications
|
# propagate implications
|
||||||
for k1, k2 in IMPLICATIONS:
|
for k1, k2 in IMPLICATIONS:
|
||||||
if getattr(al, k1):
|
if getattr(al, k1):
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
VERSION = (1, 9, 9)
|
VERSION = (1, 9, 28)
|
||||||
CODENAME = "prometheable"
|
CODENAME = "prometheable"
|
||||||
BUILD_DT = (2023, 10, 8)
|
BUILD_DT = (2023, 12, 31)
|
||||||
|
|
||||||
S_VERSION = ".".join(map(str, VERSION))
|
S_VERSION = ".".join(map(str, VERSION))
|
||||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ import threading
|
|||||||
import time
|
import time
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from .__init__ import ANYWIN, E, TYPE_CHECKING, WINDOWS
|
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS, E
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
|
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
|
||||||
from .pwhash import PWHash
|
from .pwhash import PWHash
|
||||||
@@ -21,11 +21,14 @@ from .util import (
|
|||||||
META_NOBOTS,
|
META_NOBOTS,
|
||||||
SQLITE_VER,
|
SQLITE_VER,
|
||||||
UNPLICATIONS,
|
UNPLICATIONS,
|
||||||
|
UTC,
|
||||||
|
ODict,
|
||||||
Pebkac,
|
Pebkac,
|
||||||
absreal,
|
absreal,
|
||||||
afsenc,
|
afsenc,
|
||||||
get_df,
|
get_df,
|
||||||
humansize,
|
humansize,
|
||||||
|
odfusion,
|
||||||
relchk,
|
relchk,
|
||||||
statdir,
|
statdir,
|
||||||
uncyg,
|
uncyg,
|
||||||
@@ -69,6 +72,7 @@ class AXS(object):
|
|||||||
upget: Optional[Union[list[str], set[str]]] = None,
|
upget: Optional[Union[list[str], set[str]]] = None,
|
||||||
uhtml: Optional[Union[list[str], set[str]]] = None,
|
uhtml: Optional[Union[list[str], set[str]]] = None,
|
||||||
uadmin: Optional[Union[list[str], set[str]]] = None,
|
uadmin: Optional[Union[list[str], set[str]]] = None,
|
||||||
|
udot: Optional[Union[list[str], set[str]]] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.uread: set[str] = set(uread or [])
|
self.uread: set[str] = set(uread or [])
|
||||||
self.uwrite: set[str] = set(uwrite or [])
|
self.uwrite: set[str] = set(uwrite or [])
|
||||||
@@ -78,9 +82,10 @@ class AXS(object):
|
|||||||
self.upget: set[str] = set(upget or [])
|
self.upget: set[str] = set(upget or [])
|
||||||
self.uhtml: set[str] = set(uhtml or [])
|
self.uhtml: set[str] = set(uhtml or [])
|
||||||
self.uadmin: set[str] = set(uadmin or [])
|
self.uadmin: set[str] = set(uadmin or [])
|
||||||
|
self.udot: set[str] = set(udot or [])
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
ks = "uread uwrite umove udel uget upget uhtml uadmin".split()
|
ks = "uread uwrite umove udel uget upget uhtml uadmin udot".split()
|
||||||
return "AXS(%s)" % (", ".join("%s=%r" % (k, self.__dict__[k]) for k in ks),)
|
return "AXS(%s)" % (", ".join("%s=%r" % (k, self.__dict__[k]) for k in ks),)
|
||||||
|
|
||||||
|
|
||||||
@@ -213,7 +218,7 @@ class Lim(object):
|
|||||||
if self.rot_re.search(path.replace("\\", "/")):
|
if self.rot_re.search(path.replace("\\", "/")):
|
||||||
return path, ""
|
return path, ""
|
||||||
|
|
||||||
suf = datetime.utcnow().strftime(self.rotf)
|
suf = datetime.now(UTC).strftime(self.rotf)
|
||||||
if path:
|
if path:
|
||||||
path += "/"
|
path += "/"
|
||||||
|
|
||||||
@@ -333,6 +338,8 @@ class VFS(object):
|
|||||||
self.apget: dict[str, list[str]] = {}
|
self.apget: dict[str, list[str]] = {}
|
||||||
self.ahtml: dict[str, list[str]] = {}
|
self.ahtml: dict[str, list[str]] = {}
|
||||||
self.aadmin: dict[str, list[str]] = {}
|
self.aadmin: dict[str, list[str]] = {}
|
||||||
|
self.adot: dict[str, list[str]] = {}
|
||||||
|
self.all_vols: dict[str, VFS] = {}
|
||||||
|
|
||||||
if realpath:
|
if realpath:
|
||||||
rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
|
rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
|
||||||
@@ -411,7 +418,7 @@ class VFS(object):
|
|||||||
hist = flags.get("hist")
|
hist = flags.get("hist")
|
||||||
if hist and hist != "-":
|
if hist and hist != "-":
|
||||||
zs = "{}/{}".format(hist.rstrip("/"), name)
|
zs = "{}/{}".format(hist.rstrip("/"), name)
|
||||||
flags["hist"] = os.path.expanduser(zs) if zs.startswith("~") else zs
|
flags["hist"] = os.path.expandvars(os.path.expanduser(zs))
|
||||||
|
|
||||||
return flags
|
return flags
|
||||||
|
|
||||||
@@ -442,8 +449,8 @@ class VFS(object):
|
|||||||
|
|
||||||
def can_access(
|
def can_access(
|
||||||
self, vpath: str, uname: str
|
self, vpath: str, uname: str
|
||||||
) -> tuple[bool, bool, bool, bool, bool, bool, bool]:
|
) -> tuple[bool, bool, bool, bool, bool, bool, bool, bool]:
|
||||||
"""can Read,Write,Move,Delete,Get,Upget,Admin"""
|
"""can Read,Write,Move,Delete,Get,Upget,Admin,Dot"""
|
||||||
if vpath:
|
if vpath:
|
||||||
vn, _ = self._find(undot(vpath))
|
vn, _ = self._find(undot(vpath))
|
||||||
else:
|
else:
|
||||||
@@ -451,13 +458,14 @@ class VFS(object):
|
|||||||
|
|
||||||
c = vn.axs
|
c = vn.axs
|
||||||
return (
|
return (
|
||||||
uname in c.uread or "*" in c.uread,
|
uname in c.uread,
|
||||||
uname in c.uwrite or "*" in c.uwrite,
|
uname in c.uwrite,
|
||||||
uname in c.umove or "*" in c.umove,
|
uname in c.umove,
|
||||||
uname in c.udel or "*" in c.udel,
|
uname in c.udel,
|
||||||
uname in c.uget or "*" in c.uget,
|
uname in c.uget,
|
||||||
uname in c.upget or "*" in c.upget,
|
uname in c.upget,
|
||||||
uname in c.uadmin or "*" in c.uadmin,
|
uname in c.uadmin,
|
||||||
|
uname in c.udot,
|
||||||
)
|
)
|
||||||
# skip uhtml because it's rarely needed
|
# skip uhtml because it's rarely needed
|
||||||
|
|
||||||
@@ -473,12 +481,10 @@ class VFS(object):
|
|||||||
err: int = 403,
|
err: int = 403,
|
||||||
) -> tuple["VFS", str]:
|
) -> tuple["VFS", str]:
|
||||||
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
||||||
if ANYWIN:
|
if relchk(vpath):
|
||||||
mod = relchk(vpath)
|
if self.log:
|
||||||
if mod:
|
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
||||||
if self.log:
|
raise Pebkac(422)
|
||||||
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
|
||||||
raise Pebkac(404)
|
|
||||||
|
|
||||||
cvpath = undot(vpath)
|
cvpath = undot(vpath)
|
||||||
vn, rem = self._find(cvpath)
|
vn, rem = self._find(cvpath)
|
||||||
@@ -491,14 +497,14 @@ class VFS(object):
|
|||||||
(will_del, c.udel, "delete"),
|
(will_del, c.udel, "delete"),
|
||||||
(will_get, c.uget, "get"),
|
(will_get, c.uget, "get"),
|
||||||
]:
|
]:
|
||||||
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
if req and uname not in d and uname != LEELOO_DALLAS:
|
||||||
if vpath != cvpath and vpath != "." and self.log:
|
if vpath != cvpath and vpath != "." and self.log:
|
||||||
ap = vn.canonical(rem)
|
ap = vn.canonical(rem)
|
||||||
t = "{} has no {} in [{}] => [{}] => [{}]"
|
t = "{} has no {} in [{}] => [{}] => [{}]"
|
||||||
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
|
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
|
||||||
|
|
||||||
t = "you don't have {}-access for this location"
|
t = 'you don\'t have %s-access in "/%s"'
|
||||||
raise Pebkac(err, t.format(msg))
|
raise Pebkac(err, t % (msg, cvpath))
|
||||||
|
|
||||||
return vn, rem
|
return vn, rem
|
||||||
|
|
||||||
@@ -552,7 +558,7 @@ class VFS(object):
|
|||||||
for pset in permsets:
|
for pset in permsets:
|
||||||
ok = True
|
ok = True
|
||||||
for req, lst in zip(pset, axs):
|
for req, lst in zip(pset, axs):
|
||||||
if req and uname not in lst and "*" not in lst:
|
if req and uname not in lst:
|
||||||
ok = False
|
ok = False
|
||||||
if ok:
|
if ok:
|
||||||
break
|
break
|
||||||
@@ -576,7 +582,7 @@ class VFS(object):
|
|||||||
seen: list[str],
|
seen: list[str],
|
||||||
uname: str,
|
uname: str,
|
||||||
permsets: list[list[bool]],
|
permsets: list[list[bool]],
|
||||||
dots: bool,
|
wantdots: bool,
|
||||||
scandir: bool,
|
scandir: bool,
|
||||||
lstat: bool,
|
lstat: bool,
|
||||||
subvols: bool = True,
|
subvols: bool = True,
|
||||||
@@ -620,6 +626,10 @@ class VFS(object):
|
|||||||
rm1.append(le)
|
rm1.append(le)
|
||||||
_ = [vfs_ls.remove(x) for x in rm1] # type: ignore
|
_ = [vfs_ls.remove(x) for x in rm1] # type: ignore
|
||||||
|
|
||||||
|
dots_ok = wantdots and uname in dbv.axs.udot
|
||||||
|
if not dots_ok:
|
||||||
|
vfs_ls = [x for x in vfs_ls if "/." not in "/" + x[0]]
|
||||||
|
|
||||||
seen = seen[:] + [fsroot]
|
seen = seen[:] + [fsroot]
|
||||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||||
@@ -632,13 +642,13 @@ class VFS(object):
|
|||||||
yield dbv, vrem, rel, fsroot, rfiles, rdirs, vfs_virt
|
yield dbv, vrem, rel, fsroot, rfiles, rdirs, vfs_virt
|
||||||
|
|
||||||
for rdir, _ in rdirs:
|
for rdir, _ in rdirs:
|
||||||
if not dots and rdir.startswith("."):
|
if not dots_ok and rdir.startswith("."):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
wrel = (rel + "/" + rdir).lstrip("/")
|
wrel = (rel + "/" + rdir).lstrip("/")
|
||||||
wrem = (rem + "/" + rdir).lstrip("/")
|
wrem = (rem + "/" + rdir).lstrip("/")
|
||||||
for x in self.walk(
|
for x in self.walk(
|
||||||
wrel, wrem, seen, uname, permsets, dots, scandir, lstat, subvols
|
wrel, wrem, seen, uname, permsets, wantdots, scandir, lstat, subvols
|
||||||
):
|
):
|
||||||
yield x
|
yield x
|
||||||
|
|
||||||
@@ -646,11 +656,13 @@ class VFS(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
for n, vfs in sorted(vfs_virt.items()):
|
for n, vfs in sorted(vfs_virt.items()):
|
||||||
if not dots and n.startswith("."):
|
if not dots_ok and n.startswith("."):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
wrel = (rel + "/" + n).lstrip("/")
|
wrel = (rel + "/" + n).lstrip("/")
|
||||||
for x in vfs.walk(wrel, "", seen, uname, permsets, dots, scandir, lstat):
|
for x in vfs.walk(
|
||||||
|
wrel, "", seen, uname, permsets, wantdots, scandir, lstat
|
||||||
|
):
|
||||||
yield x
|
yield x
|
||||||
|
|
||||||
def zipgen(
|
def zipgen(
|
||||||
@@ -659,7 +671,6 @@ class VFS(object):
|
|||||||
vrem: str,
|
vrem: str,
|
||||||
flt: set[str],
|
flt: set[str],
|
||||||
uname: str,
|
uname: str,
|
||||||
dots: bool,
|
|
||||||
dirs: bool,
|
dirs: bool,
|
||||||
scandir: bool,
|
scandir: bool,
|
||||||
wrap: bool = True,
|
wrap: bool = True,
|
||||||
@@ -669,7 +680,7 @@ class VFS(object):
|
|||||||
# if single folder: the folder itself is the top-level item
|
# if single folder: the folder itself is the top-level item
|
||||||
folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
|
folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
|
||||||
|
|
||||||
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
g = self.walk(folder, vrem, [], uname, [[True, False]], True, scandir, False)
|
||||||
for _, _, vpath, apath, files, rd, vd in g:
|
for _, _, vpath, apath, files, rd, vd in g:
|
||||||
if flt:
|
if flt:
|
||||||
files = [x for x in files if x[0] in flt]
|
files = [x for x in files if x[0] in flt]
|
||||||
@@ -688,18 +699,6 @@ class VFS(object):
|
|||||||
apaths = [os.path.join(apath, n) for n in fnames]
|
apaths = [os.path.join(apath, n) for n in fnames]
|
||||||
ret = list(zip(vpaths, apaths, files))
|
ret = list(zip(vpaths, apaths, files))
|
||||||
|
|
||||||
if not dots:
|
|
||||||
# dotfile filtering based on vpath (intended visibility)
|
|
||||||
ret = [x for x in ret if "/." not in "/" + x[0]]
|
|
||||||
|
|
||||||
zel = [ze for ze in rd if ze[0].startswith(".")]
|
|
||||||
for ze in zel:
|
|
||||||
rd.remove(ze)
|
|
||||||
|
|
||||||
zsl = [zs for zs in vd.keys() if zs.startswith(".")]
|
|
||||||
for zs in zsl:
|
|
||||||
del vd[zs]
|
|
||||||
|
|
||||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in ret]:
|
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in ret]:
|
||||||
yield f
|
yield f
|
||||||
|
|
||||||
@@ -946,9 +945,7 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
if vp is not None and ap is None:
|
if vp is not None and ap is None:
|
||||||
ap = ln
|
ap = ln
|
||||||
if ap.startswith("~"):
|
ap = os.path.expandvars(os.path.expanduser(ap))
|
||||||
ap = os.path.expanduser(ap)
|
|
||||||
|
|
||||||
ap = absreal(ap)
|
ap = absreal(ap)
|
||||||
self._l(ln, 2, "bound to filesystem-path [{}]".format(ap))
|
self._l(ln, 2, "bound to filesystem-path [{}]".format(ap))
|
||||||
self._map_volume(ap, vp, mount, daxs, mflags)
|
self._map_volume(ap, vp, mount, daxs, mflags)
|
||||||
@@ -959,16 +956,17 @@ class AuthSrv(object):
|
|||||||
try:
|
try:
|
||||||
self._l(ln, 5, "volume access config:")
|
self._l(ln, 5, "volume access config:")
|
||||||
sk, sv = ln.split(":")
|
sk, sv = ln.split(":")
|
||||||
if re.sub("[rwmdgGha]", "", sk) or not sk:
|
if re.sub("[rwmdgGhaA.]", "", sk) or not sk:
|
||||||
err = "invalid accs permissions list; "
|
err = "invalid accs permissions list; "
|
||||||
raise Exception(err)
|
raise Exception(err)
|
||||||
if " " in re.sub(", *", "", sv).strip():
|
if " " in re.sub(", *", "", sv).strip():
|
||||||
err = "list of users is not comma-separated; "
|
err = "list of users is not comma-separated; "
|
||||||
raise Exception(err)
|
raise Exception(err)
|
||||||
|
assert vp is not None
|
||||||
self._read_vol_str(sk, sv.replace(" ", ""), daxs[vp], mflags[vp])
|
self._read_vol_str(sk, sv.replace(" ", ""), daxs[vp], mflags[vp])
|
||||||
continue
|
continue
|
||||||
except:
|
except:
|
||||||
err += "accs entries must be 'rwmdgGha: user1, user2, ...'"
|
err += "accs entries must be 'rwmdgGhaA.: user1, user2, ...'"
|
||||||
raise Exception(err + SBADCFG)
|
raise Exception(err + SBADCFG)
|
||||||
|
|
||||||
if cat == catf:
|
if cat == catf:
|
||||||
@@ -987,9 +985,11 @@ class AuthSrv(object):
|
|||||||
fstr += "," + sk
|
fstr += "," + sk
|
||||||
else:
|
else:
|
||||||
fstr += ",{}={}".format(sk, sv)
|
fstr += ",{}={}".format(sk, sv)
|
||||||
|
assert vp is not None
|
||||||
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||||
fstr = ""
|
fstr = ""
|
||||||
if fstr:
|
if fstr:
|
||||||
|
assert vp is not None
|
||||||
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||||
continue
|
continue
|
||||||
except:
|
except:
|
||||||
@@ -1004,8 +1004,9 @@ class AuthSrv(object):
|
|||||||
def _read_vol_str(
|
def _read_vol_str(
|
||||||
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
|
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
|
||||||
) -> None:
|
) -> None:
|
||||||
if lvl.strip("crwmdgGha"):
|
if lvl.strip("crwmdgGhaA."):
|
||||||
raise Exception("invalid volflag: {},{}".format(lvl, uname))
|
t = "%s,%s" % (lvl, uname) if uname else lvl
|
||||||
|
raise Exception("invalid config value (volume or volflag): %s" % (t,))
|
||||||
|
|
||||||
if lvl == "c":
|
if lvl == "c":
|
||||||
cval: Union[bool, str] = True
|
cval: Union[bool, str] = True
|
||||||
@@ -1027,19 +1028,31 @@ class AuthSrv(object):
|
|||||||
if uname == "":
|
if uname == "":
|
||||||
uname = "*"
|
uname = "*"
|
||||||
|
|
||||||
|
junkset = set()
|
||||||
for un in uname.replace(",", " ").strip().split():
|
for un in uname.replace(",", " ").strip().split():
|
||||||
|
for alias, mapping in [
|
||||||
|
("h", "gh"),
|
||||||
|
("G", "gG"),
|
||||||
|
("A", "rwmda.A"),
|
||||||
|
]:
|
||||||
|
expanded = ""
|
||||||
|
for ch in mapping:
|
||||||
|
if ch not in lvl:
|
||||||
|
expanded += ch
|
||||||
|
lvl = lvl.replace(alias, expanded + alias)
|
||||||
|
|
||||||
for ch, al in [
|
for ch, al in [
|
||||||
("r", axs.uread),
|
("r", axs.uread),
|
||||||
("w", axs.uwrite),
|
("w", axs.uwrite),
|
||||||
("m", axs.umove),
|
("m", axs.umove),
|
||||||
("d", axs.udel),
|
("d", axs.udel),
|
||||||
|
(".", axs.udot),
|
||||||
("a", axs.uadmin),
|
("a", axs.uadmin),
|
||||||
("h", axs.uhtml),
|
("A", junkset),
|
||||||
("h", axs.uget),
|
|
||||||
("g", axs.uget),
|
("g", axs.uget),
|
||||||
("G", axs.uget),
|
|
||||||
("G", axs.upget),
|
("G", axs.upget),
|
||||||
]: # b bb bbb
|
("h", axs.uhtml),
|
||||||
|
]:
|
||||||
if ch in lvl:
|
if ch in lvl:
|
||||||
if un == "*":
|
if un == "*":
|
||||||
t = "└─add permission [{0}] for [everyone] -- {2}"
|
t = "└─add permission [{0}] for [everyone] -- {2}"
|
||||||
@@ -1111,7 +1124,7 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
if self.args.v:
|
if self.args.v:
|
||||||
# list of src:dst:permset:permset:...
|
# list of src:dst:permset:permset:...
|
||||||
# permset is <rwmdgGha>[,username][,username] or <c>,<flag>[=args]
|
# permset is <rwmdgGhaA.>[,username][,username] or <c>,<flag>[=args]
|
||||||
for v_str in self.args.v:
|
for v_str in self.args.v:
|
||||||
m = re_vol.match(v_str)
|
m = re_vol.match(v_str)
|
||||||
if not m:
|
if not m:
|
||||||
@@ -1185,12 +1198,13 @@ class AuthSrv(object):
|
|||||||
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
|
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
assert vfs # type: ignore
|
||||||
zv = vfs.add(mount[dst], dst)
|
zv = vfs.add(mount[dst], dst)
|
||||||
zv.axs = daxs[dst]
|
zv.axs = daxs[dst]
|
||||||
zv.flags = mflags[dst]
|
zv.flags = mflags[dst]
|
||||||
zv.dbv = None
|
zv.dbv = None
|
||||||
|
|
||||||
assert vfs
|
assert vfs # type: ignore
|
||||||
vfs.all_vols = {}
|
vfs.all_vols = {}
|
||||||
vfs.all_aps = []
|
vfs.all_aps = []
|
||||||
vfs.all_vps = []
|
vfs.all_vps = []
|
||||||
@@ -1200,14 +1214,21 @@ class AuthSrv(object):
|
|||||||
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
|
vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
|
||||||
vol.root = vfs
|
vol.root = vfs
|
||||||
|
|
||||||
for perm in "read write move del get pget html admin".split():
|
for perm in "read write move del get pget html admin dot".split():
|
||||||
axs_key = "u" + perm
|
axs_key = "u" + perm
|
||||||
unames = ["*"] + list(acct.keys())
|
unames = ["*"] + list(acct.keys())
|
||||||
|
for vp, vol in vfs.all_vols.items():
|
||||||
|
zx = getattr(vol.axs, axs_key)
|
||||||
|
if "*" in zx:
|
||||||
|
for usr in unames:
|
||||||
|
zx.add(usr)
|
||||||
|
|
||||||
|
# aread,... = dict[uname, list[volnames] or []]
|
||||||
umap: dict[str, list[str]] = {x: [] for x in unames}
|
umap: dict[str, list[str]] = {x: [] for x in unames}
|
||||||
for usr in unames:
|
for usr in unames:
|
||||||
for vp, vol in vfs.all_vols.items():
|
for vp, vol in vfs.all_vols.items():
|
||||||
zx = getattr(vol.axs, axs_key)
|
zx = getattr(vol.axs, axs_key)
|
||||||
if usr in zx or "*" in zx:
|
if usr in zx:
|
||||||
umap[usr].append(vp)
|
umap[usr].append(vp)
|
||||||
umap[usr].sort()
|
umap[usr].sort()
|
||||||
setattr(vfs, "a" + perm, umap)
|
setattr(vfs, "a" + perm, umap)
|
||||||
@@ -1224,6 +1245,7 @@ class AuthSrv(object):
|
|||||||
axs.upget,
|
axs.upget,
|
||||||
axs.uhtml,
|
axs.uhtml,
|
||||||
axs.uadmin,
|
axs.uadmin,
|
||||||
|
axs.udot,
|
||||||
]:
|
]:
|
||||||
for usr in d:
|
for usr in d:
|
||||||
all_users[usr] = 1
|
all_users[usr] = 1
|
||||||
@@ -1258,9 +1280,7 @@ class AuthSrv(object):
|
|||||||
if vflag == "-":
|
if vflag == "-":
|
||||||
pass
|
pass
|
||||||
elif vflag:
|
elif vflag:
|
||||||
if vflag.startswith("~"):
|
vflag = os.path.expandvars(os.path.expanduser(vflag))
|
||||||
vflag = os.path.expanduser(vflag)
|
|
||||||
|
|
||||||
vol.histpath = uncyg(vflag) if WINDOWS else vflag
|
vol.histpath = uncyg(vflag) if WINDOWS else vflag
|
||||||
elif self.args.hist:
|
elif self.args.hist:
|
||||||
for nch in range(len(hid)):
|
for nch in range(len(hid)):
|
||||||
@@ -1426,12 +1446,12 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||||
if vf in vol.flags:
|
if vf in vol.flags:
|
||||||
ptn = vol.flags.pop(vf)
|
ptn = re.compile(vol.flags.pop(vf))
|
||||||
else:
|
else:
|
||||||
ptn = getattr(self.args, ga)
|
ptn = getattr(self.args, ga)
|
||||||
|
|
||||||
if ptn:
|
if ptn:
|
||||||
vol.flags[vf] = re.compile(ptn)
|
vol.flags[vf] = ptn
|
||||||
|
|
||||||
for ga, vf in vf_bmap().items():
|
for ga, vf in vf_bmap().items():
|
||||||
if getattr(self.args, ga):
|
if getattr(self.args, ga):
|
||||||
@@ -1476,14 +1496,11 @@ class AuthSrv(object):
|
|||||||
raise Exception(t.format(dbd, dbds))
|
raise Exception(t.format(dbd, dbds))
|
||||||
|
|
||||||
# default tag cfgs if unset
|
# default tag cfgs if unset
|
||||||
if "mte" not in vol.flags:
|
for k in ("mte", "mth", "exp_md", "exp_lg"):
|
||||||
vol.flags["mte"] = self.args.mte
|
if k not in vol.flags:
|
||||||
elif vol.flags["mte"].startswith("+"):
|
vol.flags[k] = getattr(self.args, k).copy()
|
||||||
vol.flags["mte"] = ",".join(
|
else:
|
||||||
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
|
vol.flags[k] = odfusion(getattr(self.args, k), vol.flags[k])
|
||||||
)
|
|
||||||
if "mth" not in vol.flags:
|
|
||||||
vol.flags["mth"] = self.args.mth
|
|
||||||
|
|
||||||
# append additive args from argv to volflags
|
# append additive args from argv to volflags
|
||||||
hooks = "xbu xau xiu xbr xar xbd xad xm xban".split()
|
hooks = "xbu xau xiu xbr xar xbd xad xm xban".split()
|
||||||
@@ -1537,7 +1554,11 @@ class AuthSrv(object):
|
|||||||
if vol.flags.get(grp, False):
|
if vol.flags.get(grp, False):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
vol.flags = {
|
||||||
|
k: v
|
||||||
|
for k, v in vol.flags.items()
|
||||||
|
if not k.startswith(rm) or k == "mte"
|
||||||
|
}
|
||||||
|
|
||||||
for grp, rm in [["d2v", "e2v"]]:
|
for grp, rm in [["d2v", "e2v"]]:
|
||||||
if not vol.flags.get(grp, False):
|
if not vol.flags.get(grp, False):
|
||||||
@@ -1584,12 +1605,12 @@ class AuthSrv(object):
|
|||||||
if local:
|
if local:
|
||||||
local_only_mtp[a] = True
|
local_only_mtp[a] = True
|
||||||
|
|
||||||
local_mte = {}
|
local_mte = ODict()
|
||||||
for a in vol.flags.get("mte", "").split(","):
|
for a in vol.flags.get("mte", {}).keys():
|
||||||
local = True
|
local = True
|
||||||
all_mte[a] = True
|
all_mte[a] = True
|
||||||
local_mte[a] = True
|
local_mte[a] = True
|
||||||
for b in self.args.mte.split(","):
|
for b in self.args.mte.keys():
|
||||||
if not a or not b:
|
if not a or not b:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -1633,6 +1654,11 @@ class AuthSrv(object):
|
|||||||
vol.flags.pop(k[1:], None)
|
vol.flags.pop(k[1:], None)
|
||||||
vol.flags.pop(k)
|
vol.flags.pop(k)
|
||||||
|
|
||||||
|
for vol in vfs.all_vols.values():
|
||||||
|
if vol.flags.get("dots"):
|
||||||
|
for name in vol.axs.uread:
|
||||||
|
vol.axs.udot.add(name)
|
||||||
|
|
||||||
if errors:
|
if errors:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
@@ -1651,12 +1677,14 @@ class AuthSrv(object):
|
|||||||
[" write", "uwrite"],
|
[" write", "uwrite"],
|
||||||
[" move", "umove"],
|
[" move", "umove"],
|
||||||
["delete", "udel"],
|
["delete", "udel"],
|
||||||
|
[" dots", "udot"],
|
||||||
[" get", "uget"],
|
[" get", "uget"],
|
||||||
[" upget", "upget"],
|
[" upGet", "upget"],
|
||||||
[" html", "uhtml"],
|
[" html", "uhtml"],
|
||||||
["uadmin", "uadmin"],
|
["uadmin", "uadmin"],
|
||||||
]:
|
]:
|
||||||
u = list(sorted(getattr(zv.axs, attr)))
|
u = list(sorted(getattr(zv.axs, attr)))
|
||||||
|
u = ["*"] if "*" in u else u
|
||||||
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
||||||
u = u if u else "\033[36m--none--\033[0m"
|
u = u if u else "\033[36m--none--\033[0m"
|
||||||
t += "\n| {}: {}".format(txt, u)
|
t += "\n| {}: {}".format(txt, u)
|
||||||
@@ -1719,6 +1747,9 @@ class AuthSrv(object):
|
|||||||
def setup_pwhash(self, acct: dict[str, str]) -> None:
|
def setup_pwhash(self, acct: dict[str, str]) -> None:
|
||||||
self.ah = PWHash(self.args)
|
self.ah = PWHash(self.args)
|
||||||
if not self.ah.on:
|
if not self.ah.on:
|
||||||
|
if self.args.ah_cli or self.args.ah_gen:
|
||||||
|
t = "\n BAD CONFIG:\n cannot --ah-cli or --ah-gen without --ah-alg"
|
||||||
|
raise Exception(t)
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.args.ah_cli:
|
if self.args.ah_cli:
|
||||||
@@ -1810,7 +1841,7 @@ class AuthSrv(object):
|
|||||||
raise Exception("volume not found: " + zs)
|
raise Exception("volume not found: " + zs)
|
||||||
|
|
||||||
self.log(str({"users": users, "vols": vols, "flags": flags}))
|
self.log(str({"users": users, "vols": vols, "flags": flags}))
|
||||||
t = "/{}: read({}) write({}) move({}) del({}) get({}) upget({}) uadmin({})"
|
t = "/{}: read({}) write({}) move({}) del({}) dots({}) get({}) upGet({}) uadmin({})"
|
||||||
for k, zv in self.vfs.all_vols.items():
|
for k, zv in self.vfs.all_vols.items():
|
||||||
vc = zv.axs
|
vc = zv.axs
|
||||||
vs = [
|
vs = [
|
||||||
@@ -1819,6 +1850,7 @@ class AuthSrv(object):
|
|||||||
vc.uwrite,
|
vc.uwrite,
|
||||||
vc.umove,
|
vc.umove,
|
||||||
vc.udel,
|
vc.udel,
|
||||||
|
vc.udot,
|
||||||
vc.uget,
|
vc.uget,
|
||||||
vc.upget,
|
vc.upget,
|
||||||
vc.uhtml,
|
vc.uhtml,
|
||||||
@@ -1961,6 +1993,7 @@ class AuthSrv(object):
|
|||||||
"w": "uwrite",
|
"w": "uwrite",
|
||||||
"m": "umove",
|
"m": "umove",
|
||||||
"d": "udel",
|
"d": "udel",
|
||||||
|
".": "udot",
|
||||||
"g": "uget",
|
"g": "uget",
|
||||||
"G": "upget",
|
"G": "upget",
|
||||||
"h": "uhtml",
|
"h": "uhtml",
|
||||||
@@ -2167,7 +2200,7 @@ def upgrade_cfg_fmt(
|
|||||||
else:
|
else:
|
||||||
sn = sn.replace(",", ", ")
|
sn = sn.replace(",", ", ")
|
||||||
ret.append(" " + sn)
|
ret.append(" " + sn)
|
||||||
elif sn[:1] in "rwmdgGha":
|
elif sn[:1] in "rwmdgGhaA.":
|
||||||
if cat != catx:
|
if cat != catx:
|
||||||
cat = catx
|
cat = catx
|
||||||
ret.append(cat)
|
ret.append(cat)
|
||||||
|
|||||||
@@ -43,6 +43,10 @@ def open(p: str, *a, **ka) -> int:
|
|||||||
return os.open(fsenc(p), *a, **ka)
|
return os.open(fsenc(p), *a, **ka)
|
||||||
|
|
||||||
|
|
||||||
|
def readlink(p: str) -> str:
|
||||||
|
return fsdec(os.readlink(fsenc(p)))
|
||||||
|
|
||||||
|
|
||||||
def rename(src: str, dst: str) -> None:
|
def rename(src: str, dst: str) -> None:
|
||||||
return os.rename(fsenc(src), fsenc(dst))
|
return os.rename(fsenc(src), fsenc(dst))
|
||||||
|
|
||||||
|
|||||||
@@ -46,8 +46,8 @@ class BrokerMp(object):
|
|||||||
self.num_workers = self.args.j or CORES
|
self.num_workers = self.args.j or CORES
|
||||||
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
self.log("broker", "booting {} subprocesses".format(self.num_workers))
|
||||||
for n in range(1, self.num_workers + 1):
|
for n in range(1, self.num_workers + 1):
|
||||||
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1)
|
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) # type: ignore
|
||||||
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64)
|
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) # type: ignore
|
||||||
|
|
||||||
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
|
||||||
Daemon(self.collector, "mp-sink-{}".format(n), (proc,))
|
Daemon(self.collector, "mp-sink-{}".format(n), (proc,))
|
||||||
|
|||||||
@@ -132,7 +132,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
expiry, inf = _read_crt(args, "srv.pem")
|
expiry, inf = _read_crt(args, "srv.pem")
|
||||||
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
|
if "sans" not in inf:
|
||||||
|
raise Exception("no useable cert found")
|
||||||
|
|
||||||
|
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
|
||||||
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
|
||||||
for n in names:
|
for n in names:
|
||||||
if n not in inf["sans"]:
|
if n not in inf["sans"]:
|
||||||
|
|||||||
@@ -9,22 +9,31 @@ onedash = set(zs.split())
|
|||||||
def vf_bmap() -> dict[str, str]:
|
def vf_bmap() -> dict[str, str]:
|
||||||
"""argv-to-volflag: simple bools"""
|
"""argv-to-volflag: simple bools"""
|
||||||
ret = {
|
ret = {
|
||||||
|
"dav_auth": "davauth",
|
||||||
|
"dav_rt": "davrt",
|
||||||
|
"ed": "dots",
|
||||||
"never_symlink": "neversymlink",
|
"never_symlink": "neversymlink",
|
||||||
"no_dedup": "copydupes",
|
"no_dedup": "copydupes",
|
||||||
"no_dupe": "nodupe",
|
"no_dupe": "nodupe",
|
||||||
"no_forget": "noforget",
|
"no_forget": "noforget",
|
||||||
|
"no_robots": "norobots",
|
||||||
|
"no_thumb": "dthumb",
|
||||||
|
"no_vthumb": "dvthumb",
|
||||||
|
"no_athumb": "dathumb",
|
||||||
"th_no_crop": "nocrop",
|
"th_no_crop": "nocrop",
|
||||||
"dav_auth": "davauth",
|
|
||||||
"dav_rt": "davrt",
|
|
||||||
}
|
}
|
||||||
for k in (
|
for k in (
|
||||||
"dotsrch",
|
"dotsrch",
|
||||||
|
"e2d",
|
||||||
|
"e2ds",
|
||||||
|
"e2dsa",
|
||||||
"e2t",
|
"e2t",
|
||||||
"e2ts",
|
"e2ts",
|
||||||
"e2tsr",
|
"e2tsr",
|
||||||
"e2v",
|
"e2v",
|
||||||
"e2vu",
|
"e2vu",
|
||||||
"e2vp",
|
"e2vp",
|
||||||
|
"exp",
|
||||||
"grid",
|
"grid",
|
||||||
"hardlink",
|
"hardlink",
|
||||||
"magic",
|
"magic",
|
||||||
@@ -41,8 +50,22 @@ def vf_bmap() -> dict[str, str]:
|
|||||||
|
|
||||||
def vf_vmap() -> dict[str, str]:
|
def vf_vmap() -> dict[str, str]:
|
||||||
"""argv-to-volflag: simple values"""
|
"""argv-to-volflag: simple values"""
|
||||||
ret = {"th_convt": "convt", "th_size": "thsize"}
|
ret = {
|
||||||
for k in ("dbd", "lg_sbf", "md_sbf", "nrand", "unlist"):
|
"no_hash": "nohash",
|
||||||
|
"no_idx": "noidx",
|
||||||
|
"re_maxage": "scan",
|
||||||
|
"th_convt": "convt",
|
||||||
|
"th_size": "thsize",
|
||||||
|
}
|
||||||
|
for k in (
|
||||||
|
"dbd",
|
||||||
|
"lg_sbf",
|
||||||
|
"md_sbf",
|
||||||
|
"nrand",
|
||||||
|
"sort",
|
||||||
|
"unlist",
|
||||||
|
"u2ts",
|
||||||
|
):
|
||||||
ret[k] = k
|
ret[k] = k
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
@@ -50,7 +73,23 @@ def vf_vmap() -> dict[str, str]:
|
|||||||
def vf_cmap() -> dict[str, str]:
|
def vf_cmap() -> dict[str, str]:
|
||||||
"""argv-to-volflag: complex/lists"""
|
"""argv-to-volflag: complex/lists"""
|
||||||
ret = {}
|
ret = {}
|
||||||
for k in ("html_head", "mte", "mth"):
|
for k in (
|
||||||
|
"exp_lg",
|
||||||
|
"exp_md",
|
||||||
|
"html_head",
|
||||||
|
"mte",
|
||||||
|
"mth",
|
||||||
|
"mtp",
|
||||||
|
"xad",
|
||||||
|
"xar",
|
||||||
|
"xau",
|
||||||
|
"xban",
|
||||||
|
"xbd",
|
||||||
|
"xbr",
|
||||||
|
"xbu",
|
||||||
|
"xiu",
|
||||||
|
"xm",
|
||||||
|
):
|
||||||
ret[k] = k
|
ret[k] = k
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
@@ -60,10 +99,12 @@ permdescs = {
|
|||||||
"w": 'write; upload files; need "r" to see the uploads',
|
"w": 'write; upload files; need "r" to see the uploads',
|
||||||
"m": 'move; move files and folders; need "w" at destination',
|
"m": 'move; move files and folders; need "w" at destination',
|
||||||
"d": "delete; permanently delete files and folders",
|
"d": "delete; permanently delete files and folders",
|
||||||
|
".": "dots; user can ask to show dotfiles in listings",
|
||||||
"g": "get; download files, but cannot see folder contents",
|
"g": "get; download files, but cannot see folder contents",
|
||||||
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
"G": 'upget; same as "g" but can see filekeys of their own uploads',
|
||||||
"h": 'html; same as "g" but folders return their index.html',
|
"h": 'html; same as "g" but folders return their index.html',
|
||||||
"a": "admin; can see uploader IPs, config-reload",
|
"a": "admin; can see uploader IPs, config-reload",
|
||||||
|
"A": "all; same as 'rwmda.' (read/write/move/delete/dotfiles)",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -86,6 +127,7 @@ flagcats = {
|
|||||||
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
|
||||||
"rand": "force randomized filenames, 9 chars long by default",
|
"rand": "force randomized filenames, 9 chars long by default",
|
||||||
"nrand=N": "randomized filenames are N chars long",
|
"nrand=N": "randomized filenames are N chars long",
|
||||||
|
"u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
|
||||||
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
|
||||||
"df=1g": "ensure 1 GiB free disk space",
|
"df=1g": "ensure 1 GiB free disk space",
|
||||||
},
|
},
|
||||||
@@ -129,7 +171,7 @@ flagcats = {
|
|||||||
"dathumb": "disables audio thumbnails (spectrograms)",
|
"dathumb": "disables audio thumbnails (spectrograms)",
|
||||||
"dithumb": "disables image thumbnails",
|
"dithumb": "disables image thumbnails",
|
||||||
"thsize": "thumbnail res; WxH",
|
"thsize": "thumbnail res; WxH",
|
||||||
"nocrop": "disable center-cropping",
|
"nocrop": "disable center-cropping by default",
|
||||||
"convt": "conversion timeout in seconds",
|
"convt": "conversion timeout in seconds",
|
||||||
},
|
},
|
||||||
"handlers\n(better explained in --help-handlers)": {
|
"handlers\n(better explained in --help-handlers)": {
|
||||||
@@ -149,6 +191,7 @@ flagcats = {
|
|||||||
},
|
},
|
||||||
"client and ux": {
|
"client and ux": {
|
||||||
"grid": "show grid/thumbnails by default",
|
"grid": "show grid/thumbnails by default",
|
||||||
|
"sort": "default sort order",
|
||||||
"unlist": "dont list files matching REGEX",
|
"unlist": "dont list files matching REGEX",
|
||||||
"html_head=TXT": "includes TXT in the <head>",
|
"html_head=TXT": "includes TXT in the <head>",
|
||||||
"robots": "allows indexing by search engines (default)",
|
"robots": "allows indexing by search engines (default)",
|
||||||
@@ -162,6 +205,7 @@ flagcats = {
|
|||||||
"nohtml": "return html and markdown as text/html",
|
"nohtml": "return html and markdown as text/html",
|
||||||
},
|
},
|
||||||
"others": {
|
"others": {
|
||||||
|
"dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
|
||||||
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
"fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
|
||||||
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
"fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
|
||||||
"davauth": "ask webdav clients to login for all folders",
|
"davauth": "ask webdav clients to login for all folders",
|
||||||
|
|||||||
@@ -9,19 +9,13 @@ import stat
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
|
||||||
|
|
||||||
try:
|
|
||||||
import asynchat
|
|
||||||
except:
|
|
||||||
sys.path.append(os.path.join(E.mod, "vend"))
|
|
||||||
|
|
||||||
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
|
||||||
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
||||||
from pyftpdlib.handlers import FTPHandler
|
from pyftpdlib.handlers import FTPHandler
|
||||||
from pyftpdlib.ioloop import IOLoop
|
from pyftpdlib.ioloop import IOLoop
|
||||||
from pyftpdlib.servers import FTPServer
|
from pyftpdlib.servers import FTPServer
|
||||||
|
|
||||||
|
from .__init__ import PY2, TYPE_CHECKING
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import (
|
from .util import (
|
||||||
@@ -79,6 +73,7 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
asrv = self.hub.asrv
|
asrv = self.hub.asrv
|
||||||
uname = "*"
|
uname = "*"
|
||||||
if username != "anonymous":
|
if username != "anonymous":
|
||||||
|
uname = ""
|
||||||
for zs in (password, username):
|
for zs in (password, username):
|
||||||
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
|
||||||
if zs:
|
if zs:
|
||||||
@@ -92,6 +87,12 @@ class FtpAuth(DummyAuthorizer):
|
|||||||
if bonk:
|
if bonk:
|
||||||
logging.warning("client banned: invalid passwords")
|
logging.warning("client banned: invalid passwords")
|
||||||
bans[ip] = bonk
|
bans[ip] = bonk
|
||||||
|
try:
|
||||||
|
# only possible if multiprocessing disabled
|
||||||
|
self.hub.broker.httpsrv.bans[ip] = bonk # type: ignore
|
||||||
|
self.hub.broker.httpsrv.nban += 1 # type: ignore
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
raise AuthenticationFailed("Authentication failed.")
|
raise AuthenticationFailed("Authentication failed.")
|
||||||
|
|
||||||
@@ -132,7 +133,7 @@ class FtpFs(AbstractedFS):
|
|||||||
|
|
||||||
self.can_read = self.can_write = self.can_move = False
|
self.can_read = self.can_write = self.can_move = False
|
||||||
self.can_delete = self.can_get = self.can_upget = False
|
self.can_delete = self.can_get = self.can_upget = False
|
||||||
self.can_admin = False
|
self.can_admin = self.can_dot = False
|
||||||
|
|
||||||
self.listdirinfo = self.listdir
|
self.listdirinfo = self.listdir
|
||||||
self.chdir(".")
|
self.chdir(".")
|
||||||
@@ -148,7 +149,7 @@ class FtpFs(AbstractedFS):
|
|||||||
try:
|
try:
|
||||||
vpath = vpath.replace("\\", "/").strip("/")
|
vpath = vpath.replace("\\", "/").strip("/")
|
||||||
rd, fn = os.path.split(vpath)
|
rd, fn = os.path.split(vpath)
|
||||||
if ANYWIN and relchk(rd):
|
if relchk(rd):
|
||||||
logging.warning("malicious vpath: %s", vpath)
|
logging.warning("malicious vpath: %s", vpath)
|
||||||
t = "Unsupported characters in [{}]"
|
t = "Unsupported characters in [{}]"
|
||||||
raise FSE(t.format(vpath), 1)
|
raise FSE(t.format(vpath), 1)
|
||||||
@@ -167,7 +168,7 @@ class FtpFs(AbstractedFS):
|
|||||||
if not avfs:
|
if not avfs:
|
||||||
raise FSE(t.format(vpath), 1)
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname)
|
cr, cw, cm, cd, _, _, _, _ = avfs.can_access("", self.h.uname)
|
||||||
if r and not cr or w and not cw or m and not cm or d and not cd:
|
if r and not cr or w and not cw or m and not cm or d and not cd:
|
||||||
raise FSE(t.format(vpath), 1)
|
raise FSE(t.format(vpath), 1)
|
||||||
|
|
||||||
@@ -243,6 +244,7 @@ class FtpFs(AbstractedFS):
|
|||||||
self.can_get,
|
self.can_get,
|
||||||
self.can_upget,
|
self.can_upget,
|
||||||
self.can_admin,
|
self.can_admin,
|
||||||
|
self.can_dot,
|
||||||
) = avfs.can_access("", self.h.uname)
|
) = avfs.can_access("", self.h.uname)
|
||||||
|
|
||||||
def mkdir(self, path: str) -> None:
|
def mkdir(self, path: str) -> None:
|
||||||
@@ -265,7 +267,7 @@ class FtpFs(AbstractedFS):
|
|||||||
vfs_ls = [x[0] for x in vfs_ls1]
|
vfs_ls = [x[0] for x in vfs_ls1]
|
||||||
vfs_ls.extend(vfs_virt.keys())
|
vfs_ls.extend(vfs_virt.keys())
|
||||||
|
|
||||||
if not self.args.ed:
|
if not self.can_dot:
|
||||||
vfs_ls = exclude_dotfiles(vfs_ls)
|
vfs_ls = exclude_dotfiles(vfs_ls)
|
||||||
|
|
||||||
vfs_ls.sort()
|
vfs_ls.sort()
|
||||||
@@ -404,7 +406,16 @@ class FtpHandler(FTPHandler):
|
|||||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||||
|
|
||||||
cip = self.remote_ip
|
cip = self.remote_ip
|
||||||
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
if cip.startswith("::ffff:"):
|
||||||
|
cip = cip[7:]
|
||||||
|
|
||||||
|
if self.args.ftp_ipa_re and not self.args.ftp_ipa_re.match(cip):
|
||||||
|
logging.warning("client rejected (--ftp-ipa): %s", cip)
|
||||||
|
self.connected = False
|
||||||
|
conn.close()
|
||||||
|
return
|
||||||
|
|
||||||
|
self.cli_ip = cip
|
||||||
|
|
||||||
# abspath->vpath mapping to resolve log_transfer paths
|
# abspath->vpath mapping to resolve log_transfer paths
|
||||||
self.vfs_map: dict[str, str] = {}
|
self.vfs_map: dict[str, str] = {}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import calendar
|
|||||||
import copy
|
import copy
|
||||||
import errno
|
import errno
|
||||||
import gzip
|
import gzip
|
||||||
|
import hashlib
|
||||||
import itertools
|
import itertools
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
@@ -38,7 +39,10 @@ from .szip import StreamZip
|
|||||||
from .util import (
|
from .util import (
|
||||||
HTTPCODE,
|
HTTPCODE,
|
||||||
META_NOBOTS,
|
META_NOBOTS,
|
||||||
|
UTC,
|
||||||
|
Garda,
|
||||||
MultipartParser,
|
MultipartParser,
|
||||||
|
ODict,
|
||||||
Pebkac,
|
Pebkac,
|
||||||
UnrecvEOF,
|
UnrecvEOF,
|
||||||
absreal,
|
absreal,
|
||||||
@@ -72,11 +76,12 @@ from .util import (
|
|||||||
runhook,
|
runhook,
|
||||||
s3enc,
|
s3enc,
|
||||||
sanitize_fn,
|
sanitize_fn,
|
||||||
|
sanitize_vpath,
|
||||||
sendfile_kern,
|
sendfile_kern,
|
||||||
sendfile_py,
|
sendfile_py,
|
||||||
undot,
|
undot,
|
||||||
unescape_cookie,
|
unescape_cookie,
|
||||||
unquote,
|
unquote, # type: ignore
|
||||||
unquotep,
|
unquotep,
|
||||||
vjoin,
|
vjoin,
|
||||||
vol_san,
|
vol_san,
|
||||||
@@ -130,7 +135,8 @@ class HttpCli(object):
|
|||||||
self.headers: dict[str, str] = {}
|
self.headers: dict[str, str] = {}
|
||||||
self.mode = " "
|
self.mode = " "
|
||||||
self.req = " "
|
self.req = " "
|
||||||
self.http_ver = " "
|
self.http_ver = ""
|
||||||
|
self.hint = ""
|
||||||
self.host = " "
|
self.host = " "
|
||||||
self.ua = " "
|
self.ua = " "
|
||||||
self.is_rclone = False
|
self.is_rclone = False
|
||||||
@@ -142,14 +148,12 @@ class HttpCli(object):
|
|||||||
self.rem = " "
|
self.rem = " "
|
||||||
self.vpath = " "
|
self.vpath = " "
|
||||||
self.vpaths = " "
|
self.vpaths = " "
|
||||||
|
self.gctx = " " # additional context for garda
|
||||||
|
self.trailing_slash = True
|
||||||
self.uname = " "
|
self.uname = " "
|
||||||
self.pw = " "
|
self.pw = " "
|
||||||
self.rvol = [" "]
|
self.rvol = [" "]
|
||||||
self.wvol = [" "]
|
self.wvol = [" "]
|
||||||
self.mvol = [" "]
|
|
||||||
self.dvol = [" "]
|
|
||||||
self.gvol = [" "]
|
|
||||||
self.upvol = [" "]
|
|
||||||
self.avol = [" "]
|
self.avol = [" "]
|
||||||
self.do_log = True
|
self.do_log = True
|
||||||
self.can_read = False
|
self.can_read = False
|
||||||
@@ -159,22 +163,18 @@ class HttpCli(object):
|
|||||||
self.can_get = False
|
self.can_get = False
|
||||||
self.can_upget = False
|
self.can_upget = False
|
||||||
self.can_admin = False
|
self.can_admin = False
|
||||||
|
self.can_dot = False
|
||||||
|
self.out_headerlist: list[tuple[str, str]] = []
|
||||||
|
self.out_headers: dict[str, str] = {}
|
||||||
|
self.html_head = " "
|
||||||
# post
|
# post
|
||||||
self.parser: Optional[MultipartParser] = None
|
self.parser: Optional[MultipartParser] = None
|
||||||
# end placeholders
|
# end placeholders
|
||||||
|
|
||||||
self.bufsz = 1024 * 32
|
self.bufsz = 1024 * 32
|
||||||
self.hint = ""
|
|
||||||
self.trailing_slash = True
|
|
||||||
self.out_headerlist: list[tuple[str, str]] = []
|
|
||||||
self.out_headers = {
|
|
||||||
"Vary": "Origin, PW, Cookie",
|
|
||||||
"Cache-Control": "no-store, max-age=0",
|
|
||||||
}
|
|
||||||
h = self.args.html_head
|
h = self.args.html_head
|
||||||
if self.args.no_robots:
|
if self.args.no_robots:
|
||||||
h = META_NOBOTS + (("\n" + h) if h else "")
|
h = META_NOBOTS + (("\n" + h) if h else "")
|
||||||
self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
|
|
||||||
self.html_head = h
|
self.html_head = h
|
||||||
|
|
||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
@@ -223,14 +223,21 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def run(self) -> bool:
|
def run(self) -> bool:
|
||||||
"""returns true if connection can be reused"""
|
"""returns true if connection can be reused"""
|
||||||
self.keepalive = False
|
self.out_headers = {
|
||||||
self.is_https = False
|
"Vary": "Origin, PW, Cookie",
|
||||||
self.headers = {}
|
"Cache-Control": "no-store, max-age=0",
|
||||||
self.hint = ""
|
}
|
||||||
|
if self.args.no_robots:
|
||||||
|
self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
|
||||||
|
|
||||||
if self.is_banned():
|
if self.is_banned():
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
if self.args.ipa_re and not self.args.ipa_re.match(self.conn.addr[0]):
|
||||||
|
self.log("client rejected (--ipa)", 3)
|
||||||
|
self.terse_reply(b"", 500)
|
||||||
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.s.settimeout(2)
|
self.s.settimeout(2)
|
||||||
headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
|
headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
|
||||||
@@ -252,8 +259,8 @@ class HttpCli(object):
|
|||||||
k, zs = header_line.split(":", 1)
|
k, zs = header_line.split(":", 1)
|
||||||
self.headers[k.lower()] = zs.strip()
|
self.headers[k.lower()] = zs.strip()
|
||||||
except:
|
except:
|
||||||
msg = " ]\n#[ ".join(headerlines)
|
msg = "#[ " + " ]\n#[ ".join(headerlines) + " ]"
|
||||||
raise Pebkac(400, "bad headers:\n#[ " + msg + " ]")
|
raise Pebkac(400, "bad headers", log=msg)
|
||||||
|
|
||||||
except Pebkac as ex:
|
except Pebkac as ex:
|
||||||
self.mode = "GET"
|
self.mode = "GET"
|
||||||
@@ -264,9 +271,15 @@ class HttpCli(object):
|
|||||||
h = {"WWW-Authenticate": 'Basic realm="a"'} if ex.code == 401 else {}
|
h = {"WWW-Authenticate": 'Basic realm="a"'} if ex.code == 401 else {}
|
||||||
try:
|
try:
|
||||||
self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True)
|
self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True)
|
||||||
return self.keepalive
|
|
||||||
except:
|
except:
|
||||||
return False
|
pass
|
||||||
|
|
||||||
|
if ex.log:
|
||||||
|
self.log("additional error context:\n" + ex.log, 6)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.conn.hsrv.nreq += 1
|
||||||
|
|
||||||
self.ua = self.headers.get("user-agent", "")
|
self.ua = self.headers.get("user-agent", "")
|
||||||
self.is_rclone = self.ua.startswith("rclone/")
|
self.is_rclone = self.ua.startswith("rclone/")
|
||||||
@@ -357,22 +370,33 @@ class HttpCli(object):
|
|||||||
self.trailing_slash = vpath.endswith("/")
|
self.trailing_slash = vpath.endswith("/")
|
||||||
vpath = undot(vpath)
|
vpath = undot(vpath)
|
||||||
|
|
||||||
zs = unquotep(arglist)
|
ptn = self.conn.hsrv.ptn_cc
|
||||||
m = self.conn.hsrv.ptn_cc.search(zs)
|
|
||||||
if m:
|
|
||||||
hit = zs[m.span()[0] :]
|
|
||||||
t = "malicious user; Cc in query [{}] => [{!r}]"
|
|
||||||
self.log(t.format(self.req, hit), 1)
|
|
||||||
return False
|
|
||||||
|
|
||||||
for k in arglist.split("&"):
|
for k in arglist.split("&"):
|
||||||
if "=" in k:
|
if "=" in k:
|
||||||
k, zs = k.split("=", 1)
|
k, zs = k.split("=", 1)
|
||||||
# x-www-form-urlencoded (url query part) uses
|
# x-www-form-urlencoded (url query part) uses
|
||||||
# either + or %20 for 0x20 so handle both
|
# either + or %20 for 0x20 so handle both
|
||||||
uparam[k.lower()] = unquotep(zs.strip().replace("+", " "))
|
sv = unquotep(zs.strip().replace("+", " "))
|
||||||
else:
|
else:
|
||||||
uparam[k.lower()] = ""
|
sv = ""
|
||||||
|
|
||||||
|
k = k.lower()
|
||||||
|
uparam[k] = sv
|
||||||
|
|
||||||
|
if k in ("doc", "move", "tree"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
zs = "%s=%s" % (k, sv)
|
||||||
|
m = ptn.search(zs)
|
||||||
|
if not m:
|
||||||
|
continue
|
||||||
|
|
||||||
|
hit = zs[m.span()[0] :]
|
||||||
|
t = "malicious user; Cc in query [{}] => [{!r}]"
|
||||||
|
self.log(t.format(self.req, hit), 1)
|
||||||
|
self.cbonk(self.conn.hsrv.gmal, self.req, "cc_q", "Cc in query")
|
||||||
|
self.terse_reply(b"", 500)
|
||||||
|
return False
|
||||||
|
|
||||||
if self.is_vproxied:
|
if self.is_vproxied:
|
||||||
if vpath.startswith(self.args.R):
|
if vpath.startswith(self.args.R):
|
||||||
@@ -409,12 +433,9 @@ class HttpCli(object):
|
|||||||
self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath
|
self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath
|
||||||
)
|
)
|
||||||
|
|
||||||
ok = "\x00" not in self.vpath
|
if relchk(self.vpath) and (self.vpath != "*" or self.mode != "OPTIONS"):
|
||||||
if ANYWIN:
|
|
||||||
ok = ok and not relchk(self.vpath)
|
|
||||||
|
|
||||||
if not ok and (self.vpath != "*" or self.mode != "OPTIONS"):
|
|
||||||
self.log("invalid relpath [{}]".format(self.vpath))
|
self.log("invalid relpath [{}]".format(self.vpath))
|
||||||
|
self.cbonk(self.conn.hsrv.gmal, self.req, "bad_vp", "invalid relpaths")
|
||||||
return self.tx_404() and self.keepalive
|
return self.tx_404() and self.keepalive
|
||||||
|
|
||||||
zso = self.headers.get("authorization")
|
zso = self.headers.get("authorization")
|
||||||
@@ -431,14 +452,18 @@ class HttpCli(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
|
if self.args.idp_h_usr:
|
||||||
self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*"
|
self.pw = ""
|
||||||
|
self.uname = self.headers.get(self.args.idp_h_usr) or "*"
|
||||||
|
if self.uname not in self.asrv.vfs.aread:
|
||||||
|
self.log("unknown username: [%s]" % (self.uname), 1)
|
||||||
|
self.uname = "*"
|
||||||
|
else:
|
||||||
|
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
|
||||||
|
self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*"
|
||||||
|
|
||||||
self.rvol = self.asrv.vfs.aread[self.uname]
|
self.rvol = self.asrv.vfs.aread[self.uname]
|
||||||
self.wvol = self.asrv.vfs.awrite[self.uname]
|
self.wvol = self.asrv.vfs.awrite[self.uname]
|
||||||
self.mvol = self.asrv.vfs.amove[self.uname]
|
|
||||||
self.dvol = self.asrv.vfs.adel[self.uname]
|
|
||||||
self.gvol = self.asrv.vfs.aget[self.uname]
|
|
||||||
self.upvol = self.asrv.vfs.apget[self.uname]
|
|
||||||
self.avol = self.asrv.vfs.aadmin[self.uname]
|
self.avol = self.asrv.vfs.aadmin[self.uname]
|
||||||
|
|
||||||
if self.pw and (
|
if self.pw and (
|
||||||
@@ -471,8 +496,9 @@ class HttpCli(object):
|
|||||||
self.can_get,
|
self.can_get,
|
||||||
self.can_upget,
|
self.can_upget,
|
||||||
self.can_admin,
|
self.can_admin,
|
||||||
|
self.can_dot,
|
||||||
) = (
|
) = (
|
||||||
avn.can_access("", self.uname) if avn else [False] * 7
|
avn.can_access("", self.uname) if avn else [False] * 8
|
||||||
)
|
)
|
||||||
self.avn = avn
|
self.avn = avn
|
||||||
self.vn = vn
|
self.vn = vn
|
||||||
@@ -522,6 +548,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
if pex.code == 999:
|
if pex.code == 999:
|
||||||
|
self.terse_reply(b"", 500)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
|
post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
|
||||||
@@ -547,6 +574,9 @@ class HttpCli(object):
|
|||||||
zb = b"<pre>" + html_escape(msg).encode("utf-8", "replace")
|
zb = b"<pre>" + html_escape(msg).encode("utf-8", "replace")
|
||||||
h = {"WWW-Authenticate": 'Basic realm="a"'} if pex.code == 401 else {}
|
h = {"WWW-Authenticate": 'Basic realm="a"'} if pex.code == 401 else {}
|
||||||
self.reply(zb, status=pex.code, headers=h, volsan=True)
|
self.reply(zb, status=pex.code, headers=h, volsan=True)
|
||||||
|
if pex.log:
|
||||||
|
self.log("additional error context:\n" + pex.log, 6)
|
||||||
|
|
||||||
return self.keepalive
|
return self.keepalive
|
||||||
except Pebkac:
|
except Pebkac:
|
||||||
return False
|
return False
|
||||||
@@ -557,6 +587,36 @@ class HttpCli(object):
|
|||||||
else:
|
else:
|
||||||
return self.conn.iphash.s(self.ip)
|
return self.conn.iphash.s(self.ip)
|
||||||
|
|
||||||
|
def cbonk(self, g: Garda, v: str, reason: str, descr: str) -> bool:
|
||||||
|
self.conn.hsrv.nsus += 1
|
||||||
|
if not g.lim:
|
||||||
|
return False
|
||||||
|
|
||||||
|
bonk, ip = g.bonk(self.ip, v + self.gctx)
|
||||||
|
if not bonk:
|
||||||
|
return False
|
||||||
|
|
||||||
|
xban = self.vn.flags.get("xban")
|
||||||
|
if not xban or not runhook(
|
||||||
|
self.log,
|
||||||
|
xban,
|
||||||
|
self.vn.canonical(self.rem),
|
||||||
|
self.vpath,
|
||||||
|
self.host,
|
||||||
|
self.uname,
|
||||||
|
time.time(),
|
||||||
|
0,
|
||||||
|
self.ip,
|
||||||
|
time.time(),
|
||||||
|
reason,
|
||||||
|
):
|
||||||
|
self.log("client banned: %s" % (descr,), 1)
|
||||||
|
self.conn.hsrv.bans[ip] = bonk
|
||||||
|
self.conn.hsrv.nban += 1
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
def is_banned(self) -> bool:
|
def is_banned(self) -> bool:
|
||||||
if not self.conn.bans:
|
if not self.conn.bans:
|
||||||
return False
|
return False
|
||||||
@@ -573,8 +633,7 @@ class HttpCli(object):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
self.log("banned for {:.0f} sec".format(rt), 6)
|
self.log("banned for {:.0f} sec".format(rt), 6)
|
||||||
zb = b"HTTP/1.0 403 Forbidden\r\n\r\nthank you for playing"
|
self.terse_reply(b"thank you for playing", 403)
|
||||||
self.s.sendall(zb)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def permit_caching(self) -> None:
|
def permit_caching(self) -> None:
|
||||||
@@ -628,6 +687,7 @@ class HttpCli(object):
|
|||||||
hit = zs[m.span()[0] :]
|
hit = zs[m.span()[0] :]
|
||||||
t = "malicious user; Cc in out-hdr {!r} => [{!r}]"
|
t = "malicious user; Cc in out-hdr {!r} => [{!r}]"
|
||||||
self.log(t.format(zs, hit), 1)
|
self.log(t.format(zs, hit), 1)
|
||||||
|
self.cbonk(self.conn.hsrv.gmal, zs, "cc_hdr", "Cc in out-hdr")
|
||||||
raise Pebkac(999)
|
raise Pebkac(999)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -654,6 +714,7 @@ class HttpCli(object):
|
|||||||
and not body.startswith(b"<pre>source file busy")
|
and not body.startswith(b"<pre>source file busy")
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
and (status != 404 or (self.can_get and not self.can_read))
|
||||||
):
|
):
|
||||||
if status == 404:
|
if status == 404:
|
||||||
g = self.conn.hsrv.g404
|
g = self.conn.hsrv.g404
|
||||||
@@ -675,24 +736,7 @@ class HttpCli(object):
|
|||||||
or not self.args.nonsus_urls
|
or not self.args.nonsus_urls
|
||||||
or not self.args.nonsus_urls.search(self.vpath)
|
or not self.args.nonsus_urls.search(self.vpath)
|
||||||
):
|
):
|
||||||
bonk, ip = g.bonk(self.ip, self.vpath)
|
self.cbonk(g, self.vpath, str(status), "%ss" % (status,))
|
||||||
if bonk:
|
|
||||||
xban = self.vn.flags.get("xban")
|
|
||||||
if not xban or not runhook(
|
|
||||||
self.log,
|
|
||||||
xban,
|
|
||||||
self.vn.canonical(self.rem),
|
|
||||||
self.vpath,
|
|
||||||
self.host,
|
|
||||||
self.uname,
|
|
||||||
time.time(),
|
|
||||||
0,
|
|
||||||
self.ip,
|
|
||||||
time.time(),
|
|
||||||
str(status),
|
|
||||||
):
|
|
||||||
self.log("client banned: %ss" % (status,), 1)
|
|
||||||
self.conn.hsrv.bans[ip] = bonk
|
|
||||||
|
|
||||||
if volsan:
|
if volsan:
|
||||||
vols = list(self.asrv.vfs.all_vols.values())
|
vols = list(self.asrv.vfs.all_vols.values())
|
||||||
@@ -720,6 +764,19 @@ class HttpCli(object):
|
|||||||
self.log(body.rstrip())
|
self.log(body.rstrip())
|
||||||
self.reply(body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
self.reply(body.encode("utf-8") + b"\r\n", *list(args), **kwargs)
|
||||||
|
|
||||||
|
def terse_reply(self, body: bytes, status: int = 200) -> None:
|
||||||
|
self.keepalive = False
|
||||||
|
|
||||||
|
lines = [
|
||||||
|
"%s %s %s" % (self.http_ver or "HTTP/1.1", status, HTTPCODE[status]),
|
||||||
|
"Connection: Close",
|
||||||
|
]
|
||||||
|
|
||||||
|
if body:
|
||||||
|
lines.append("Content-Length: " + unicode(len(body)))
|
||||||
|
|
||||||
|
self.s.sendall("\r\n".join(lines).encode("utf-8") + b"\r\n\r\n" + body)
|
||||||
|
|
||||||
def urlq(self, add: dict[str, str], rm: list[str]) -> str:
|
def urlq(self, add: dict[str, str], rm: list[str]) -> str:
|
||||||
"""
|
"""
|
||||||
generates url query based on uparam (b, pw, all others)
|
generates url query based on uparam (b, pw, all others)
|
||||||
@@ -804,16 +861,16 @@ class HttpCli(object):
|
|||||||
self.host.lower().split(":")[0],
|
self.host.lower().split(":")[0],
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
if re.sub(r"(:[0-9]{1,5})?/?$", "", origin) in good_origins:
|
if "pw" in ih or re.sub(r"(:[0-9]{1,5})?/?$", "", origin) in good_origins:
|
||||||
good_origin = True
|
good_origin = True
|
||||||
bad_hdrs = ("",)
|
bad_hdrs = ("",)
|
||||||
else:
|
else:
|
||||||
good_origin = False
|
good_origin = False
|
||||||
bad_hdrs = ("", "pw")
|
bad_hdrs = ("", "pw")
|
||||||
|
|
||||||
# '*' blocks all credentials (cookies, http-auth);
|
# '*' blocks auth through cookies / WWW-Authenticate;
|
||||||
# exact-match for Origin is necessary to unlock those,
|
# exact-match for Origin is necessary to unlock those,
|
||||||
# however yolo-requests (?pw=) are always allowed
|
# but the ?pw= param and PW: header are always allowed
|
||||||
acah = ih.get("access-control-request-headers", "")
|
acah = ih.get("access-control-request-headers", "")
|
||||||
acao = (origin if good_origin else None) or (
|
acao = (origin if good_origin else None) or (
|
||||||
"*" if "*" in good_origins else None
|
"*" if "*" in good_origins else None
|
||||||
@@ -863,7 +920,11 @@ class HttpCli(object):
|
|||||||
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
|
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
|
||||||
|
|
||||||
if self.vpath.startswith(".cpr/ssdp"):
|
if self.vpath.startswith(".cpr/ssdp"):
|
||||||
return self.conn.hsrv.ssdp.reply(self)
|
if self.conn.hsrv.ssdp:
|
||||||
|
return self.conn.hsrv.ssdp.reply(self)
|
||||||
|
else:
|
||||||
|
self.reply(b"ssdp is disabled in server config", 404)
|
||||||
|
return False
|
||||||
|
|
||||||
if self.vpath.startswith(".cpr/dd/") and self.args.mpmc:
|
if self.vpath.startswith(".cpr/dd/") and self.args.mpmc:
|
||||||
if self.args.mpmc == ".":
|
if self.args.mpmc == ".":
|
||||||
@@ -885,6 +946,7 @@ class HttpCli(object):
|
|||||||
if not static_path.startswith(path_base):
|
if not static_path.startswith(path_base):
|
||||||
t = "malicious user; attempted path traversal [{}] => [{}]"
|
t = "malicious user; attempted path traversal [{}] => [{}]"
|
||||||
self.log(t.format(self.vpath, static_path), 1)
|
self.log(t.format(self.vpath, static_path), 1)
|
||||||
|
self.cbonk(self.conn.hsrv.gmal, self.req, "trav", "path traversal")
|
||||||
|
|
||||||
self.tx_404()
|
self.tx_404()
|
||||||
return False
|
return False
|
||||||
@@ -1063,7 +1125,6 @@ class HttpCli(object):
|
|||||||
rem,
|
rem,
|
||||||
set(),
|
set(),
|
||||||
self.uname,
|
self.uname,
|
||||||
self.args.ed,
|
|
||||||
True,
|
True,
|
||||||
not self.args.no_scandir,
|
not self.args.no_scandir,
|
||||||
wrap=False,
|
wrap=False,
|
||||||
@@ -1077,7 +1138,7 @@ class HttpCli(object):
|
|||||||
[[True, False]],
|
[[True, False]],
|
||||||
lstat="davrt" not in vn.flags,
|
lstat="davrt" not in vn.flags,
|
||||||
)
|
)
|
||||||
if not self.args.ed:
|
if not self.can_dot:
|
||||||
names = set(exclude_dotfiles([x[0] for x in vfs_ls]))
|
names = set(exclude_dotfiles([x[0] for x in vfs_ls]))
|
||||||
vfs_ls = [x for x in vfs_ls if x[0] in names]
|
vfs_ls = [x for x in vfs_ls if x[0] in names]
|
||||||
|
|
||||||
@@ -1842,7 +1903,7 @@ class HttpCli(object):
|
|||||||
items = [unquotep(x) for x in items if items]
|
items = [unquotep(x) for x in items if items]
|
||||||
|
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
return self.tx_zip(k, v, "", vn, rem, items, self.args.ed)
|
return self.tx_zip(k, v, "", vn, rem, items)
|
||||||
|
|
||||||
def handle_post_json(self) -> bool:
|
def handle_post_json(self) -> bool:
|
||||||
try:
|
try:
|
||||||
@@ -1928,10 +1989,10 @@ class HttpCli(object):
|
|||||||
def handle_search(self, body: dict[str, Any]) -> bool:
|
def handle_search(self, body: dict[str, Any]) -> bool:
|
||||||
idx = self.conn.get_u2idx()
|
idx = self.conn.get_u2idx()
|
||||||
if not idx or not hasattr(idx, "p_end"):
|
if not idx or not hasattr(idx, "p_end"):
|
||||||
raise Pebkac(500, "sqlite3 is not available on the server; cannot search")
|
raise Pebkac(500, "server busy, or sqlite3 not available; cannot search")
|
||||||
|
|
||||||
vols = []
|
vols: list[VFS] = []
|
||||||
seen = {}
|
seen: dict[VFS, bool] = {}
|
||||||
for vtop in self.rvol:
|
for vtop in self.rvol:
|
||||||
vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
|
vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
|
||||||
vfs = vfs.dbv or vfs
|
vfs = vfs.dbv or vfs
|
||||||
@@ -1939,7 +2000,7 @@ class HttpCli(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
seen[vfs] = True
|
seen[vfs] = True
|
||||||
vols.append((vfs.vpath, vfs.realpath, vfs.flags))
|
vols.append(vfs)
|
||||||
|
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
if idx.p_end:
|
if idx.p_end:
|
||||||
@@ -1954,7 +2015,7 @@ class HttpCli(object):
|
|||||||
vbody = copy.deepcopy(body)
|
vbody = copy.deepcopy(body)
|
||||||
vbody["hash"] = len(vbody["hash"])
|
vbody["hash"] = len(vbody["hash"])
|
||||||
self.log("qj: " + repr(vbody))
|
self.log("qj: " + repr(vbody))
|
||||||
hits = idx.fsearch(vols, body)
|
hits = idx.fsearch(self.uname, vols, body)
|
||||||
msg: Any = repr(hits)
|
msg: Any = repr(hits)
|
||||||
taglist: list[str] = []
|
taglist: list[str] = []
|
||||||
trunc = False
|
trunc = False
|
||||||
@@ -1963,7 +2024,7 @@ class HttpCli(object):
|
|||||||
q = body["q"]
|
q = body["q"]
|
||||||
n = body.get("n", self.args.srch_hits)
|
n = body.get("n", self.args.srch_hits)
|
||||||
self.log("qj: {} |{}|".format(q, n))
|
self.log("qj: {} |{}|".format(q, n))
|
||||||
hits, taglist, trunc = idx.search(vols, q, n)
|
hits, taglist, trunc = idx.search(self.uname, vols, q, n)
|
||||||
msg = len(hits)
|
msg = len(hits)
|
||||||
|
|
||||||
idx.p_end = time.time()
|
idx.p_end = time.time()
|
||||||
@@ -1971,8 +2032,7 @@ class HttpCli(object):
|
|||||||
self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
|
self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
|
||||||
|
|
||||||
order = []
|
order = []
|
||||||
cfg = self.args.mte.split(",")
|
for t in self.args.mte:
|
||||||
for t in cfg:
|
|
||||||
if t in taglist:
|
if t in taglist:
|
||||||
order.append(t)
|
order.append(t)
|
||||||
for t in taglist:
|
for t in taglist:
|
||||||
@@ -2119,45 +2179,35 @@ class HttpCli(object):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def get_pwd_cookie(self, pwd: str) -> str:
|
def get_pwd_cookie(self, pwd: str) -> str:
|
||||||
if self.asrv.ah.hash(pwd) in self.asrv.iacct:
|
hpwd = self.asrv.ah.hash(pwd)
|
||||||
msg = "login ok"
|
uname = self.asrv.iacct.get(hpwd)
|
||||||
|
if uname:
|
||||||
|
msg = "hi " + uname
|
||||||
dur = int(60 * 60 * self.args.logout)
|
dur = int(60 * 60 * self.args.logout)
|
||||||
else:
|
else:
|
||||||
self.log("invalid password: {}".format(pwd), 3)
|
logpwd = pwd
|
||||||
g = self.conn.hsrv.gpwd
|
if self.args.log_badpwd == 0:
|
||||||
if g.lim:
|
logpwd = ""
|
||||||
bonk, ip = g.bonk(self.ip, pwd)
|
elif self.args.log_badpwd == 2:
|
||||||
if bonk:
|
zb = hashlib.sha512(pwd.encode("utf-8", "replace")).digest()
|
||||||
xban = self.vn.flags.get("xban")
|
logpwd = "%" + base64.b64encode(zb[:12]).decode("utf-8")
|
||||||
if not xban or not runhook(
|
|
||||||
self.log,
|
self.log("invalid password: {}".format(logpwd), 3)
|
||||||
xban,
|
self.cbonk(self.conn.hsrv.gpwd, pwd, "pw", "invalid passwords")
|
||||||
self.vn.canonical(self.rem),
|
|
||||||
self.vpath,
|
|
||||||
self.host,
|
|
||||||
self.uname,
|
|
||||||
time.time(),
|
|
||||||
0,
|
|
||||||
self.ip,
|
|
||||||
time.time(),
|
|
||||||
"pw",
|
|
||||||
):
|
|
||||||
self.log("client banned: invalid passwords", 1)
|
|
||||||
self.conn.hsrv.bans[ip] = bonk
|
|
||||||
|
|
||||||
msg = "naw dude"
|
msg = "naw dude"
|
||||||
pwd = "x" # nosec
|
pwd = "x" # nosec
|
||||||
dur = None
|
dur = 0
|
||||||
|
|
||||||
if pwd == "x":
|
if pwd == "x":
|
||||||
# reset both plaintext and tls
|
# reset both plaintext and tls
|
||||||
# (only affects active tls cookies when tls)
|
# (only affects active tls cookies when tls)
|
||||||
for k in ("cppwd", "cppws") if self.is_https else ("cppwd",):
|
for k in ("cppwd", "cppws") if self.is_https else ("cppwd",):
|
||||||
ck = gencookie(k, pwd, self.args.R, False, dur)
|
ck = gencookie(k, pwd, self.args.R, False)
|
||||||
self.out_headerlist.append(("Set-Cookie", ck))
|
self.out_headerlist.append(("Set-Cookie", ck))
|
||||||
else:
|
else:
|
||||||
k = "cppws" if self.is_https else "cppwd"
|
k = "cppws" if self.is_https else "cppwd"
|
||||||
ck = gencookie(k, pwd, self.args.R, self.is_https, dur)
|
ck = gencookie(k, pwd, self.args.R, self.is_https, dur, "; HttpOnly")
|
||||||
self.out_headerlist.append(("Set-Cookie", ck))
|
self.out_headerlist.append(("Set-Cookie", ck))
|
||||||
|
|
||||||
return msg
|
return msg
|
||||||
@@ -2167,26 +2217,30 @@ class HttpCli(object):
|
|||||||
new_dir = self.parser.require("name", 512)
|
new_dir = self.parser.require("name", 512)
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
sanitized = sanitize_fn(new_dir, "", [])
|
return self._mkdir(vjoin(self.vpath, new_dir))
|
||||||
return self._mkdir(vjoin(self.vpath, sanitized))
|
|
||||||
|
|
||||||
def _mkdir(self, vpath: str, dav: bool = False) -> bool:
|
def _mkdir(self, vpath: str, dav: bool = False) -> bool:
|
||||||
nullwrite = self.args.nw
|
nullwrite = self.args.nw
|
||||||
|
self.gctx = vpath
|
||||||
|
vpath = undot(vpath)
|
||||||
vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
rem = sanitize_vpath(rem, "/", [])
|
||||||
fn = vfs.canonical(rem)
|
fn = vfs.canonical(rem)
|
||||||
|
if not fn.startswith(vfs.realpath):
|
||||||
|
self.log("invalid mkdir [%s] [%s]" % (self.gctx, vpath), 1)
|
||||||
|
raise Pebkac(422)
|
||||||
|
|
||||||
if not nullwrite:
|
if not nullwrite:
|
||||||
fdir = os.path.dirname(fn)
|
fdir = os.path.dirname(fn)
|
||||||
|
|
||||||
if not bos.path.isdir(fdir):
|
if dav and not bos.path.isdir(fdir):
|
||||||
raise Pebkac(409, "parent folder does not exist")
|
raise Pebkac(409, "parent folder does not exist")
|
||||||
|
|
||||||
if bos.path.isdir(fn):
|
if bos.path.isdir(fn):
|
||||||
raise Pebkac(405, "that folder exists already")
|
raise Pebkac(405, 'folder "/%s" already exists' % (vpath,))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
bos.mkdir(fn)
|
bos.makedirs(fn)
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
if ex.errno == errno.EACCES:
|
if ex.errno == errno.EACCES:
|
||||||
raise Pebkac(500, "the server OS denied write-access")
|
raise Pebkac(500, "the server OS denied write-access")
|
||||||
@@ -2195,7 +2249,7 @@ class HttpCli(object):
|
|||||||
except:
|
except:
|
||||||
raise Pebkac(500, min_ex())
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
self.out_headers["X-New-Dir"] = quotep(vpath.split("/")[-1])
|
self.out_headers["X-New-Dir"] = quotep(vpath)
|
||||||
|
|
||||||
if dav:
|
if dav:
|
||||||
self.reply(b"", 201)
|
self.reply(b"", 201)
|
||||||
@@ -2716,6 +2770,29 @@ class HttpCli(object):
|
|||||||
|
|
||||||
return file_lastmod, True
|
return file_lastmod, True
|
||||||
|
|
||||||
|
def _expand(self, txt: str, phs: list[str]) -> str:
|
||||||
|
for ph in phs:
|
||||||
|
if ph.startswith("hdr."):
|
||||||
|
sv = str(self.headers.get(ph[4:], ""))
|
||||||
|
elif ph.startswith("self."):
|
||||||
|
sv = str(getattr(self, ph[5:], ""))
|
||||||
|
elif ph.startswith("cfg."):
|
||||||
|
sv = str(getattr(self.args, ph[4:], ""))
|
||||||
|
elif ph.startswith("vf."):
|
||||||
|
sv = str(self.vn.flags.get(ph[3:]) or "")
|
||||||
|
elif ph == "srv.itime":
|
||||||
|
sv = str(int(time.time()))
|
||||||
|
elif ph == "srv.htime":
|
||||||
|
sv = datetime.now(UTC).strftime("%Y-%m-%d, %H:%M:%S")
|
||||||
|
else:
|
||||||
|
self.log("unknown placeholder in server config: [%s]" % (ph), 3)
|
||||||
|
continue
|
||||||
|
|
||||||
|
sv = self.conn.hsrv.ptn_hsafe.sub("_", sv)
|
||||||
|
txt = txt.replace("{{%s}}" % (ph,), sv)
|
||||||
|
|
||||||
|
return txt
|
||||||
|
|
||||||
def tx_file(self, req_path: str) -> bool:
|
def tx_file(self, req_path: str) -> bool:
|
||||||
status = 200
|
status = 200
|
||||||
logmsg = "{:4} {} ".format("", self.req)
|
logmsg = "{:4} {} ".format("", self.req)
|
||||||
@@ -2883,18 +2960,19 @@ class HttpCli(object):
|
|||||||
mime = "text/plain; charset=utf-8"
|
mime = "text/plain; charset=utf-8"
|
||||||
|
|
||||||
self.out_headers["Accept-Ranges"] = "bytes"
|
self.out_headers["Accept-Ranges"] = "bytes"
|
||||||
self.send_headers(length=upper - lower, status=status, mime=mime)
|
|
||||||
|
|
||||||
logmsg += unicode(status) + logtail
|
logmsg += unicode(status) + logtail
|
||||||
|
|
||||||
if self.mode == "HEAD" or not do_send:
|
if self.mode == "HEAD" or not do_send:
|
||||||
if self.do_log:
|
if self.do_log:
|
||||||
self.log(logmsg)
|
self.log(logmsg)
|
||||||
|
|
||||||
|
self.send_headers(length=upper - lower, status=status, mime=mime)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
ret = True
|
ret = True
|
||||||
with open_func(*open_args) as f:
|
with open_func(*open_args) as f:
|
||||||
|
self.send_headers(length=upper - lower, status=status, mime=mime)
|
||||||
|
|
||||||
sendfun = sendfile_kern if use_sendfile else sendfile_py
|
sendfun = sendfile_kern if use_sendfile else sendfile_py
|
||||||
remains = sendfun(
|
remains = sendfun(
|
||||||
self.log, lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
|
self.log, lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
|
||||||
@@ -2902,7 +2980,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
if remains > 0:
|
if remains > 0:
|
||||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||||
self.keepalive = False
|
ret = False
|
||||||
|
|
||||||
spd = self._spd((upper - lower) - remains)
|
spd = self._spd((upper - lower) - remains)
|
||||||
if self.do_log:
|
if self.do_log:
|
||||||
@@ -2918,7 +2996,6 @@ class HttpCli(object):
|
|||||||
vn: VFS,
|
vn: VFS,
|
||||||
rem: str,
|
rem: str,
|
||||||
items: list[str],
|
items: list[str],
|
||||||
dots: bool,
|
|
||||||
) -> bool:
|
) -> bool:
|
||||||
if self.args.no_zip:
|
if self.args.no_zip:
|
||||||
raise Pebkac(400, "not enabled")
|
raise Pebkac(400, "not enabled")
|
||||||
@@ -2975,7 +3052,7 @@ class HttpCli(object):
|
|||||||
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
|
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
|
||||||
|
|
||||||
fgen = vn.zipgen(
|
fgen = vn.zipgen(
|
||||||
vpath, rem, set(items), self.uname, dots, False, not self.args.no_scandir
|
vpath, rem, set(items), self.uname, False, not self.args.no_scandir
|
||||||
)
|
)
|
||||||
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
||||||
cfmt = ""
|
cfmt = ""
|
||||||
@@ -3042,7 +3119,7 @@ class HttpCli(object):
|
|||||||
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
|
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def tx_md(self, fs_path: str) -> bool:
|
def tx_md(self, vn: VFS, fs_path: str) -> bool:
|
||||||
logmsg = " %s @%s " % (self.req, self.uname)
|
logmsg = " %s @%s " % (self.req, self.uname)
|
||||||
|
|
||||||
if not self.can_write:
|
if not self.can_write:
|
||||||
@@ -3059,9 +3136,16 @@ class HttpCli(object):
|
|||||||
st = bos.stat(html_path)
|
st = bos.stat(html_path)
|
||||||
ts_html = st.st_mtime
|
ts_html = st.st_mtime
|
||||||
|
|
||||||
|
max_sz = 1024 * self.args.txt_max
|
||||||
sz_md = 0
|
sz_md = 0
|
||||||
lead = b""
|
lead = b""
|
||||||
|
fullfile = b""
|
||||||
for buf in yieldfile(fs_path):
|
for buf in yieldfile(fs_path):
|
||||||
|
if sz_md < max_sz:
|
||||||
|
fullfile += buf
|
||||||
|
else:
|
||||||
|
fullfile = b""
|
||||||
|
|
||||||
if not sz_md and b"\n" in buf[:2]:
|
if not sz_md and b"\n" in buf[:2]:
|
||||||
lead = buf[: buf.find(b"\n") + 1]
|
lead = buf[: buf.find(b"\n") + 1]
|
||||||
sz_md += len(lead)
|
sz_md += len(lead)
|
||||||
@@ -3070,6 +3154,21 @@ class HttpCli(object):
|
|||||||
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
|
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
|
||||||
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
|
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
|
||||||
|
|
||||||
|
if (
|
||||||
|
fullfile
|
||||||
|
and "exp" in vn.flags
|
||||||
|
and "edit" not in self.uparam
|
||||||
|
and "edit2" not in self.uparam
|
||||||
|
and vn.flags.get("exp_md")
|
||||||
|
):
|
||||||
|
fulltxt = fullfile.decode("utf-8", "replace")
|
||||||
|
fulltxt = self._expand(fulltxt, vn.flags.get("exp_md") or [])
|
||||||
|
fullfile = fulltxt.encode("utf-8", "replace")
|
||||||
|
|
||||||
|
if fullfile:
|
||||||
|
fullfile = html_bescape(fullfile)
|
||||||
|
sz_md = len(lead) + len(fullfile)
|
||||||
|
|
||||||
file_ts = int(max(ts_md, ts_html, self.E.t0))
|
file_ts = int(max(ts_md, ts_html, self.E.t0))
|
||||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||||
self.out_headers["Last-Modified"] = file_lastmod
|
self.out_headers["Last-Modified"] = file_lastmod
|
||||||
@@ -3111,8 +3210,11 @@ class HttpCli(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
self.s.sendall(html[0] + lead)
|
self.s.sendall(html[0] + lead)
|
||||||
for buf in yieldfile(fs_path):
|
if fullfile:
|
||||||
self.s.sendall(html_bescape(buf))
|
self.s.sendall(fullfile)
|
||||||
|
else:
|
||||||
|
for buf in yieldfile(fs_path):
|
||||||
|
self.s.sendall(html_bescape(buf))
|
||||||
|
|
||||||
self.s.sendall(html[1])
|
self.s.sendall(html[1])
|
||||||
|
|
||||||
@@ -3233,7 +3335,7 @@ class HttpCli(object):
|
|||||||
if v == "y":
|
if v == "y":
|
||||||
dur = 86400 * 299
|
dur = 86400 * 299
|
||||||
else:
|
else:
|
||||||
dur = None
|
dur = 0
|
||||||
v = "x"
|
v = "x"
|
||||||
|
|
||||||
ck = gencookie("k304", v, self.args.R, False, dur)
|
ck = gencookie("k304", v, self.args.R, False, dur)
|
||||||
@@ -3243,7 +3345,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def setck(self) -> bool:
|
def setck(self) -> bool:
|
||||||
k, v = self.uparam["setck"].split("=", 1)
|
k, v = self.uparam["setck"].split("=", 1)
|
||||||
t = None if v == "" else 86400 * 299
|
t = 0 if v == "" else 86400 * 299
|
||||||
ck = gencookie(k, v, self.args.R, False, t)
|
ck = gencookie(k, v, self.args.R, False, t)
|
||||||
self.out_headerlist.append(("Set-Cookie", ck))
|
self.out_headerlist.append(("Set-Cookie", ck))
|
||||||
self.reply(b"o7\n")
|
self.reply(b"o7\n")
|
||||||
@@ -3251,7 +3353,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def set_cfg_reset(self) -> bool:
|
def set_cfg_reset(self) -> bool:
|
||||||
for k in ("k304", "js", "idxh", "cppwd", "cppws"):
|
for k in ("k304", "js", "idxh", "cppwd", "cppws"):
|
||||||
cookie = gencookie(k, "x", self.args.R, False, None)
|
cookie = gencookie(k, "x", self.args.R, False)
|
||||||
self.out_headerlist.append(("Set-Cookie", cookie))
|
self.out_headerlist.append(("Set-Cookie", cookie))
|
||||||
|
|
||||||
self.redirect("", "?h#cc")
|
self.redirect("", "?h#cc")
|
||||||
@@ -3261,11 +3363,19 @@ class HttpCli(object):
|
|||||||
rc = 404
|
rc = 404
|
||||||
if self.args.vague_403:
|
if self.args.vague_403:
|
||||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try logging in or <a href="{}/?h">go home</a></p>'
|
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try logging in or <a href="{}/?h">go home</a></p>'
|
||||||
|
pt = "404 not found ┐( ´ -`)┌ (or maybe you don't have access -- try logging in)"
|
||||||
elif is_403:
|
elif is_403:
|
||||||
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">you\'ll have to log in or <a href="{}/?h">go home</a></p>'
|
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">you\'ll have to log in or <a href="{}/?h">go home</a></p>'
|
||||||
|
pt = "403 forbiddena ~┻━┻ (you'll have to log in)"
|
||||||
rc = 403
|
rc = 403
|
||||||
else:
|
else:
|
||||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p><a id="r" href="{}/?h">go home</a></p>'
|
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p><a id="r" href="{}/?h">go home</a></p>'
|
||||||
|
pt = "404 not found ┐( ´ -`)┌"
|
||||||
|
|
||||||
|
if self.ua.startswith("curl/") or self.ua.startswith("fetch"):
|
||||||
|
pt = "# acct: %s\n%s" % (self.uname, pt)
|
||||||
|
self.reply(pt.encode("utf-8"), status=rc)
|
||||||
|
return True
|
||||||
|
|
||||||
t = t.format(self.args.SR)
|
t = t.format(self.args.SR)
|
||||||
qv = quotep(self.vpaths) + self.ourlq()
|
qv = quotep(self.vpaths) + self.ourlq()
|
||||||
@@ -3364,6 +3474,7 @@ class HttpCli(object):
|
|||||||
ret["k" + quotep(excl)] = sub
|
ret["k" + quotep(excl)] = sub
|
||||||
|
|
||||||
vfs = self.asrv.vfs
|
vfs = self.asrv.vfs
|
||||||
|
dots = False
|
||||||
try:
|
try:
|
||||||
vn, rem = vfs.get(top, self.uname, True, False)
|
vn, rem = vfs.get(top, self.uname, True, False)
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||||
@@ -3372,6 +3483,7 @@ class HttpCli(object):
|
|||||||
not self.args.no_scandir,
|
not self.args.no_scandir,
|
||||||
[[True, False], [False, True]],
|
[[True, False], [False, True]],
|
||||||
)
|
)
|
||||||
|
dots = self.uname in vn.axs.udot
|
||||||
except:
|
except:
|
||||||
vfs_ls = []
|
vfs_ls = []
|
||||||
vfs_virt = {}
|
vfs_virt = {}
|
||||||
@@ -3380,15 +3492,12 @@ class HttpCli(object):
|
|||||||
if d1 == top:
|
if d1 == top:
|
||||||
vfs_virt[d2] = vfs # typechk, value never read
|
vfs_virt[d2] = vfs # typechk, value never read
|
||||||
|
|
||||||
dirs = []
|
dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||||
|
|
||||||
dirnames = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
if not dots or "dots" not in self.uparam:
|
||||||
|
dirs = exclude_dotfiles(dirs)
|
||||||
|
|
||||||
if not self.args.ed or "dots" not in self.uparam:
|
dirs = [quotep(x) for x in dirs if x != excl]
|
||||||
dirnames = exclude_dotfiles(dirnames)
|
|
||||||
|
|
||||||
for fn in [x for x in dirnames if x != excl]:
|
|
||||||
dirs.append(quotep(fn))
|
|
||||||
|
|
||||||
for x in vfs_virt:
|
for x in vfs_virt:
|
||||||
if x != excl:
|
if x != excl:
|
||||||
@@ -3420,7 +3529,8 @@ class HttpCli(object):
|
|||||||
fk_vols = {
|
fk_vols = {
|
||||||
vol: (vol.flags["fk"], 2 if "fka" in vol.flags else 1)
|
vol: (vol.flags["fk"], 2 if "fka" in vol.flags else 1)
|
||||||
for vp, vol in self.asrv.vfs.all_vols.items()
|
for vp, vol in self.asrv.vfs.all_vols.items()
|
||||||
if "fk" in vol.flags and (vp in self.rvol or vp in self.upvol)
|
if "fk" in vol.flags
|
||||||
|
and (self.uname in vol.axs.uread or self.uname in vol.axs.upget)
|
||||||
}
|
}
|
||||||
for vol in self.asrv.vfs.all_vols.values():
|
for vol in self.asrv.vfs.all_vols.values():
|
||||||
cur = idx.get_cur(vol.realpath)
|
cur = idx.get_cur(vol.realpath)
|
||||||
@@ -3691,7 +3801,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
elif self.can_get and self.avn:
|
elif self.can_get and self.avn:
|
||||||
axs = self.avn.axs
|
axs = self.avn.axs
|
||||||
if self.uname not in axs.uhtml and "*" not in axs.uhtml:
|
if self.uname not in axs.uhtml:
|
||||||
pass
|
pass
|
||||||
elif is_dir:
|
elif is_dir:
|
||||||
for fn in ("index.htm", "index.html"):
|
for fn in ("index.htm", "index.html"):
|
||||||
@@ -3731,7 +3841,8 @@ class HttpCli(object):
|
|||||||
)[: vn.flags["fk"]]
|
)[: vn.flags["fk"]]
|
||||||
got = self.uparam.get("k")
|
got = self.uparam.get("k")
|
||||||
if got != correct:
|
if got != correct:
|
||||||
self.log("wrong filekey, want {}, got {}".format(correct, got))
|
t = "wrong filekey, want %s, got %s\n vp: %s\n ap: %s"
|
||||||
|
self.log(t % (correct, got, self.req, abspath), 6)
|
||||||
return self.tx_404()
|
return self.tx_404()
|
||||||
|
|
||||||
if (
|
if (
|
||||||
@@ -3743,7 +3854,7 @@ class HttpCli(object):
|
|||||||
or "edit2" in self.uparam
|
or "edit2" in self.uparam
|
||||||
)
|
)
|
||||||
):
|
):
|
||||||
return self.tx_md(abspath)
|
return self.tx_md(vn, abspath)
|
||||||
|
|
||||||
return self.tx_file(abspath)
|
return self.tx_file(abspath)
|
||||||
|
|
||||||
@@ -3805,6 +3916,10 @@ class HttpCli(object):
|
|||||||
if bos.path.exists(fn):
|
if bos.path.exists(fn):
|
||||||
with open(fsenc(fn), "rb") as f:
|
with open(fsenc(fn), "rb") as f:
|
||||||
logues[n] = f.read().decode("utf-8")
|
logues[n] = f.read().decode("utf-8")
|
||||||
|
if "exp" in vn.flags:
|
||||||
|
logues[n] = self._expand(
|
||||||
|
logues[n], vn.flags.get("exp_lg") or []
|
||||||
|
)
|
||||||
|
|
||||||
readme = ""
|
readme = ""
|
||||||
if not self.args.no_readme and not logues[1]:
|
if not self.args.no_readme and not logues[1]:
|
||||||
@@ -3814,6 +3929,8 @@ class HttpCli(object):
|
|||||||
with open(fsenc(fn), "rb") as f:
|
with open(fsenc(fn), "rb") as f:
|
||||||
readme = f.read().decode("utf-8")
|
readme = f.read().decode("utf-8")
|
||||||
break
|
break
|
||||||
|
if readme and "exp" in vn.flags:
|
||||||
|
readme = self._expand(readme, vn.flags.get("exp_md") or [])
|
||||||
|
|
||||||
vf = vn.flags
|
vf = vn.flags
|
||||||
unlist = vf.get("unlist", "")
|
unlist = vf.get("unlist", "")
|
||||||
@@ -3825,6 +3942,9 @@ class HttpCli(object):
|
|||||||
"acct": self.uname,
|
"acct": self.uname,
|
||||||
"idx": e2d,
|
"idx": e2d,
|
||||||
"itag": e2t,
|
"itag": e2t,
|
||||||
|
"dsort": vf["sort"],
|
||||||
|
"dfull": "nocrop" in vf,
|
||||||
|
"u2ts": vf["u2ts"],
|
||||||
"lifetime": vn.flags.get("lifetime") or 0,
|
"lifetime": vn.flags.get("lifetime") or 0,
|
||||||
"frand": bool(vn.flags.get("rand")),
|
"frand": bool(vn.flags.get("rand")),
|
||||||
"unlist": unlist,
|
"unlist": unlist,
|
||||||
@@ -3832,40 +3952,46 @@ class HttpCli(object):
|
|||||||
"logues": logues,
|
"logues": logues,
|
||||||
"readme": readme,
|
"readme": readme,
|
||||||
}
|
}
|
||||||
j2a = {
|
cgv = {
|
||||||
"vdir": quotep(self.vpath),
|
|
||||||
"vpnodes": vpnodes,
|
|
||||||
"files": [],
|
|
||||||
"ls0": None,
|
"ls0": None,
|
||||||
"acct": self.uname,
|
"acct": self.uname,
|
||||||
"perms": json.dumps(perms),
|
"perms": perms,
|
||||||
|
"u2ts": vf["u2ts"],
|
||||||
"lifetime": ls_ret["lifetime"],
|
"lifetime": ls_ret["lifetime"],
|
||||||
"frand": bool(vn.flags.get("rand")),
|
"frand": bool(vn.flags.get("rand")),
|
||||||
"taglist": [],
|
|
||||||
"def_hcols": [],
|
"def_hcols": [],
|
||||||
"have_emp": self.args.emp,
|
"have_emp": self.args.emp,
|
||||||
"have_up2k_idx": e2d,
|
"have_up2k_idx": e2d,
|
||||||
"have_tags_idx": e2t,
|
|
||||||
"have_acode": (not self.args.no_acode),
|
"have_acode": (not self.args.no_acode),
|
||||||
"have_mv": (not self.args.no_mv),
|
"have_mv": (not self.args.no_mv),
|
||||||
"have_del": (not self.args.no_del),
|
"have_del": (not self.args.no_del),
|
||||||
"have_zip": (not self.args.no_zip),
|
"have_zip": (not self.args.no_zip),
|
||||||
"have_unpost": int(self.args.unpost),
|
"have_unpost": int(self.args.unpost),
|
||||||
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
|
|
||||||
"sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
|
"sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
|
||||||
|
"readme": readme,
|
||||||
|
"dgrid": "grid" in vf,
|
||||||
|
"dfull": "nocrop" in vf,
|
||||||
|
"dsort": vf["sort"],
|
||||||
|
"themes": self.args.themes,
|
||||||
|
"turbolvl": self.args.turbo,
|
||||||
|
"u2j": self.args.u2j,
|
||||||
|
"idxh": int(self.args.ih),
|
||||||
|
"u2sort": self.args.u2sort,
|
||||||
|
}
|
||||||
|
j2a = {
|
||||||
|
"cgv": cgv,
|
||||||
|
"vpnodes": vpnodes,
|
||||||
|
"files": [],
|
||||||
|
"ls0": None,
|
||||||
|
"taglist": [],
|
||||||
|
"have_tags_idx": e2t,
|
||||||
|
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
|
||||||
"sb_lg": "" if "no_sb_lg" in vf else (vf.get("lg_sbf") or "y"),
|
"sb_lg": "" if "no_sb_lg" in vf else (vf.get("lg_sbf") or "y"),
|
||||||
"url_suf": url_suf,
|
"url_suf": url_suf,
|
||||||
"logues": logues,
|
"logues": logues,
|
||||||
"readme": readme,
|
|
||||||
"title": html_escape("%s %s" % (self.args.bname, self.vpath), crlf=True),
|
"title": html_escape("%s %s" % (self.args.bname, self.vpath), crlf=True),
|
||||||
"srv_info": srv_infot,
|
"srv_info": srv_infot,
|
||||||
"dgrid": "grid" in vf,
|
|
||||||
"unlist": unlist,
|
|
||||||
"dtheme": self.args.theme,
|
"dtheme": self.args.theme,
|
||||||
"themes": self.args.themes,
|
|
||||||
"turbolvl": self.args.turbo,
|
|
||||||
"idxh": int(self.args.ih),
|
|
||||||
"u2sort": self.args.u2sort,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.args.js_browser:
|
if self.args.js_browser:
|
||||||
@@ -3898,7 +4024,7 @@ class HttpCli(object):
|
|||||||
for k in ["zip", "tar"]:
|
for k in ["zip", "tar"]:
|
||||||
v = self.uparam.get(k)
|
v = self.uparam.get(k)
|
||||||
if v is not None:
|
if v is not None:
|
||||||
return self.tx_zip(k, v, self.vpath, vn, rem, [], self.args.ed)
|
return self.tx_zip(k, v, self.vpath, vn, rem, [])
|
||||||
|
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||||
rem,
|
rem,
|
||||||
@@ -3929,13 +4055,13 @@ class HttpCli(object):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
# show dotfiles if permitted and requested
|
# show dotfiles if permitted and requested
|
||||||
if not self.args.ed or (
|
if not self.can_dot or (
|
||||||
"dots" not in self.uparam and (is_ls or "dots" not in self.cookies)
|
"dots" not in self.uparam and (is_ls or "dots" not in self.cookies)
|
||||||
):
|
):
|
||||||
ls_names = exclude_dotfiles(ls_names)
|
ls_names = exclude_dotfiles(ls_names)
|
||||||
|
|
||||||
add_fk = vn.flags.get("fk")
|
add_fk = vf.get("fk")
|
||||||
fk_alg = 2 if "fka" in vn.flags else 1
|
fk_alg = 2 if "fka" in vf else 1
|
||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
files = []
|
files = []
|
||||||
@@ -3975,7 +4101,7 @@ class HttpCli(object):
|
|||||||
margin = "-"
|
margin = "-"
|
||||||
|
|
||||||
sz = inf.st_size
|
sz = inf.st_size
|
||||||
zd = datetime.utcfromtimestamp(linf.st_mtime)
|
zd = datetime.fromtimestamp(linf.st_mtime, UTC)
|
||||||
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
||||||
zd.year,
|
zd.year,
|
||||||
zd.month,
|
zd.month,
|
||||||
@@ -4035,6 +4161,9 @@ class HttpCli(object):
|
|||||||
ap = vn.canonical(rem)
|
ap = vn.canonical(rem)
|
||||||
return self.tx_file(ap) # is no-cache
|
return self.tx_file(ap) # is no-cache
|
||||||
|
|
||||||
|
mte = vn.flags.get("mte", {})
|
||||||
|
add_up_at = ".up_at" in mte
|
||||||
|
is_admin = self.can_admin
|
||||||
tagset: set[str] = set()
|
tagset: set[str] = set()
|
||||||
for fe in files:
|
for fe in files:
|
||||||
fn = fe["name"]
|
fn = fe["name"]
|
||||||
@@ -4062,24 +4191,38 @@ class HttpCli(object):
|
|||||||
self.log(t.format(rd, fn, min_ex()))
|
self.log(t.format(rd, fn, min_ex()))
|
||||||
break
|
break
|
||||||
|
|
||||||
fe["tags"] = {k: v for k, v in r}
|
tags = {k: v for k, v in r}
|
||||||
|
|
||||||
if self.can_admin:
|
if is_admin:
|
||||||
q = "select ip, at from up where rd=? and fn=?"
|
q = "select ip, at from up where rd=? and fn=?"
|
||||||
try:
|
try:
|
||||||
zs1, zs2 = icur.execute(q, erd_efn).fetchone()
|
zs1, zs2 = icur.execute(q, erd_efn).fetchone()
|
||||||
fe["tags"]["up_ip"] = zs1
|
if zs1:
|
||||||
fe["tags"][".up_at"] = zs2
|
tags["up_ip"] = zs1
|
||||||
|
if zs2:
|
||||||
|
tags[".up_at"] = zs2
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
elif add_up_at:
|
||||||
|
q = "select at from up where rd=? and fn=?"
|
||||||
|
try:
|
||||||
|
(zs1,) = icur.execute(q, erd_efn).fetchone()
|
||||||
|
if zs1:
|
||||||
|
tags[".up_at"] = zs1
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
_ = [tagset.add(k) for k in fe["tags"]]
|
_ = [tagset.add(k) for k in tags]
|
||||||
|
fe["tags"] = tags
|
||||||
|
|
||||||
if icur:
|
if icur:
|
||||||
mte = vn.flags.get("mte") or "up_ip,.up_at"
|
lmte = list(mte)
|
||||||
taglist = [k for k in mte.split(",") if k in tagset]
|
if self.can_admin:
|
||||||
|
lmte += ["up_ip", ".up_at"]
|
||||||
|
|
||||||
|
taglist = [k for k in lmte if k in tagset]
|
||||||
for fe in dirs:
|
for fe in dirs:
|
||||||
fe["tags"] = {}
|
fe["tags"] = ODict()
|
||||||
else:
|
else:
|
||||||
taglist = list(tagset)
|
taglist = list(tagset)
|
||||||
|
|
||||||
@@ -4099,6 +4242,12 @@ class HttpCli(object):
|
|||||||
if sz < 1024 * self.args.txt_max:
|
if sz < 1024 * self.args.txt_max:
|
||||||
with open(fsenc(docpath), "rb") as f:
|
with open(fsenc(docpath), "rb") as f:
|
||||||
doctxt = f.read().decode("utf-8", "replace")
|
doctxt = f.read().decode("utf-8", "replace")
|
||||||
|
|
||||||
|
if doc.lower().endswith(".md") and "exp" in vn.flags:
|
||||||
|
doctxt = self._expand(doctxt, vn.flags.get("exp_md") or [])
|
||||||
|
else:
|
||||||
|
self.log("doc 2big: [{}]".format(doc), c=6)
|
||||||
|
doctxt = "( size of textfile exceeds serverside limit )"
|
||||||
else:
|
else:
|
||||||
self.log("doc 404: [{}]".format(doc), c=6)
|
self.log("doc 404: [{}]".format(doc), c=6)
|
||||||
doctxt = "( textfile not found )"
|
doctxt = "( textfile not found )"
|
||||||
@@ -4112,7 +4261,7 @@ class HttpCli(object):
|
|||||||
dirs.sort(key=itemgetter("name"))
|
dirs.sort(key=itemgetter("name"))
|
||||||
|
|
||||||
if is_js:
|
if is_js:
|
||||||
j2a["ls0"] = {
|
j2a["ls0"] = cgv["ls0"] = {
|
||||||
"dirs": dirs,
|
"dirs": dirs,
|
||||||
"files": files,
|
"files": files,
|
||||||
"taglist": taglist,
|
"taglist": taglist,
|
||||||
@@ -4126,7 +4275,7 @@ class HttpCli(object):
|
|||||||
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")
|
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")
|
||||||
|
|
||||||
if "mth" in vn.flags:
|
if "mth" in vn.flags:
|
||||||
j2a["def_hcols"] = vn.flags["mth"].split(",")
|
j2a["def_hcols"] = list(vn.flags["mth"])
|
||||||
|
|
||||||
html = self.j2s(tpl, **j2a)
|
html = self.j2s(tpl, **j2a)
|
||||||
self.reply(html.encode("utf-8", "replace"))
|
self.reply(html.encode("utf-8", "replace"))
|
||||||
|
|||||||
@@ -112,32 +112,30 @@ class HttpConn(object):
|
|||||||
return self.u2idx
|
return self.u2idx
|
||||||
|
|
||||||
def _detect_https(self) -> bool:
|
def _detect_https(self) -> bool:
|
||||||
method = None
|
try:
|
||||||
if True:
|
method = self.s.recv(4, socket.MSG_PEEK)
|
||||||
try:
|
except socket.timeout:
|
||||||
method = self.s.recv(4, socket.MSG_PEEK)
|
return False
|
||||||
except socket.timeout:
|
except AttributeError:
|
||||||
return False
|
# jython does not support msg_peek; forget about https
|
||||||
except AttributeError:
|
method = self.s.recv(4)
|
||||||
# jython does not support msg_peek; forget about https
|
self.sr = Util.Unrecv(self.s, self.log)
|
||||||
method = self.s.recv(4)
|
self.sr.buf = method
|
||||||
self.sr = Util.Unrecv(self.s, self.log)
|
|
||||||
self.sr.buf = method
|
|
||||||
|
|
||||||
# jython used to do this, they stopped since it's broken
|
# jython used to do this, they stopped since it's broken
|
||||||
# but reimplementing sendall is out of scope for now
|
# but reimplementing sendall is out of scope for now
|
||||||
if not getattr(self.s, "sendall", None):
|
if not getattr(self.s, "sendall", None):
|
||||||
self.s.sendall = self.s.send # type: ignore
|
self.s.sendall = self.s.send # type: ignore
|
||||||
|
|
||||||
if len(method) != 4:
|
if len(method) != 4:
|
||||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||||
len(method)
|
len(method)
|
||||||
)
|
)
|
||||||
if method:
|
if method:
|
||||||
self.log(err)
|
self.log(err)
|
||||||
|
|
||||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return not method or not bool(PTN_HTTP.match(method))
|
return not method or not bool(PTN_HTTP.match(method))
|
||||||
|
|
||||||
|
|||||||
@@ -109,6 +109,7 @@ class HttpSrv(object):
|
|||||||
self.g404 = Garda(self.args.ban_404)
|
self.g404 = Garda(self.args.ban_404)
|
||||||
self.g403 = Garda(self.args.ban_403)
|
self.g403 = Garda(self.args.ban_403)
|
||||||
self.g422 = Garda(self.args.ban_422, False)
|
self.g422 = Garda(self.args.ban_422, False)
|
||||||
|
self.gmal = Garda(self.args.ban_422)
|
||||||
self.gurl = Garda(self.args.ban_url)
|
self.gurl = Garda(self.args.ban_url)
|
||||||
self.bans: dict[str, int] = {}
|
self.bans: dict[str, int] = {}
|
||||||
self.aclose: dict[str, int] = {}
|
self.aclose: dict[str, int] = {}
|
||||||
@@ -128,6 +129,9 @@ class HttpSrv(object):
|
|||||||
|
|
||||||
self.u2fh = FHC()
|
self.u2fh = FHC()
|
||||||
self.metrics = Metrics(self)
|
self.metrics = Metrics(self)
|
||||||
|
self.nreq = 0
|
||||||
|
self.nsus = 0
|
||||||
|
self.nban = 0
|
||||||
self.srvs: list[socket.socket] = []
|
self.srvs: list[socket.socket] = []
|
||||||
self.ncli = 0 # exact
|
self.ncli = 0 # exact
|
||||||
self.clients: set[HttpConn] = set() # laggy
|
self.clients: set[HttpConn] = set() # laggy
|
||||||
@@ -149,6 +153,7 @@ class HttpSrv(object):
|
|||||||
self._build_statics()
|
self._build_statics()
|
||||||
|
|
||||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||||
|
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
|
||||||
|
|
||||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||||
if not self.args.no_dav:
|
if not self.args.no_dav:
|
||||||
@@ -170,7 +175,7 @@ class HttpSrv(object):
|
|||||||
if self.args.log_thrs:
|
if self.args.log_thrs:
|
||||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||||
|
|
||||||
self.th_cfg: dict[str, Any] = {}
|
self.th_cfg: dict[str, set[str]] = {}
|
||||||
Daemon(self.post_init, "hsrv-init2")
|
Daemon(self.post_init, "hsrv-init2")
|
||||||
|
|
||||||
def post_init(self) -> None:
|
def post_init(self) -> None:
|
||||||
|
|||||||
@@ -4,10 +4,11 @@ from __future__ import print_function, unicode_literals
|
|||||||
import argparse # typechk
|
import argparse # typechk
|
||||||
import colorsys
|
import colorsys
|
||||||
import hashlib
|
import hashlib
|
||||||
|
import re
|
||||||
|
|
||||||
from .__init__ import PY2
|
from .__init__ import PY2
|
||||||
from .th_srv import HAVE_PIL
|
from .th_srv import HAVE_PIL, HAVE_PILF
|
||||||
from .util import BytesIO
|
from .util import BytesIO # type: ignore
|
||||||
|
|
||||||
|
|
||||||
class Ico(object):
|
class Ico(object):
|
||||||
@@ -21,10 +22,10 @@ class Ico(object):
|
|||||||
ext = bext.decode("utf-8")
|
ext = bext.decode("utf-8")
|
||||||
zb = hashlib.sha1(bext).digest()[2:4]
|
zb = hashlib.sha1(bext).digest()[2:4]
|
||||||
if PY2:
|
if PY2:
|
||||||
zb = [ord(x) for x in zb]
|
zb = [ord(x) for x in zb] # type: ignore
|
||||||
|
|
||||||
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
|
||||||
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1)
|
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
|
||||||
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
ci = [int(x * 255) for x in list(c1) + list(c2)]
|
||||||
c = "".join(["{:02x}".format(x) for x in ci])
|
c = "".join(["{:02x}".format(x) for x in ci])
|
||||||
|
|
||||||
@@ -37,6 +38,32 @@ class Ico(object):
|
|||||||
|
|
||||||
if chrome:
|
if chrome:
|
||||||
# cannot handle more than ~2000 unique SVGs
|
# cannot handle more than ~2000 unique SVGs
|
||||||
|
if HAVE_PILF:
|
||||||
|
# pillow 10.1 made this the default font;
|
||||||
|
# svg: 3.7s, this: 36s
|
||||||
|
try:
|
||||||
|
from PIL import Image, ImageDraw
|
||||||
|
|
||||||
|
# [.lt] are hard to see lowercase / unspaced
|
||||||
|
ext2 = re.sub("(.)", "\\1 ", ext).upper()
|
||||||
|
|
||||||
|
h = int(128 * h / w)
|
||||||
|
w = 128
|
||||||
|
img = Image.new("RGB", (w, h), "#" + c[:6])
|
||||||
|
pb = ImageDraw.Draw(img)
|
||||||
|
_, _, tw, th = pb.textbbox((0, 0), ext2, font_size=16)
|
||||||
|
xy = ((w - tw) // 2, (h - th) // 2)
|
||||||
|
pb.text(xy, ext2, fill="#" + c[6:], font_size=16)
|
||||||
|
|
||||||
|
img = img.resize((w * 2, h * 2), Image.NEAREST)
|
||||||
|
|
||||||
|
buf = BytesIO()
|
||||||
|
img.save(buf, format="PNG", compress_level=1)
|
||||||
|
return "image/png", buf.getvalue()
|
||||||
|
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if HAVE_PIL:
|
if HAVE_PIL:
|
||||||
# svg: 3s, cache: 6s, this: 8s
|
# svg: 3s, cache: 6s, this: 8s
|
||||||
from PIL import Image, ImageDraw
|
from PIL import Image, ImageDraw
|
||||||
@@ -64,20 +91,6 @@ class Ico(object):
|
|||||||
img.save(buf, format="PNG", compress_level=1)
|
img.save(buf, format="PNG", compress_level=1)
|
||||||
return "image/png", buf.getvalue()
|
return "image/png", buf.getvalue()
|
||||||
|
|
||||||
elif False:
|
|
||||||
# 48s, too slow
|
|
||||||
import pyvips
|
|
||||||
|
|
||||||
h = int(192 * h / w)
|
|
||||||
w = 192
|
|
||||||
img = pyvips.Image.text(
|
|
||||||
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
|
|
||||||
)
|
|
||||||
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
|
|
||||||
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
|
|
||||||
buf = img.write_to_buffer(".png[compression=1]")
|
|
||||||
return "image/png", buf
|
|
||||||
|
|
||||||
svg = """\
|
svg = """\
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
|
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
|
||||||
|
|||||||
@@ -34,14 +34,23 @@ class Metrics(object):
|
|||||||
|
|
||||||
ret: list[str] = []
|
ret: list[str] = []
|
||||||
|
|
||||||
def addc(k: str, unit: str, v: str, desc: str) -> None:
|
def addc(k: str, v: str, desc: str) -> None:
|
||||||
if unit:
|
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||||
k += "_" + unit
|
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
|
||||||
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
|
||||||
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
|
def adduc(k: str, unit: str, v: str, desc: str) -> None:
|
||||||
else:
|
k += "_" + unit
|
||||||
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
|
||||||
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
|
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
|
||||||
|
|
||||||
|
def addg(k: str, v: str, desc: str) -> None:
|
||||||
|
zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
|
||||||
|
ret.append(zs % (k, k, desc, k, v))
|
||||||
|
|
||||||
|
def addug(k: str, unit: str, v: str, desc: str) -> None:
|
||||||
|
k += "_" + unit
|
||||||
|
zs = "# TYPE %s gauge\n# UNIT %s %s\n# HELP %s %s\n%s %s"
|
||||||
|
ret.append(zs % (k, k, unit, k, desc, k, v))
|
||||||
|
|
||||||
def addh(k: str, typ: str, desc: str) -> None:
|
def addh(k: str, typ: str, desc: str) -> None:
|
||||||
zs = "# TYPE %s %s\n# HELP %s %s"
|
zs = "# TYPE %s %s\n# HELP %s %s"
|
||||||
@@ -54,17 +63,75 @@ class Metrics(object):
|
|||||||
def addv(k: str, v: str) -> None:
|
def addv(k: str, v: str) -> None:
|
||||||
ret.append("%s %s" % (k, v))
|
ret.append("%s %s" % (k, v))
|
||||||
|
|
||||||
|
t = "time since last copyparty restart"
|
||||||
v = "{:.3f}".format(time.time() - self.hsrv.t0)
|
v = "{:.3f}".format(time.time() - self.hsrv.t0)
|
||||||
addc("cpp_uptime", "seconds", v, "time since last server restart")
|
addug("cpp_uptime", "seconds", v, t)
|
||||||
|
|
||||||
|
# timestamps are gauges because initial value is not zero
|
||||||
|
t = "unixtime of last copyparty restart"
|
||||||
|
v = "{:.3f}".format(self.hsrv.t0)
|
||||||
|
addug("cpp_boot_unixtime", "seconds", v, t)
|
||||||
|
|
||||||
|
t = "number of open http(s) client connections"
|
||||||
|
addg("cpp_http_conns", str(self.hsrv.ncli), t)
|
||||||
|
|
||||||
|
t = "number of http(s) requests since last restart"
|
||||||
|
addc("cpp_http_reqs", str(self.hsrv.nreq), t)
|
||||||
|
|
||||||
|
t = "number of 403/422/malicious reqs since restart"
|
||||||
|
addc("cpp_sus_reqs", str(self.hsrv.nsus), t)
|
||||||
|
|
||||||
v = str(len(conn.bans or []))
|
v = str(len(conn.bans or []))
|
||||||
addc("cpp_bans", "", v, "number of banned IPs")
|
addg("cpp_active_bans", v, "number of currently banned IPs")
|
||||||
|
|
||||||
|
t = "number of IPs banned since last restart"
|
||||||
|
addg("cpp_total_bans", str(self.hsrv.nban), t)
|
||||||
|
|
||||||
|
if not args.nos_vst:
|
||||||
|
x = self.hsrv.broker.ask("up2k.get_state")
|
||||||
|
vs = json.loads(x.get())
|
||||||
|
|
||||||
|
nvidle = 0
|
||||||
|
nvbusy = 0
|
||||||
|
nvoffline = 0
|
||||||
|
for v in vs["volstate"].values():
|
||||||
|
if v == "online, idle":
|
||||||
|
nvidle += 1
|
||||||
|
elif "OFFLINE" in v:
|
||||||
|
nvoffline += 1
|
||||||
|
else:
|
||||||
|
nvbusy += 1
|
||||||
|
|
||||||
|
addg("cpp_idle_vols", str(nvidle), "number of idle/ready volumes")
|
||||||
|
addg("cpp_busy_vols", str(nvbusy), "number of busy/indexing volumes")
|
||||||
|
addg("cpp_offline_vols", str(nvoffline), "number of offline volumes")
|
||||||
|
|
||||||
|
t = "time since last database activity (upload/rename/delete)"
|
||||||
|
addug("cpp_db_idle", "seconds", str(vs["dbwt"]), t)
|
||||||
|
|
||||||
|
t = "unixtime of last database activity (upload/rename/delete)"
|
||||||
|
addug("cpp_db_act", "seconds", str(vs["dbwu"]), t)
|
||||||
|
|
||||||
|
t = "number of files queued for hashing/indexing"
|
||||||
|
addg("cpp_hashing_files", str(vs["hashq"]), t)
|
||||||
|
|
||||||
|
t = "number of files queued for metadata scanning"
|
||||||
|
addg("cpp_tagq_files", str(vs["tagq"]), t)
|
||||||
|
|
||||||
|
try:
|
||||||
|
t = "number of files queued for plugin-based analysis"
|
||||||
|
addg("cpp_mtpq_files", str(int(vs["mtpq"])), t)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if not args.nos_hdd:
|
if not args.nos_hdd:
|
||||||
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
addbh("cpp_disk_size_bytes", "total HDD size of volume")
|
||||||
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
addbh("cpp_disk_free_bytes", "free HDD space in volume")
|
||||||
for vpath, vol in allvols:
|
for vpath, vol in allvols:
|
||||||
free, total = get_df(vol.realpath)
|
free, total = get_df(vol.realpath)
|
||||||
|
if free is None or total is None:
|
||||||
|
continue
|
||||||
|
|
||||||
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
|
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
|
||||||
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
|
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
|
||||||
|
|
||||||
@@ -161,5 +228,6 @@ class Metrics(object):
|
|||||||
ret.append("# EOF")
|
ret.append("# EOF")
|
||||||
|
|
||||||
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
|
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
|
||||||
|
mime = cli.uparam.get("mime") or mime
|
||||||
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
|
cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
|
||||||
return True
|
return True
|
||||||
|
|||||||
@@ -118,7 +118,7 @@ def ffprobe(
|
|||||||
b"--",
|
b"--",
|
||||||
fsenc(abspath),
|
fsenc(abspath),
|
||||||
]
|
]
|
||||||
rc, so, se = runcmd(cmd, timeout=timeout)
|
rc, so, se = runcmd(cmd, timeout=timeout, nice=True)
|
||||||
retchk(rc, cmd, se)
|
retchk(rc, cmd, se)
|
||||||
return parse_ffprobe(so)
|
return parse_ffprobe(so)
|
||||||
|
|
||||||
@@ -261,7 +261,8 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
|
|||||||
if ".resw" in ret and ".resh" in ret:
|
if ".resw" in ret and ".resh" in ret:
|
||||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||||
|
|
||||||
zd = {k: (0, v) for k, v in ret.items()}
|
zero = int("0")
|
||||||
|
zd = {k: (zero, v) for k, v in ret.items()}
|
||||||
|
|
||||||
return zd, md
|
return zd, md
|
||||||
|
|
||||||
@@ -562,6 +563,7 @@ class MTag(object):
|
|||||||
|
|
||||||
args = {
|
args = {
|
||||||
"env": env,
|
"env": env,
|
||||||
|
"nice": True,
|
||||||
"timeout": parser.timeout,
|
"timeout": parser.timeout,
|
||||||
"kill": parser.kill,
|
"kill": parser.kill,
|
||||||
"capture": parser.capture,
|
"capture": parser.capture,
|
||||||
@@ -572,11 +574,6 @@ class MTag(object):
|
|||||||
zd.update(ret)
|
zd.update(ret)
|
||||||
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
|
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
|
||||||
|
|
||||||
if WINDOWS:
|
|
||||||
args["creationflags"] = 0x4000
|
|
||||||
else:
|
|
||||||
cmd = ["nice"] + cmd
|
|
||||||
|
|
||||||
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
||||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||||
|
|||||||
@@ -136,8 +136,12 @@ class PWHash(object):
|
|||||||
import getpass
|
import getpass
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
p1 = getpass.getpass("password> ")
|
try:
|
||||||
p2 = getpass.getpass("again or just hit ENTER> ")
|
p1 = getpass.getpass("password> ")
|
||||||
|
p2 = getpass.getpass("again or just hit ENTER> ")
|
||||||
|
except EOFError:
|
||||||
|
return
|
||||||
|
|
||||||
if p2 and p1 != p2:
|
if p2 and p1 != p2:
|
||||||
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
|
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
|
||||||
continue
|
continue
|
||||||
|
|||||||
@@ -162,6 +162,7 @@ class SMB(object):
|
|||||||
if "connData" in cl:
|
if "connData" in cl:
|
||||||
return cl["connData"]["partygoer"]
|
return cl["connData"]["partygoer"]
|
||||||
cf = cf.f_back
|
cf = cf.f_back
|
||||||
|
raise Exception()
|
||||||
except:
|
except:
|
||||||
warning(
|
warning(
|
||||||
"nyoron... %s <<-- %s <<-- %s <<-- %s",
|
"nyoron... %s <<-- %s <<-- %s <<-- %s",
|
||||||
@@ -405,6 +406,7 @@ class SMB(object):
|
|||||||
|
|
||||||
smbserver.os.path.abspath = self._hook
|
smbserver.os.path.abspath = self._hook
|
||||||
smbserver.os.path.expanduser = self._hook
|
smbserver.os.path.expanduser = self._hook
|
||||||
|
smbserver.os.path.expandvars = self._hook
|
||||||
smbserver.os.path.getatime = self._hook
|
smbserver.os.path.getatime = self._hook
|
||||||
smbserver.os.path.getctime = self._hook
|
smbserver.os.path.getctime = self._hook
|
||||||
smbserver.os.path.getmtime = self._hook
|
smbserver.os.path.getmtime = self._hook
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ class Adapter(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
if True:
|
if True: # pylint: disable=using-constant-test
|
||||||
# Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
|
# Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
|
||||||
_IPv4Address = str
|
_IPv4Address = str
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ from datetime import datetime
|
|||||||
from .__init__ import CORES
|
from .__init__ import CORES
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .th_cli import ThumbCli
|
from .th_cli import ThumbCli
|
||||||
from .util import vjoin
|
from .util import UTC, vjoin
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Generator, Optional
|
from typing import Any, Generator, Optional
|
||||||
@@ -108,7 +108,7 @@ def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
|
|||||||
tf_path = tf.name
|
tf_path = tf.name
|
||||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
||||||
|
|
||||||
dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
|
dt = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S")
|
||||||
|
|
||||||
bos.chmod(tf_path, 0o444)
|
bos.chmod(tf_path, 0o444)
|
||||||
return {
|
return {
|
||||||
|
|||||||
@@ -36,16 +36,22 @@ from .tcpsrv import TcpSrv
|
|||||||
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
from .util import (
|
from .util import (
|
||||||
|
DEF_EXP,
|
||||||
|
DEF_MTE,
|
||||||
|
DEF_MTH,
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
|
UTC,
|
||||||
VERSIONS,
|
VERSIONS,
|
||||||
Daemon,
|
Daemon,
|
||||||
Garda,
|
Garda,
|
||||||
HLog,
|
HLog,
|
||||||
HMaccas,
|
HMaccas,
|
||||||
|
ODict,
|
||||||
alltrace,
|
alltrace,
|
||||||
ansi_re,
|
ansi_re,
|
||||||
min_ex,
|
min_ex,
|
||||||
mp,
|
mp,
|
||||||
|
odfusion,
|
||||||
pybin,
|
pybin,
|
||||||
start_log_thrs,
|
start_log_thrs,
|
||||||
start_stackmon,
|
start_stackmon,
|
||||||
@@ -115,8 +121,6 @@ class SvcHub(object):
|
|||||||
args.no_mv = True
|
args.no_mv = True
|
||||||
args.hardlink = True
|
args.hardlink = True
|
||||||
args.vague_403 = True
|
args.vague_403 = True
|
||||||
args.ban_404 = "50,60,1440"
|
|
||||||
args.turbo = -1
|
|
||||||
args.nih = True
|
args.nih = True
|
||||||
|
|
||||||
if args.s:
|
if args.s:
|
||||||
@@ -134,7 +138,8 @@ class SvcHub(object):
|
|||||||
self.gpwd = Garda(self.args.ban_pw)
|
self.gpwd = Garda(self.args.ban_pw)
|
||||||
self.g404 = Garda(self.args.ban_404)
|
self.g404 = Garda(self.args.ban_404)
|
||||||
self.g403 = Garda(self.args.ban_403)
|
self.g403 = Garda(self.args.ban_403)
|
||||||
self.g422 = Garda(self.args.ban_422)
|
self.g422 = Garda(self.args.ban_422, False)
|
||||||
|
self.gmal = Garda(self.args.ban_422)
|
||||||
self.gurl = Garda(self.args.ban_url)
|
self.gurl = Garda(self.args.ban_url)
|
||||||
|
|
||||||
self.log_div = 10 ** (6 - args.log_tdec)
|
self.log_div = 10 ** (6 - args.log_tdec)
|
||||||
@@ -400,20 +405,25 @@ class SvcHub(object):
|
|||||||
if al.rsp_jtr:
|
if al.rsp_jtr:
|
||||||
al.rsp_slp = 0.000001
|
al.rsp_slp = 0.000001
|
||||||
|
|
||||||
al.th_covers = set(al.th_covers.split(","))
|
zsl = al.th_covers.split(",")
|
||||||
|
zsl = [x.strip() for x in zsl]
|
||||||
|
zsl = [x for x in zsl if x]
|
||||||
|
al.th_covers = set(zsl)
|
||||||
|
al.th_coversd = set(zsl + ["." + x for x in zsl])
|
||||||
|
|
||||||
for k in "c".split(" "):
|
for k in "c".split(" "):
|
||||||
vl = getattr(al, k)
|
vl = getattr(al, k)
|
||||||
if not vl:
|
if not vl:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl]
|
vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
|
||||||
setattr(al, k, vl)
|
setattr(al, k, vl)
|
||||||
|
|
||||||
for k in "lo hist ssl_log".split(" "):
|
for k in "lo hist ssl_log".split(" "):
|
||||||
vs = getattr(al, k)
|
vs = getattr(al, k)
|
||||||
if vs and vs.startswith("~"):
|
if vs:
|
||||||
setattr(al, k, os.path.expanduser(vs))
|
vs = os.path.expandvars(os.path.expanduser(vs))
|
||||||
|
setattr(al, k, vs)
|
||||||
|
|
||||||
for k in "sus_urls nonsus_urls".split(" "):
|
for k in "sus_urls nonsus_urls".split(" "):
|
||||||
vs = getattr(al, k)
|
vs = getattr(al, k)
|
||||||
@@ -427,14 +437,38 @@ class SvcHub(object):
|
|||||||
elif al.ban_url == "no":
|
elif al.ban_url == "no":
|
||||||
al.sus_urls = None
|
al.sus_urls = None
|
||||||
|
|
||||||
if al.xff_src in ("any", "0", ""):
|
al.xff_hdr = al.xff_hdr.lower()
|
||||||
al.xff_re = None
|
al.idp_h_usr = al.idp_h_usr.lower()
|
||||||
else:
|
# al.idp_h_grp = al.idp_h_grp.lower()
|
||||||
zs = al.xff_src.replace(" ", "").replace(".", "\\.").replace(",", "|")
|
|
||||||
al.xff_re = re.compile("^(?:" + zs + ")")
|
al.xff_re = self._ipa2re(al.xff_src)
|
||||||
|
al.ipa_re = self._ipa2re(al.ipa)
|
||||||
|
al.ftp_ipa_re = self._ipa2re(al.ftp_ipa or al.ipa)
|
||||||
|
|
||||||
|
mte = ODict.fromkeys(DEF_MTE.split(","), True)
|
||||||
|
al.mte = odfusion(mte, al.mte)
|
||||||
|
|
||||||
|
mth = ODict.fromkeys(DEF_MTH.split(","), True)
|
||||||
|
al.mth = odfusion(mth, al.mth)
|
||||||
|
|
||||||
|
exp = ODict.fromkeys(DEF_EXP.split(" "), True)
|
||||||
|
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
|
||||||
|
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
|
||||||
|
|
||||||
|
for k in ["no_hash", "no_idx"]:
|
||||||
|
ptn = getattr(self.args, k)
|
||||||
|
if ptn:
|
||||||
|
setattr(self.args, k, re.compile(ptn))
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def _ipa2re(self, txt) -> Optional[re.Pattern]:
|
||||||
|
if txt in ("any", "0", ""):
|
||||||
|
return None
|
||||||
|
|
||||||
|
zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
|
||||||
|
return re.compile("^(?:" + zs + ")")
|
||||||
|
|
||||||
def _setlimits(self) -> None:
|
def _setlimits(self) -> None:
|
||||||
try:
|
try:
|
||||||
import resource
|
import resource
|
||||||
@@ -476,7 +510,7 @@ class SvcHub(object):
|
|||||||
self.args.nc = min(self.args.nc, soft // 2)
|
self.args.nc = min(self.args.nc, soft // 2)
|
||||||
|
|
||||||
def _logname(self) -> str:
|
def _logname(self) -> str:
|
||||||
dt = datetime.utcnow()
|
dt = datetime.now(UTC)
|
||||||
fn = str(self.args.lo)
|
fn = str(self.args.lo)
|
||||||
for fs in "YmdHMS":
|
for fs in "YmdHMS":
|
||||||
fs = "%" + fs
|
fs = "%" + fs
|
||||||
@@ -497,12 +531,17 @@ class SvcHub(object):
|
|||||||
sel_fn = "{}.{}".format(fn, ctr)
|
sel_fn = "{}.{}".format(fn, ctr)
|
||||||
|
|
||||||
fn = sel_fn
|
fn = sel_fn
|
||||||
|
try:
|
||||||
|
os.makedirs(os.path.dirname(fn))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if do_xz:
|
if do_xz:
|
||||||
import lzma
|
import lzma
|
||||||
|
|
||||||
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
|
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
|
||||||
|
self.args.no_logflush = True
|
||||||
else:
|
else:
|
||||||
lh = open(fn, "wt", encoding="utf-8", errors="replace")
|
lh = open(fn, "wt", encoding="utf-8", errors="replace")
|
||||||
except:
|
except:
|
||||||
@@ -725,14 +764,31 @@ class SvcHub(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
with self.log_mutex:
|
with self.log_mutex:
|
||||||
zd = datetime.utcnow()
|
zd = datetime.now(UTC)
|
||||||
ts = self.log_dfmt % (
|
ts = self.log_dfmt % (
|
||||||
zd.year,
|
zd.year,
|
||||||
zd.month * 100 + zd.day,
|
zd.month * 100 + zd.day,
|
||||||
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
(zd.hour * 100 + zd.minute) * 100 + zd.second,
|
||||||
zd.microsecond // self.log_div,
|
zd.microsecond // self.log_div,
|
||||||
)
|
)
|
||||||
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
|
|
||||||
|
if c and not self.args.no_ansi:
|
||||||
|
if isinstance(c, int):
|
||||||
|
msg = "\033[3%sm%s\033[0m" % (c, msg)
|
||||||
|
elif "\033" not in c:
|
||||||
|
msg = "\033[%sm%s\033[0m" % (c, msg)
|
||||||
|
else:
|
||||||
|
msg = "%s%s\033[0m" % (c, msg)
|
||||||
|
|
||||||
|
if "\033" in src:
|
||||||
|
src += "\033[0m"
|
||||||
|
|
||||||
|
if "\033" in msg:
|
||||||
|
msg += "\033[0m"
|
||||||
|
|
||||||
|
self.logf.write("@%s [%-21s] %s\n" % (ts, src, msg))
|
||||||
|
if not self.args.no_logflush:
|
||||||
|
self.logf.flush()
|
||||||
|
|
||||||
now = time.time()
|
now = time.time()
|
||||||
if now >= self.next_day:
|
if now >= self.next_day:
|
||||||
@@ -743,7 +799,7 @@ class SvcHub(object):
|
|||||||
self.logf.close()
|
self.logf.close()
|
||||||
self._setup_logfile("")
|
self._setup_logfile("")
|
||||||
|
|
||||||
dt = datetime.utcnow()
|
dt = datetime.now(UTC)
|
||||||
|
|
||||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||||
day_now = dt.day
|
day_now = dt.day
|
||||||
@@ -751,14 +807,20 @@ class SvcHub(object):
|
|||||||
dt += timedelta(hours=12)
|
dt += timedelta(hours=12)
|
||||||
|
|
||||||
dt = dt.replace(hour=0, minute=0, second=0)
|
dt = dt.replace(hour=0, minute=0, second=0)
|
||||||
self.next_day = calendar.timegm(dt.utctimetuple())
|
try:
|
||||||
|
tt = dt.utctimetuple()
|
||||||
|
except:
|
||||||
|
# still makes me hella uncomfortable
|
||||||
|
tt = dt.timetuple()
|
||||||
|
|
||||||
|
self.next_day = calendar.timegm(tt)
|
||||||
|
|
||||||
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
"""handles logging from all components"""
|
"""handles logging from all components"""
|
||||||
with self.log_mutex:
|
with self.log_mutex:
|
||||||
now = time.time()
|
now = time.time()
|
||||||
if now >= self.next_day:
|
if now >= self.next_day:
|
||||||
dt = datetime.utcfromtimestamp(now)
|
dt = datetime.fromtimestamp(now, UTC)
|
||||||
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
zs = "{}\n" if self.no_ansi else "\033[36m{}\033[0m\n"
|
||||||
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
zs = zs.format(dt.strftime("%Y-%m-%d"))
|
||||||
print(zs, end="")
|
print(zs, end="")
|
||||||
@@ -781,7 +843,7 @@ class SvcHub(object):
|
|||||||
else:
|
else:
|
||||||
msg = "%s%s\033[0m" % (c, msg)
|
msg = "%s%s\033[0m" % (c, msg)
|
||||||
|
|
||||||
zd = datetime.utcfromtimestamp(now)
|
zd = datetime.fromtimestamp(now, UTC)
|
||||||
ts = self.log_efmt % (
|
ts = self.log_efmt % (
|
||||||
zd.hour,
|
zd.hour,
|
||||||
zd.minute,
|
zd.minute,
|
||||||
@@ -802,6 +864,8 @@ class SvcHub(object):
|
|||||||
|
|
||||||
if self.logf:
|
if self.logf:
|
||||||
self.logf.write(msg)
|
self.logf.write(msg)
|
||||||
|
if not self.args.no_logflush:
|
||||||
|
self.logf.flush()
|
||||||
|
|
||||||
def pr(self, *a: Any, **ka: Any) -> None:
|
def pr(self, *a: Any, **ka: Any) -> None:
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -241,6 +241,11 @@ class TcpSrv(object):
|
|||||||
raise OSError(E_ADDR_IN_USE[0], "")
|
raise OSError(E_ADDR_IN_USE[0], "")
|
||||||
self.srv.append(srv)
|
self.srv.append(srv)
|
||||||
except (OSError, socket.error) as ex:
|
except (OSError, socket.error) as ex:
|
||||||
|
try:
|
||||||
|
srv.close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if ex.errno in E_ADDR_IN_USE:
|
if ex.errno in E_ADDR_IN_USE:
|
||||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||||
elif ex.errno in E_ADDR_NOT_AVAIL:
|
elif ex.errno in E_ADDR_NOT_AVAIL:
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ class ThumbCli(object):
|
|||||||
if not c:
|
if not c:
|
||||||
raise Exception()
|
raise Exception()
|
||||||
except:
|
except:
|
||||||
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
c = {k: set() for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
|
||||||
|
|
||||||
self.thumbable = c["thumbable"]
|
self.thumbable = c["thumbable"]
|
||||||
self.fmt_pil = c["pil"]
|
self.fmt_pil = c["pil"]
|
||||||
@@ -94,7 +94,7 @@ class ThumbCli(object):
|
|||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
tpaths = [tpath]
|
tpaths = [tpath]
|
||||||
if fmt == "w":
|
if fmt == "w":
|
||||||
# also check for jpg (maybe webp is unavailable)
|
# also check for jpg (maybe webp is unavailable)
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ from .bos import bos
|
|||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||||
from .util import (
|
from .util import (
|
||||||
FFMPEG_URL,
|
FFMPEG_URL,
|
||||||
BytesIO,
|
BytesIO, # type: ignore
|
||||||
Cooldown,
|
Cooldown,
|
||||||
Daemon,
|
Daemon,
|
||||||
Pebkac,
|
Pebkac,
|
||||||
@@ -37,14 +37,21 @@ if TYPE_CHECKING:
|
|||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
|
|
||||||
HAVE_PIL = False
|
HAVE_PIL = False
|
||||||
|
HAVE_PILF = False
|
||||||
HAVE_HEIF = False
|
HAVE_HEIF = False
|
||||||
HAVE_AVIF = False
|
HAVE_AVIF = False
|
||||||
HAVE_WEBP = False
|
HAVE_WEBP = False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from PIL import ExifTags, Image, ImageOps
|
from PIL import ExifTags, Image, ImageFont, ImageOps
|
||||||
|
|
||||||
HAVE_PIL = True
|
HAVE_PIL = True
|
||||||
|
try:
|
||||||
|
ImageFont.load_default(size=16)
|
||||||
|
HAVE_PILF = True
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
try:
|
try:
|
||||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||||
HAVE_WEBP = True
|
HAVE_WEBP = True
|
||||||
@@ -79,17 +86,23 @@ except:
|
|||||||
HAVE_VIPS = False
|
HAVE_VIPS = False
|
||||||
|
|
||||||
|
|
||||||
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
|
||||||
# base16 = 16 = 256
|
# base16 = 16 = 256
|
||||||
# b64-lc = 38 = 1444
|
# b64-lc = 38 = 1444
|
||||||
# base64 = 64 = 4096
|
# base64 = 64 = 4096
|
||||||
rd, fn = vsplit(rem)
|
rd, fn = vsplit(rem)
|
||||||
if rd:
|
if not rd:
|
||||||
h = hashlib.sha512(afsenc(rd)).digest()
|
rd = "\ntop"
|
||||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
|
||||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
# spectrograms are never cropped; strip fullsize flag
|
||||||
else:
|
ext = rem.split(".")[-1].lower()
|
||||||
rd = "top"
|
if ext in ffa and fmt in ("wf", "jf"):
|
||||||
|
fmt = fmt[:1]
|
||||||
|
|
||||||
|
rd += "\n" + fmt
|
||||||
|
h = hashlib.sha512(afsenc(rd)).digest()
|
||||||
|
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||||
|
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||||
|
|
||||||
# could keep original filenames but this is safer re pathlen
|
# could keep original filenames but this is safer re pathlen
|
||||||
h = hashlib.sha512(afsenc(fn)).digest()
|
h = hashlib.sha512(afsenc(fn)).digest()
|
||||||
@@ -98,7 +111,8 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
|
|||||||
if fmt in ("opus", "caf"):
|
if fmt in ("opus", "caf"):
|
||||||
cat = "ac"
|
cat = "ac"
|
||||||
else:
|
else:
|
||||||
fmt = "webp" if fmt == "w" else "png" if fmt == "p" else "jpg"
|
fc = fmt[:1]
|
||||||
|
fmt = "webp" if fc == "w" else "png" if fc == "p" else "jpg"
|
||||||
cat = "th"
|
cat = "th"
|
||||||
|
|
||||||
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt)
|
||||||
@@ -118,7 +132,7 @@ class ThumbSrv(object):
|
|||||||
self.stopping = False
|
self.stopping = False
|
||||||
self.nthr = max(1, self.args.th_mt)
|
self.nthr = max(1, self.args.th_mt)
|
||||||
|
|
||||||
self.q: Queue[Optional[tuple[str, str, VFS]]] = Queue(self.nthr * 4)
|
self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
|
||||||
for n in range(self.nthr):
|
for n in range(self.nthr):
|
||||||
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
|
Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
|
||||||
|
|
||||||
@@ -193,7 +207,7 @@ class ThumbSrv(object):
|
|||||||
self.log("no histpath for [{}]".format(ptop))
|
self.log("no histpath for [{}]".format(ptop))
|
||||||
return None
|
return None
|
||||||
|
|
||||||
tpath = thumb_path(histpath, rem, mtime, fmt)
|
tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
|
||||||
abspath = os.path.join(ptop, rem)
|
abspath = os.path.join(ptop, rem)
|
||||||
cond = threading.Condition(self.mutex)
|
cond = threading.Condition(self.mutex)
|
||||||
do_conv = False
|
do_conv = False
|
||||||
@@ -220,8 +234,8 @@ class ThumbSrv(object):
|
|||||||
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
|
||||||
vn = self.asrv.vfs.all_aps[0][1]
|
vn = self.asrv.vfs.all_aps[0][1]
|
||||||
|
|
||||||
self.q.put((abspath, tpath, vn))
|
self.q.put((abspath, tpath, fmt, vn))
|
||||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
|
||||||
|
|
||||||
while not self.stopping:
|
while not self.stopping:
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
@@ -257,7 +271,7 @@ class ThumbSrv(object):
|
|||||||
if not task:
|
if not task:
|
||||||
break
|
break
|
||||||
|
|
||||||
abspath, tpath, vn = task
|
abspath, tpath, fmt, vn = task
|
||||||
ext = abspath.split(".")[-1].lower()
|
ext = abspath.split(".")[-1].lower()
|
||||||
png_ok = False
|
png_ok = False
|
||||||
funs = []
|
funs = []
|
||||||
@@ -290,7 +304,7 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
for fun in funs:
|
for fun in funs:
|
||||||
try:
|
try:
|
||||||
fun(abspath, ttpath, vn)
|
fun(abspath, ttpath, fmt, vn)
|
||||||
break
|
break
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
msg = "{} could not create thumbnail of {}\n{}"
|
msg = "{} could not create thumbnail of {}\n{}"
|
||||||
@@ -324,7 +338,7 @@ class ThumbSrv(object):
|
|||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.nthr -= 1
|
self.nthr -= 1
|
||||||
|
|
||||||
def fancy_pillow(self, im: "Image.Image", vn: VFS) -> "Image.Image":
|
def fancy_pillow(self, im: "Image.Image", fmt: str, vn: VFS) -> "Image.Image":
|
||||||
# exif_transpose is expensive (loads full image + unconditional copy)
|
# exif_transpose is expensive (loads full image + unconditional copy)
|
||||||
res = self.getres(vn)
|
res = self.getres(vn)
|
||||||
r = max(*res) * 2
|
r = max(*res) * 2
|
||||||
@@ -341,7 +355,7 @@ class ThumbSrv(object):
|
|||||||
if rot in rots:
|
if rot in rots:
|
||||||
im = im.transpose(rots[rot])
|
im = im.transpose(rots[rot])
|
||||||
|
|
||||||
if "nocrop" in vn.flags:
|
if fmt.endswith("f"):
|
||||||
im.thumbnail(res, resample=Image.LANCZOS)
|
im.thumbnail(res, resample=Image.LANCZOS)
|
||||||
else:
|
else:
|
||||||
iw, ih = im.size
|
iw, ih = im.size
|
||||||
@@ -351,10 +365,10 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
return im
|
return im
|
||||||
|
|
||||||
def conv_pil(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_pil(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
with Image.open(fsenc(abspath)) as im:
|
with Image.open(fsenc(abspath)) as im:
|
||||||
try:
|
try:
|
||||||
im = self.fancy_pillow(im, vn)
|
im = self.fancy_pillow(im, fmt, vn)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self.log("fancy_pillow {}".format(ex), "90")
|
self.log("fancy_pillow {}".format(ex), "90")
|
||||||
im.thumbnail(self.getres(vn))
|
im.thumbnail(self.getres(vn))
|
||||||
@@ -380,9 +394,9 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
im.save(tpath, **args)
|
im.save(tpath, **args)
|
||||||
|
|
||||||
def conv_vips(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_vips(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
crops = ["centre", "none"]
|
crops = ["centre", "none"]
|
||||||
if "nocrop" in vn.flags:
|
if fmt.endswith("f"):
|
||||||
crops = ["none"]
|
crops = ["none"]
|
||||||
|
|
||||||
w, h = self.getres(vn)
|
w, h = self.getres(vn)
|
||||||
@@ -397,9 +411,10 @@ class ThumbSrv(object):
|
|||||||
if c == crops[-1]:
|
if c == crops[-1]:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
assert img # type: ignore
|
||||||
img.write_to_file(tpath, Q=40)
|
img.write_to_file(tpath, Q=40)
|
||||||
|
|
||||||
def conv_ffmpeg(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if not ret:
|
if not ret:
|
||||||
return
|
return
|
||||||
@@ -412,7 +427,7 @@ class ThumbSrv(object):
|
|||||||
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
seek = [b"-ss", "{:.0f}".format(dur / 3).encode("utf-8")]
|
||||||
|
|
||||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||||
if "nocrop" in vn.flags:
|
if fmt.endswith("f"):
|
||||||
scale += "decrease,setsar=1:1"
|
scale += "decrease,setsar=1:1"
|
||||||
else:
|
else:
|
||||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||||
@@ -454,7 +469,7 @@ class ThumbSrv(object):
|
|||||||
|
|
||||||
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
|
||||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||||
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"])
|
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True)
|
||||||
if not ret:
|
if not ret:
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -497,7 +512,7 @@ class ThumbSrv(object):
|
|||||||
self.log(t + txt, c=c)
|
self.log(t + txt, c=c)
|
||||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||||
|
|
||||||
def conv_waves(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_waves(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in ret:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
@@ -525,7 +540,7 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def conv_spec(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
|
||||||
if "ac" not in ret:
|
if "ac" not in ret:
|
||||||
raise Exception("not audio")
|
raise Exception("not audio")
|
||||||
@@ -568,7 +583,7 @@ class ThumbSrv(object):
|
|||||||
cmd += [fsenc(tpath)]
|
cmd += [fsenc(tpath)]
|
||||||
self._run_ff(cmd, vn)
|
self._run_ff(cmd, vn)
|
||||||
|
|
||||||
def conv_opus(self, abspath: str, tpath: str, vn: VFS) -> None:
|
def conv_opus(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
|
||||||
if self.args.no_acode:
|
if self.args.no_acode:
|
||||||
raise Exception("disabled in server config")
|
raise Exception("disabled in server config")
|
||||||
|
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ import time
|
|||||||
from operator import itemgetter
|
from operator import itemgetter
|
||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
from .__init__ import ANYWIN, TYPE_CHECKING, unicode
|
||||||
|
from .authsrv import LEELOO_DALLAS, VFS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .up2k import up2k_wark_from_hashlist
|
from .up2k import up2k_wark_from_hashlist
|
||||||
from .util import (
|
from .util import (
|
||||||
@@ -20,6 +21,7 @@ from .util import (
|
|||||||
min_ex,
|
min_ex,
|
||||||
quotep,
|
quotep,
|
||||||
s3dec,
|
s3dec,
|
||||||
|
vjoin,
|
||||||
)
|
)
|
||||||
|
|
||||||
if HAVE_SQLITE3:
|
if HAVE_SQLITE3:
|
||||||
@@ -61,7 +63,7 @@ class U2idx(object):
|
|||||||
self.log_func("u2idx", msg, c)
|
self.log_func("u2idx", msg, c)
|
||||||
|
|
||||||
def fsearch(
|
def fsearch(
|
||||||
self, vols: list[tuple[str, str, dict[str, Any]]], body: dict[str, Any]
|
self, uname: str, vols: list[VFS], body: dict[str, Any]
|
||||||
) -> list[dict[str, Any]]:
|
) -> list[dict[str, Any]]:
|
||||||
"""search by up2k hashlist"""
|
"""search by up2k hashlist"""
|
||||||
if not HAVE_SQLITE3:
|
if not HAVE_SQLITE3:
|
||||||
@@ -75,7 +77,7 @@ class U2idx(object):
|
|||||||
uv: list[Union[str, int]] = [wark[:16], wark]
|
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self.run_query(vols, uq, uv, True, False, 99999)[0]
|
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
||||||
except:
|
except:
|
||||||
raise Pebkac(500, min_ex())
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
@@ -101,7 +103,7 @@ class U2idx(object):
|
|||||||
uri = ""
|
uri = ""
|
||||||
try:
|
try:
|
||||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||||
db = sqlite3.connect(uri, 2, uri=True, check_same_thread=False)
|
db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
|
||||||
cur = db.cursor()
|
cur = db.cursor()
|
||||||
cur.execute('pragma table_info("up")').fetchone()
|
cur.execute('pragma table_info("up")').fetchone()
|
||||||
self.log("ro: {}".format(db_path))
|
self.log("ro: {}".format(db_path))
|
||||||
@@ -113,14 +115,14 @@ class U2idx(object):
|
|||||||
if not cur:
|
if not cur:
|
||||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||||
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
# seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
|
||||||
cur = sqlite3.connect(db_path, 2, check_same_thread=False).cursor()
|
cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
|
||||||
self.log("opened {}".format(db_path))
|
self.log("opened {}".format(db_path))
|
||||||
|
|
||||||
self.cur[ptop] = cur
|
self.cur[ptop] = cur
|
||||||
return cur
|
return cur
|
||||||
|
|
||||||
def search(
|
def search(
|
||||||
self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
|
self, uname: str, vols: list[VFS], uq: str, lim: int
|
||||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||||
"""search by query params"""
|
"""search by query params"""
|
||||||
if not HAVE_SQLITE3:
|
if not HAVE_SQLITE3:
|
||||||
@@ -129,7 +131,6 @@ class U2idx(object):
|
|||||||
q = ""
|
q = ""
|
||||||
v: Union[str, int] = ""
|
v: Union[str, int] = ""
|
||||||
va: list[Union[str, int]] = []
|
va: list[Union[str, int]] = []
|
||||||
have_up = False # query has up.* operands
|
|
||||||
have_mt = False
|
have_mt = False
|
||||||
is_key = True
|
is_key = True
|
||||||
is_size = False
|
is_size = False
|
||||||
@@ -174,21 +175,21 @@ class U2idx(object):
|
|||||||
if v == "size":
|
if v == "size":
|
||||||
v = "up.sz"
|
v = "up.sz"
|
||||||
is_size = True
|
is_size = True
|
||||||
have_up = True
|
|
||||||
|
|
||||||
elif v == "date":
|
elif v == "date":
|
||||||
v = "up.mt"
|
v = "up.mt"
|
||||||
is_date = True
|
is_date = True
|
||||||
have_up = True
|
|
||||||
|
elif v == "up_at":
|
||||||
|
v = "up.at"
|
||||||
|
is_date = True
|
||||||
|
|
||||||
elif v == "path":
|
elif v == "path":
|
||||||
v = "trim(?||up.rd,'/')"
|
v = "trim(?||up.rd,'/')"
|
||||||
va.append("\nrd")
|
va.append("\nrd")
|
||||||
have_up = True
|
|
||||||
|
|
||||||
elif v == "name":
|
elif v == "name":
|
||||||
v = "up.fn"
|
v = "up.fn"
|
||||||
have_up = True
|
|
||||||
|
|
||||||
elif v == "tags" or ptn_mt.match(v):
|
elif v == "tags" or ptn_mt.match(v):
|
||||||
have_mt = True
|
have_mt = True
|
||||||
@@ -264,19 +265,24 @@ class U2idx(object):
|
|||||||
q += " lower({}) {} ? ) ".format(field, oper)
|
q += " lower({}) {} ? ) ".format(field, oper)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self.run_query(vols, q, va, have_up, have_mt, lim)
|
return self.run_query(uname, vols, q, va, have_mt, lim)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise Pebkac(500, repr(ex))
|
raise Pebkac(500, repr(ex))
|
||||||
|
|
||||||
def run_query(
|
def run_query(
|
||||||
self,
|
self,
|
||||||
vols: list[tuple[str, str, dict[str, Any]]],
|
uname: str,
|
||||||
|
vols: list[VFS],
|
||||||
uq: str,
|
uq: str,
|
||||||
uv: list[Union[str, int]],
|
uv: list[Union[str, int]],
|
||||||
have_up: bool,
|
|
||||||
have_mt: bool,
|
have_mt: bool,
|
||||||
lim: int,
|
lim: int,
|
||||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||||
|
if self.args.srch_dbg:
|
||||||
|
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
|
||||||
|
zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
|
||||||
|
self.log(t % (len(vols), zs), 5)
|
||||||
|
|
||||||
done_flag: list[bool] = []
|
done_flag: list[bool] = []
|
||||||
self.active_id = "{:.6f}_{}".format(
|
self.active_id = "{:.6f}_{}".format(
|
||||||
time.time(), threading.current_thread().ident
|
time.time(), threading.current_thread().ident
|
||||||
@@ -295,13 +301,35 @@ class U2idx(object):
|
|||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
seen_rps: set[str] = set()
|
seen_rps: set[str] = set()
|
||||||
lim = min(lim, int(self.args.srch_hits))
|
clamp = int(self.args.srch_hits)
|
||||||
|
if lim >= clamp:
|
||||||
|
lim = clamp
|
||||||
|
clamped = True
|
||||||
|
else:
|
||||||
|
clamped = False
|
||||||
|
|
||||||
taglist = {}
|
taglist = {}
|
||||||
for (vtop, ptop, flags) in vols:
|
for vol in vols:
|
||||||
|
if lim < 0:
|
||||||
|
break
|
||||||
|
|
||||||
|
vtop = vol.vpath
|
||||||
|
ptop = vol.realpath
|
||||||
|
flags = vol.flags
|
||||||
|
|
||||||
cur = self.get_cur(ptop)
|
cur = self.get_cur(ptop)
|
||||||
if not cur:
|
if not cur:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
excl = []
|
||||||
|
for vp2 in self.asrv.vfs.all_vols.keys():
|
||||||
|
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
|
||||||
|
excl.append(vp2[len(vtop) :].lstrip("/"))
|
||||||
|
|
||||||
|
if self.args.srch_dbg:
|
||||||
|
t = "searching in volume /%s (%s), excludelist %s"
|
||||||
|
self.log(t % (vtop, ptop, excl), 5)
|
||||||
|
|
||||||
self.active_cur = cur
|
self.active_cur = cur
|
||||||
|
|
||||||
vuv = []
|
vuv = []
|
||||||
@@ -313,7 +341,7 @@ class U2idx(object):
|
|||||||
|
|
||||||
sret = []
|
sret = []
|
||||||
fk = flags.get("fk")
|
fk = flags.get("fk")
|
||||||
dots = flags.get("dotsrch")
|
dots = flags.get("dotsrch") and uname in vol.axs.udot
|
||||||
fk_alg = 2 if "fka" in flags else 1
|
fk_alg = 2 if "fka" in flags else 1
|
||||||
c = cur.execute(uq, tuple(vuv))
|
c = cur.execute(uq, tuple(vuv))
|
||||||
for hit in c:
|
for hit in c:
|
||||||
@@ -322,6 +350,13 @@ class U2idx(object):
|
|||||||
if rd.startswith("//") or fn.startswith("//"):
|
if rd.startswith("//") or fn.startswith("//"):
|
||||||
rd, fn = s3dec(rd, fn)
|
rd, fn = s3dec(rd, fn)
|
||||||
|
|
||||||
|
if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
|
||||||
|
if self.args.srch_dbg:
|
||||||
|
zs = vjoin(vjoin(vtop, rd), fn)
|
||||||
|
t = "database inconsistency in volume '/%s'; ignoring: %s"
|
||||||
|
self.log(t % (vtop, zs), 1)
|
||||||
|
continue
|
||||||
|
|
||||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
|
||||||
if not dots and "/." in ("/" + rp):
|
if not dots and "/." in ("/" + rp):
|
||||||
continue
|
continue
|
||||||
@@ -350,6 +385,19 @@ class U2idx(object):
|
|||||||
if lim < 0:
|
if lim < 0:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
if self.args.srch_dbg:
|
||||||
|
t = "in volume '/%s': hit: %s"
|
||||||
|
self.log(t % (vtop, rp), 5)
|
||||||
|
|
||||||
|
zs = vjoin(vtop, rp)
|
||||||
|
chk_vn, _ = self.asrv.vfs.get(zs, LEELOO_DALLAS, True, False)
|
||||||
|
chk_vn = chk_vn.dbv or chk_vn
|
||||||
|
if chk_vn.vpath != vtop:
|
||||||
|
raise Exception(
|
||||||
|
"database inconsistency! in volume '/%s' (%s), found file [%s] which belongs to volume '/%s' (%s)"
|
||||||
|
% (vtop, ptop, zs, chk_vn.vpath, chk_vn.realpath)
|
||||||
|
)
|
||||||
|
|
||||||
seen_rps.add(rp)
|
seen_rps.add(rp)
|
||||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
|
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
|
||||||
|
|
||||||
@@ -367,12 +415,16 @@ class U2idx(object):
|
|||||||
ret.extend(sret)
|
ret.extend(sret)
|
||||||
# print("[{}] {}".format(ptop, sret))
|
# print("[{}] {}".format(ptop, sret))
|
||||||
|
|
||||||
|
if self.args.srch_dbg:
|
||||||
|
t = "in volume '/%s': got %d hits, %d total so far"
|
||||||
|
self.log(t % (vtop, len(sret), len(ret)), 5)
|
||||||
|
|
||||||
done_flag.append(True)
|
done_flag.append(True)
|
||||||
self.active_id = ""
|
self.active_id = ""
|
||||||
|
|
||||||
ret.sort(key=itemgetter("rp"))
|
ret.sort(key=itemgetter("rp"))
|
||||||
|
|
||||||
return ret, list(taglist.keys()), lim < 0
|
return ret, list(taglist.keys()), lim < 0 and not clamped
|
||||||
|
|
||||||
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
def terminator(self, identifier: str, done_flag: list[bool]) -> None:
|
||||||
for _ in range(self.timeout):
|
for _ in range(self.timeout):
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ from queue import Queue
|
|||||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS
|
||||||
from .authsrv import LEELOO_DALLAS, SSEELOG, VFS, AuthSrv
|
from .authsrv import LEELOO_DALLAS, SSEELOG, VFS, AuthSrv
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .cfg import vf_bmap, vf_vmap
|
from .cfg import vf_bmap, vf_cmap, vf_vmap
|
||||||
from .fsutil import Fstab
|
from .fsutil import Fstab
|
||||||
from .mtag import MParser, MTag
|
from .mtag import MParser, MTag
|
||||||
from .util import (
|
from .util import (
|
||||||
@@ -65,6 +65,11 @@ from .util import (
|
|||||||
w8b64enc,
|
w8b64enc,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from pathlib import Path
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if HAVE_SQLITE3:
|
if HAVE_SQLITE3:
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
@@ -134,8 +139,6 @@ class Up2k(object):
|
|||||||
self.vol_act: dict[str, float] = {}
|
self.vol_act: dict[str, float] = {}
|
||||||
self.busy_aps: set[str] = set()
|
self.busy_aps: set[str] = set()
|
||||||
self.dupesched: dict[str, list[tuple[str, str, float]]] = {}
|
self.dupesched: dict[str, list[tuple[str, str, float]]] = {}
|
||||||
self.snap_persist_interval = 300 # persist unfinished index every 5 min
|
|
||||||
self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity
|
|
||||||
self.snap_prev: dict[str, Optional[tuple[int, float]]] = {}
|
self.snap_prev: dict[str, Optional[tuple[int, float]]] = {}
|
||||||
|
|
||||||
self.mtag: Optional[MTag] = None
|
self.mtag: Optional[MTag] = None
|
||||||
@@ -263,6 +266,7 @@ class Up2k(object):
|
|||||||
"hashq": self.n_hashq,
|
"hashq": self.n_hashq,
|
||||||
"tagq": self.n_tagq,
|
"tagq": self.n_tagq,
|
||||||
"mtpq": mtpq,
|
"mtpq": mtpq,
|
||||||
|
"dbwu": "{:.2f}".format(self.db_act),
|
||||||
"dbwt": "{:.2f}".format(
|
"dbwt": "{:.2f}".format(
|
||||||
min(1000 * 24 * 60 * 60 - 1, time.time() - self.db_act)
|
min(1000 * 24 * 60 * 60 - 1, time.time() - self.db_act)
|
||||||
),
|
),
|
||||||
@@ -415,50 +419,49 @@ class Up2k(object):
|
|||||||
def _check_lifetimes(self) -> float:
|
def _check_lifetimes(self) -> float:
|
||||||
now = time.time()
|
now = time.time()
|
||||||
timeout = now + 9001
|
timeout = now + 9001
|
||||||
if now: # diff-golf
|
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
||||||
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
lifetime = vol.flags.get("lifetime")
|
||||||
lifetime = vol.flags.get("lifetime")
|
if not lifetime:
|
||||||
if not lifetime:
|
continue
|
||||||
continue
|
|
||||||
|
|
||||||
cur = self.cur.get(vol.realpath)
|
cur = self.cur.get(vol.realpath)
|
||||||
if not cur:
|
if not cur:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
nrm = 0
|
nrm = 0
|
||||||
deadline = time.time() - lifetime
|
deadline = time.time() - lifetime
|
||||||
timeout = min(timeout, now + lifetime)
|
timeout = min(timeout, now + lifetime)
|
||||||
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
||||||
while True:
|
while True:
|
||||||
with self.mutex:
|
|
||||||
hits = cur.execute(q, (deadline,)).fetchall()
|
|
||||||
|
|
||||||
if not hits:
|
|
||||||
break
|
|
||||||
|
|
||||||
for rd, fn in hits:
|
|
||||||
if rd.startswith("//") or fn.startswith("//"):
|
|
||||||
rd, fn = s3dec(rd, fn)
|
|
||||||
|
|
||||||
fvp = ("%s/%s" % (rd, fn)).strip("/")
|
|
||||||
if vp:
|
|
||||||
fvp = "%s/%s" % (vp, fvp)
|
|
||||||
|
|
||||||
self._handle_rm(LEELOO_DALLAS, "", fvp, [], True)
|
|
||||||
nrm += 1
|
|
||||||
|
|
||||||
if nrm:
|
|
||||||
self.log("{} files graduated in {}".format(nrm, vp))
|
|
||||||
|
|
||||||
if timeout < 10:
|
|
||||||
continue
|
|
||||||
|
|
||||||
q = "select at from up where at > 0 order by at limit 1"
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
hits = cur.execute(q).fetchone()
|
hits = cur.execute(q, (deadline,)).fetchall()
|
||||||
|
|
||||||
if hits:
|
if not hits:
|
||||||
timeout = min(timeout, now + lifetime - (now - hits[0]))
|
break
|
||||||
|
|
||||||
|
for rd, fn in hits:
|
||||||
|
if rd.startswith("//") or fn.startswith("//"):
|
||||||
|
rd, fn = s3dec(rd, fn)
|
||||||
|
|
||||||
|
fvp = ("%s/%s" % (rd, fn)).strip("/")
|
||||||
|
if vp:
|
||||||
|
fvp = "%s/%s" % (vp, fvp)
|
||||||
|
|
||||||
|
self._handle_rm(LEELOO_DALLAS, "", fvp, [], True)
|
||||||
|
nrm += 1
|
||||||
|
|
||||||
|
if nrm:
|
||||||
|
self.log("{} files graduated in {}".format(nrm, vp))
|
||||||
|
|
||||||
|
if timeout < 10:
|
||||||
|
continue
|
||||||
|
|
||||||
|
q = "select at from up where at > 0 order by at limit 1"
|
||||||
|
with self.mutex:
|
||||||
|
hits = cur.execute(q).fetchone()
|
||||||
|
|
||||||
|
if hits:
|
||||||
|
timeout = min(timeout, now + lifetime - (now - hits[0]))
|
||||||
|
|
||||||
return timeout
|
return timeout
|
||||||
|
|
||||||
@@ -642,10 +645,7 @@ class Up2k(object):
|
|||||||
if self.stop:
|
if self.stop:
|
||||||
break
|
break
|
||||||
|
|
||||||
en: set[str] = set()
|
en = set(vol.flags.get("mte", {}))
|
||||||
if "mte" in vol.flags:
|
|
||||||
en = set(vol.flags["mte"].split(","))
|
|
||||||
|
|
||||||
self.entags[vol.realpath] = en
|
self.entags[vol.realpath] = en
|
||||||
|
|
||||||
if "e2d" in vol.flags:
|
if "e2d" in vol.flags:
|
||||||
@@ -794,6 +794,11 @@ class Up2k(object):
|
|||||||
except:
|
except:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
vpath = "?"
|
||||||
|
for k, v in self.asrv.vfs.all_vols.items():
|
||||||
|
if v.realpath == ptop:
|
||||||
|
vpath = k
|
||||||
|
|
||||||
_, flags = self._expr_idx_filter(flags)
|
_, flags = self._expr_idx_filter(flags)
|
||||||
|
|
||||||
ft = "\033[0;32m{}{:.0}"
|
ft = "\033[0;32m{}{:.0}"
|
||||||
@@ -801,13 +806,25 @@ class Up2k(object):
|
|||||||
fv = "\033[0;36m{}:\033[90m{}"
|
fv = "\033[0;36m{}:\033[90m{}"
|
||||||
fx = set(("html_head",))
|
fx = set(("html_head",))
|
||||||
fd = vf_bmap()
|
fd = vf_bmap()
|
||||||
|
fd.update(vf_cmap())
|
||||||
fd.update(vf_vmap())
|
fd.update(vf_vmap())
|
||||||
fd = {v: k for k, v in fd.items()}
|
fd = {v: k for k, v in fd.items()}
|
||||||
fl = {
|
fl = {
|
||||||
k: v
|
k: v
|
||||||
for k, v in flags.items()
|
for k, v in flags.items()
|
||||||
if k not in fd or v != getattr(self.args, fd[k])
|
if k not in fd
|
||||||
|
or (
|
||||||
|
v != getattr(self.args, fd[k])
|
||||||
|
and str(v) != str(getattr(self.args, fd[k]))
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
for k1, k2 in vf_cmap().items():
|
||||||
|
if k1 not in fl or k1 in fx:
|
||||||
|
continue
|
||||||
|
if str(fl[k1]) == str(getattr(self.args, k2)):
|
||||||
|
del fl[k1]
|
||||||
|
else:
|
||||||
|
fl[k1] = ",".join(x for x in fl[k1])
|
||||||
a = [
|
a = [
|
||||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||||
for k, v in fl.items()
|
for k, v in fl.items()
|
||||||
@@ -817,17 +834,9 @@ class Up2k(object):
|
|||||||
a = ["\033[90mall-default"]
|
a = ["\033[90mall-default"]
|
||||||
|
|
||||||
if a:
|
if a:
|
||||||
vpath = "?"
|
|
||||||
for k, v in self.asrv.vfs.all_vols.items():
|
|
||||||
if v.realpath == ptop:
|
|
||||||
vpath = k
|
|
||||||
|
|
||||||
if vpath:
|
|
||||||
vpath += "/"
|
|
||||||
|
|
||||||
zs = " ".join(sorted(a))
|
zs = " ".join(sorted(a))
|
||||||
zs = zs.replace("30mre.compile(", "30m(") # nohash
|
zs = zs.replace("90mre.compile(", "90m(") # nohash
|
||||||
self.log("/{} {}".format(vpath, zs), "35")
|
self.log("/{} {}".format(vpath + ("/" if vpath else ""), zs), "35")
|
||||||
|
|
||||||
reg = {}
|
reg = {}
|
||||||
drp = None
|
drp = None
|
||||||
@@ -872,14 +881,13 @@ class Up2k(object):
|
|||||||
try:
|
try:
|
||||||
if bos.makedirs(histpath):
|
if bos.makedirs(histpath):
|
||||||
hidedir(histpath)
|
hidedir(histpath)
|
||||||
except:
|
except Exception as ex:
|
||||||
|
t = "failed to initialize volume '/%s': %s"
|
||||||
|
self.log(t % (vpath, ex), 1)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
try:
|
try:
|
||||||
cur = self._open_db(db_path)
|
cur = self._open_db(db_path)
|
||||||
self.cur[ptop] = cur
|
|
||||||
self.volsize[cur] = 0
|
|
||||||
self.volnfiles[cur] = 0
|
|
||||||
|
|
||||||
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
|
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
|
||||||
dbd = flags["dbd"]
|
dbd = flags["dbd"]
|
||||||
@@ -913,6 +921,13 @@ class Up2k(object):
|
|||||||
|
|
||||||
cur.execute("pragma synchronous=" + sync)
|
cur.execute("pragma synchronous=" + sync)
|
||||||
cur.connection.commit()
|
cur.connection.commit()
|
||||||
|
|
||||||
|
self._verify_db_cache(cur, vpath)
|
||||||
|
|
||||||
|
self.cur[ptop] = cur
|
||||||
|
self.volsize[cur] = 0
|
||||||
|
self.volnfiles[cur] = 0
|
||||||
|
|
||||||
return cur, db_path
|
return cur, db_path
|
||||||
except:
|
except:
|
||||||
msg = "cannot use database at [{}]:\n{}"
|
msg = "cannot use database at [{}]:\n{}"
|
||||||
@@ -920,6 +935,25 @@ class Up2k(object):
|
|||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def _verify_db_cache(self, cur: "sqlite3.Cursor", vpath: str) -> None:
|
||||||
|
# check if volume config changed since last use; drop caches if so
|
||||||
|
zsl = [vpath] + list(sorted(self.asrv.vfs.all_vols.keys()))
|
||||||
|
zb = hashlib.sha1("\n".join(zsl).encode("utf-8", "replace")).digest()
|
||||||
|
vcfg = base64.urlsafe_b64encode(zb[:18]).decode("ascii")
|
||||||
|
|
||||||
|
c = cur.execute("select v from kv where k = 'volcfg'")
|
||||||
|
try:
|
||||||
|
(oldcfg,) = c.fetchone()
|
||||||
|
except:
|
||||||
|
oldcfg = ""
|
||||||
|
|
||||||
|
if oldcfg != vcfg:
|
||||||
|
cur.execute("delete from kv where k = 'volcfg'")
|
||||||
|
cur.execute("delete from dh")
|
||||||
|
cur.execute("delete from cv")
|
||||||
|
cur.execute("insert into kv values ('volcfg',?)", (vcfg,))
|
||||||
|
cur.connection.commit()
|
||||||
|
|
||||||
def _build_file_index(self, vol: VFS, all_vols: list[VFS]) -> tuple[bool, bool]:
|
def _build_file_index(self, vol: VFS, all_vols: list[VFS]) -> tuple[bool, bool]:
|
||||||
do_vac = False
|
do_vac = False
|
||||||
top = vol.realpath
|
top = vol.realpath
|
||||||
@@ -1084,7 +1118,11 @@ class Up2k(object):
|
|||||||
|
|
||||||
if stat.S_ISDIR(inf.st_mode):
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
rap = absreal(abspath)
|
rap = absreal(abspath)
|
||||||
if dev and inf.st_dev != dev:
|
if (
|
||||||
|
dev
|
||||||
|
and inf.st_dev != dev
|
||||||
|
and not (ANYWIN and bos.stat(rap).st_dev == dev)
|
||||||
|
):
|
||||||
self.log("skip xdev {}->{}: {}".format(dev, inf.st_dev, abspath), 6)
|
self.log("skip xdev {}->{}: {}".format(dev, inf.st_dev, abspath), 6)
|
||||||
continue
|
continue
|
||||||
if abspath in excl or rap in excl:
|
if abspath in excl or rap in excl:
|
||||||
@@ -1131,8 +1169,12 @@ class Up2k(object):
|
|||||||
files.append((sz, lmod, iname))
|
files.append((sz, lmod, iname))
|
||||||
liname = iname.lower()
|
liname = iname.lower()
|
||||||
if sz and (
|
if sz and (
|
||||||
iname in self.args.th_covers
|
iname in self.args.th_coversd
|
||||||
or (not cv and liname.rsplit(".", 1)[-1] in CV_EXTS)
|
or (
|
||||||
|
not cv
|
||||||
|
and liname.rsplit(".", 1)[-1] in CV_EXTS
|
||||||
|
and not iname.startswith(".")
|
||||||
|
)
|
||||||
):
|
):
|
||||||
cv = iname
|
cv = iname
|
||||||
|
|
||||||
@@ -1182,76 +1224,74 @@ class Up2k(object):
|
|||||||
abspath = os.path.join(cdir, fn)
|
abspath = os.path.join(cdir, fn)
|
||||||
nohash = reh.search(abspath) if reh else False
|
nohash = reh.search(abspath) if reh else False
|
||||||
|
|
||||||
if fn: # diff-golf
|
sql = "select w, mt, sz, at from up where rd = ? and fn = ?"
|
||||||
|
try:
|
||||||
|
c = db.c.execute(sql, (rd, fn))
|
||||||
|
except:
|
||||||
|
c = db.c.execute(sql, s3enc(self.mem_cur, rd, fn))
|
||||||
|
|
||||||
sql = "select w, mt, sz, at from up where rd = ? and fn = ?"
|
in_db = list(c.fetchall())
|
||||||
try:
|
if in_db:
|
||||||
c = db.c.execute(sql, (rd, fn))
|
self.pp.n -= 1
|
||||||
except:
|
dw, dts, dsz, at = in_db[0]
|
||||||
c = db.c.execute(sql, s3enc(self.mem_cur, rd, fn))
|
if len(in_db) > 1:
|
||||||
|
t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
|
||||||
|
rep_db = "\n".join([repr(x) for x in in_db])
|
||||||
|
self.log(t.format(top, rp, len(in_db), rep_db))
|
||||||
|
dts = -1
|
||||||
|
|
||||||
in_db = list(c.fetchall())
|
if fat32 and abs(dts - lmod) == 1:
|
||||||
if in_db:
|
dts = lmod
|
||||||
self.pp.n -= 1
|
|
||||||
dw, dts, dsz, at = in_db[0]
|
|
||||||
if len(in_db) > 1:
|
|
||||||
t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
|
|
||||||
rep_db = "\n".join([repr(x) for x in in_db])
|
|
||||||
self.log(t.format(top, rp, len(in_db), rep_db))
|
|
||||||
dts = -1
|
|
||||||
|
|
||||||
if fat32 and abs(dts - lmod) == 1:
|
if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
|
||||||
dts = lmod
|
continue
|
||||||
|
|
||||||
if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
|
t = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
||||||
continue
|
top, rp, dts, lmod, dsz, sz
|
||||||
|
)
|
||||||
t = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
|
self.log(t)
|
||||||
top, rp, dts, lmod, dsz, sz
|
self.db_rm(db.c, rd, fn, 0)
|
||||||
)
|
|
||||||
self.log(t)
|
|
||||||
self.db_rm(db.c, rd, fn, 0)
|
|
||||||
ret += 1
|
|
||||||
db.n += 1
|
|
||||||
in_db = []
|
|
||||||
else:
|
|
||||||
at = 0
|
|
||||||
|
|
||||||
self.pp.msg = "a{} {}".format(self.pp.n, abspath)
|
|
||||||
|
|
||||||
if nohash or not sz:
|
|
||||||
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
|
|
||||||
else:
|
|
||||||
if sz > 1024 * 1024:
|
|
||||||
self.log("file: {}".format(abspath))
|
|
||||||
|
|
||||||
try:
|
|
||||||
hashes = self._hashlist_from_file(
|
|
||||||
abspath, "a{}, ".format(self.pp.n)
|
|
||||||
)
|
|
||||||
except Exception as ex:
|
|
||||||
self.log("hash: {} @ [{}]".format(repr(ex), abspath))
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not hashes:
|
|
||||||
return -1
|
|
||||||
|
|
||||||
wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
|
|
||||||
|
|
||||||
# skip upload hooks by not providing vflags
|
|
||||||
self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, "", "", "", at)
|
|
||||||
db.n += 1
|
|
||||||
ret += 1
|
ret += 1
|
||||||
td = time.time() - db.t
|
db.n += 1
|
||||||
if db.n >= 4096 or td >= 60:
|
in_db = []
|
||||||
self.log("commit {} new files".format(db.n))
|
else:
|
||||||
db.c.connection.commit()
|
at = 0
|
||||||
db.n = 0
|
|
||||||
db.t = time.time()
|
self.pp.msg = "a{} {}".format(self.pp.n, abspath)
|
||||||
|
|
||||||
|
if nohash or not sz:
|
||||||
|
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
|
||||||
|
else:
|
||||||
|
if sz > 1024 * 1024:
|
||||||
|
self.log("file: {}".format(abspath))
|
||||||
|
|
||||||
|
try:
|
||||||
|
hashes = self._hashlist_from_file(
|
||||||
|
abspath, "a{}, ".format(self.pp.n)
|
||||||
|
)
|
||||||
|
except Exception as ex:
|
||||||
|
self.log("hash: {} @ [{}]".format(repr(ex), abspath))
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not hashes:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
|
||||||
|
|
||||||
|
# skip upload hooks by not providing vflags
|
||||||
|
self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, "", "", "", at)
|
||||||
|
db.n += 1
|
||||||
|
ret += 1
|
||||||
|
td = time.time() - db.t
|
||||||
|
if db.n >= 4096 or td >= 60:
|
||||||
|
self.log("commit {} new files".format(db.n))
|
||||||
|
db.c.connection.commit()
|
||||||
|
db.n = 0
|
||||||
|
db.t = time.time()
|
||||||
|
|
||||||
if not self.args.no_dhash:
|
if not self.args.no_dhash:
|
||||||
db.c.execute("delete from dh where d = ?", (drd,))
|
db.c.execute("delete from dh where d = ?", (drd,)) # type: ignore
|
||||||
db.c.execute("insert into dh values (?,?)", (drd, dhash))
|
db.c.execute("insert into dh values (?,?)", (drd, dhash)) # type: ignore
|
||||||
|
|
||||||
if self.stop:
|
if self.stop:
|
||||||
return -1
|
return -1
|
||||||
@@ -1270,7 +1310,7 @@ class Up2k(object):
|
|||||||
if n:
|
if n:
|
||||||
t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
|
t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
|
||||||
self.log(t.format(n, top, sh_rd))
|
self.log(t.format(n, top, sh_rd))
|
||||||
assert sh_erd
|
assert sh_erd # type: ignore
|
||||||
|
|
||||||
q = "delete from dh where (d = ? or d like ?||'%')"
|
q = "delete from dh where (d = ? or d like ?||'%')"
|
||||||
db.c.execute(q, (sh_erd, sh_erd + "/"))
|
db.c.execute(q, (sh_erd, sh_erd + "/"))
|
||||||
@@ -2105,7 +2145,9 @@ class Up2k(object):
|
|||||||
self.log("ST: {}".format(msg))
|
self.log("ST: {}".format(msg))
|
||||||
|
|
||||||
def _orz(self, db_path: str) -> "sqlite3.Cursor":
|
def _orz(self, db_path: str) -> "sqlite3.Cursor":
|
||||||
c = sqlite3.connect(db_path, self.timeout, check_same_thread=False).cursor()
|
c = sqlite3.connect(
|
||||||
|
db_path, timeout=self.timeout, check_same_thread=False
|
||||||
|
).cursor()
|
||||||
# c.connection.set_trace_callback(self._trace)
|
# c.connection.set_trace_callback(self._trace)
|
||||||
return c
|
return c
|
||||||
|
|
||||||
@@ -2135,7 +2177,7 @@ class Up2k(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
nfiles = next(cur.execute("select count(w) from up"))[0]
|
||||||
self.log("OK: {} |{}|".format(db_path, nfiles))
|
self.log(" {} |{}|".format(db_path, nfiles), "90")
|
||||||
return cur
|
return cur
|
||||||
except:
|
except:
|
||||||
self.log("WARN: could not list files; DB corrupt?\n" + min_ex())
|
self.log("WARN: could not list files; DB corrupt?\n" + min_ex())
|
||||||
@@ -2169,7 +2211,7 @@ class Up2k(object):
|
|||||||
t = "native sqlite3 backup failed; using fallback method:\n"
|
t = "native sqlite3 backup failed; using fallback method:\n"
|
||||||
self.log(t + min_ex())
|
self.log(t + min_ex())
|
||||||
finally:
|
finally:
|
||||||
c2.close()
|
c2.close() # type: ignore
|
||||||
|
|
||||||
db = cur.connection
|
db = cur.connection
|
||||||
cur.close()
|
cur.close()
|
||||||
@@ -2339,6 +2381,9 @@ class Up2k(object):
|
|||||||
vols = [(ptop, jcur)] if jcur else []
|
vols = [(ptop, jcur)] if jcur else []
|
||||||
if vfs.flags.get("xlink"):
|
if vfs.flags.get("xlink"):
|
||||||
vols += [(k, v) for k, v in self.cur.items() if k != ptop]
|
vols += [(k, v) for k, v in self.cur.items() if k != ptop]
|
||||||
|
if vfs.flags.get("up_ts", "") == "fu" or not cj["lmod"]:
|
||||||
|
# force upload time rather than last-modified
|
||||||
|
cj["lmod"] = int(time.time())
|
||||||
|
|
||||||
alts: list[tuple[int, int, dict[str, Any]]] = []
|
alts: list[tuple[int, int, dict[str, Any]]] = []
|
||||||
for ptop, cur in vols:
|
for ptop, cur in vols:
|
||||||
@@ -2713,7 +2758,18 @@ class Up2k(object):
|
|||||||
raise Exception("symlink-fallback disabled in cfg")
|
raise Exception("symlink-fallback disabled in cfg")
|
||||||
|
|
||||||
if not linked:
|
if not linked:
|
||||||
os.symlink(fsenc(lsrc), fsenc(ldst))
|
if ANYWIN:
|
||||||
|
Path(ldst).symlink_to(lsrc)
|
||||||
|
if not bos.path.exists(dst):
|
||||||
|
try:
|
||||||
|
bos.unlink(dst)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
t = "the created symlink [%s] did not resolve to [%s]"
|
||||||
|
raise Exception(t % (ldst, lsrc))
|
||||||
|
else:
|
||||||
|
os.symlink(fsenc(lsrc), fsenc(ldst))
|
||||||
|
|
||||||
linked = True
|
linked = True
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
self.log("cannot link; creating copy: " + repr(ex))
|
self.log("cannot link; creating copy: " + repr(ex))
|
||||||
@@ -2877,7 +2933,6 @@ class Up2k(object):
|
|||||||
|
|
||||||
self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
|
self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
|
||||||
if cur:
|
if cur:
|
||||||
self.db_rm(cur, rd, fn, job["size"])
|
|
||||||
self.db_add(cur, vflags, rd, fn, lmod, *z2[3:])
|
self.db_add(cur, vflags, rd, fn, lmod, *z2[3:])
|
||||||
|
|
||||||
if cur:
|
if cur:
|
||||||
@@ -2920,7 +2975,6 @@ class Up2k(object):
|
|||||||
|
|
||||||
self.db_act = self.vol_act[ptop] = time.time()
|
self.db_act = self.vol_act[ptop] = time.time()
|
||||||
try:
|
try:
|
||||||
self.db_rm(cur, rd, fn, sz)
|
|
||||||
self.db_add(
|
self.db_add(
|
||||||
cur,
|
cur,
|
||||||
vflags,
|
vflags,
|
||||||
@@ -2979,6 +3033,8 @@ class Up2k(object):
|
|||||||
at: float,
|
at: float,
|
||||||
skip_xau: bool = False,
|
skip_xau: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
self.db_rm(db, rd, fn, sz)
|
||||||
|
|
||||||
sql = "insert into up values (?,?,?,?,?,?,?)"
|
sql = "insert into up values (?,?,?,?,?,?,?)"
|
||||||
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
|
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
|
||||||
try:
|
try:
|
||||||
@@ -3035,7 +3091,7 @@ class Up2k(object):
|
|||||||
with self.rescan_cond:
|
with self.rescan_cond:
|
||||||
self.rescan_cond.notify_all()
|
self.rescan_cond.notify_all()
|
||||||
|
|
||||||
if rd and sz and fn.lower() in self.args.th_covers:
|
if rd and sz and fn.lower() in self.args.th_coversd:
|
||||||
# wasteful; db_add will re-index actual covers
|
# wasteful; db_add will re-index actual covers
|
||||||
# but that won't catch existing files
|
# but that won't catch existing files
|
||||||
crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
|
crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
|
||||||
@@ -3140,7 +3196,13 @@ class Up2k(object):
|
|||||||
break
|
break
|
||||||
|
|
||||||
abspath = djoin(adir, fn)
|
abspath = djoin(adir, fn)
|
||||||
st = bos.stat(abspath)
|
st = stl = bos.lstat(abspath)
|
||||||
|
if stat.S_ISLNK(st.st_mode):
|
||||||
|
try:
|
||||||
|
st = bos.stat(abspath)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
volpath = "{}/{}".format(vrem, fn).strip("/")
|
volpath = "{}/{}".format(vrem, fn).strip("/")
|
||||||
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
|
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
|
||||||
self.log("rm {}\n {}".format(vpath, abspath))
|
self.log("rm {}\n {}".format(vpath, abspath))
|
||||||
@@ -3153,7 +3215,7 @@ class Up2k(object):
|
|||||||
vpath,
|
vpath,
|
||||||
"",
|
"",
|
||||||
uname,
|
uname,
|
||||||
st.st_mtime,
|
stl.st_mtime,
|
||||||
st.st_size,
|
st.st_size,
|
||||||
ip,
|
ip,
|
||||||
0,
|
0,
|
||||||
@@ -3183,7 +3245,7 @@ class Up2k(object):
|
|||||||
vpath,
|
vpath,
|
||||||
"",
|
"",
|
||||||
uname,
|
uname,
|
||||||
st.st_mtime,
|
stl.st_mtime,
|
||||||
st.st_size,
|
st.st_size,
|
||||||
ip,
|
ip,
|
||||||
0,
|
0,
|
||||||
@@ -3300,28 +3362,27 @@ class Up2k(object):
|
|||||||
if bos.path.exists(dabs):
|
if bos.path.exists(dabs):
|
||||||
raise Pebkac(400, "mv2: target file exists")
|
raise Pebkac(400, "mv2: target file exists")
|
||||||
|
|
||||||
stl = bos.lstat(sabs)
|
is_link = is_dirlink = False
|
||||||
try:
|
st = stl = bos.lstat(sabs)
|
||||||
st = bos.stat(sabs)
|
if stat.S_ISLNK(stl.st_mode):
|
||||||
except:
|
is_link = True
|
||||||
st = stl
|
try:
|
||||||
|
st = bos.stat(sabs)
|
||||||
|
is_dirlink = stat.S_ISDIR(st.st_mode)
|
||||||
|
except:
|
||||||
|
pass # broken symlink; keep as-is
|
||||||
|
|
||||||
xbr = svn.flags.get("xbr")
|
xbr = svn.flags.get("xbr")
|
||||||
xar = dvn.flags.get("xar")
|
xar = dvn.flags.get("xar")
|
||||||
if xbr:
|
if xbr:
|
||||||
if not runhook(
|
if not runhook(
|
||||||
self.log, xbr, sabs, svp, "", uname, st.st_mtime, st.st_size, "", 0, ""
|
self.log, xbr, sabs, svp, "", uname, stl.st_mtime, st.st_size, "", 0, ""
|
||||||
):
|
):
|
||||||
t = "move blocked by xbr server config: {}".format(svp)
|
t = "move blocked by xbr server config: {}".format(svp)
|
||||||
self.log(t, 1)
|
self.log(t, 1)
|
||||||
raise Pebkac(405, t)
|
raise Pebkac(405, t)
|
||||||
|
|
||||||
is_xvol = svn.realpath != dvn.realpath
|
is_xvol = svn.realpath != dvn.realpath
|
||||||
if stat.S_ISLNK(stl.st_mode):
|
|
||||||
is_dirlink = stat.S_ISDIR(st.st_mode)
|
|
||||||
is_link = True
|
|
||||||
else:
|
|
||||||
is_link = is_dirlink = False
|
|
||||||
|
|
||||||
bos.makedirs(os.path.dirname(dabs))
|
bos.makedirs(os.path.dirname(dabs))
|
||||||
|
|
||||||
@@ -3348,7 +3409,7 @@ class Up2k(object):
|
|||||||
c2 = self.cur.get(dvn.realpath)
|
c2 = self.cur.get(dvn.realpath)
|
||||||
|
|
||||||
if ftime_ is None:
|
if ftime_ is None:
|
||||||
ftime = st.st_mtime
|
ftime = stl.st_mtime
|
||||||
fsize = st.st_size
|
fsize = st.st_size
|
||||||
else:
|
else:
|
||||||
ftime = ftime_
|
ftime = ftime_
|
||||||
@@ -3390,7 +3451,16 @@ class Up2k(object):
|
|||||||
if is_xvol and has_dupes:
|
if is_xvol and has_dupes:
|
||||||
raise OSError(errno.EXDEV, "src is symlink")
|
raise OSError(errno.EXDEV, "src is symlink")
|
||||||
|
|
||||||
atomic_move(sabs, dabs)
|
if is_link and st != stl:
|
||||||
|
# relink non-broken symlinks to still work after the move,
|
||||||
|
# but only resolve 1st level to maintain relativity
|
||||||
|
dlink = bos.readlink(sabs)
|
||||||
|
dlink = os.path.join(os.path.dirname(sabs), dlink)
|
||||||
|
dlink = bos.path.abspath(dlink)
|
||||||
|
self._symlink(dlink, dabs, dvn.flags, lmod=ftime)
|
||||||
|
bos.unlink(sabs)
|
||||||
|
else:
|
||||||
|
atomic_move(sabs, dabs)
|
||||||
|
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
if ex.errno != errno.EXDEV:
|
if ex.errno != errno.EXDEV:
|
||||||
@@ -3557,6 +3627,8 @@ class Up2k(object):
|
|||||||
except:
|
except:
|
||||||
self.log("relink: not found: [{}]".format(ap))
|
self.log("relink: not found: [{}]".format(ap))
|
||||||
|
|
||||||
|
# self.log("full:\n" + "\n".join(" {:90}: {}".format(*x) for x in full.items()))
|
||||||
|
# self.log("links:\n" + "\n".join(" {:90}: {}".format(*x) for x in links.items()))
|
||||||
if not dabs and not full and links:
|
if not dabs and not full and links:
|
||||||
# deleting final remaining full copy; swap it with a symlink
|
# deleting final remaining full copy; swap it with a symlink
|
||||||
slabs = list(sorted(links.keys()))[0]
|
slabs = list(sorted(links.keys()))[0]
|
||||||
@@ -3574,12 +3646,45 @@ class Up2k(object):
|
|||||||
dabs = list(sorted(full.keys()))[0]
|
dabs = list(sorted(full.keys()))[0]
|
||||||
|
|
||||||
for alink, parts in links.items():
|
for alink, parts in links.items():
|
||||||
lmod = None
|
lmod = 0.0
|
||||||
try:
|
try:
|
||||||
if alink != sabs and absreal(alink) != sabs:
|
faulty = False
|
||||||
continue
|
ldst = alink
|
||||||
|
try:
|
||||||
|
for n in range(40): # MAXSYMLINKS
|
||||||
|
zs = bos.readlink(ldst)
|
||||||
|
ldst = os.path.join(os.path.dirname(ldst), zs)
|
||||||
|
ldst = bos.path.abspath(ldst)
|
||||||
|
if not bos.path.islink(ldst):
|
||||||
|
break
|
||||||
|
|
||||||
self.log("relinking [{}] to [{}]".format(alink, dabs))
|
if ldst == sabs:
|
||||||
|
t = "relink because level %d would break:"
|
||||||
|
self.log(t % (n,), 6)
|
||||||
|
faulty = True
|
||||||
|
except Exception as ex:
|
||||||
|
self.log("relink because walk failed: %s; %r" % (ex, ex), 3)
|
||||||
|
faulty = True
|
||||||
|
|
||||||
|
zs = absreal(alink)
|
||||||
|
if ldst != zs:
|
||||||
|
t = "relink because computed != actual destination:\n %s\n %s"
|
||||||
|
self.log(t % (ldst, zs), 3)
|
||||||
|
ldst = zs
|
||||||
|
faulty = True
|
||||||
|
|
||||||
|
if bos.path.islink(ldst):
|
||||||
|
raise Exception("broken symlink: %s" % (alink,))
|
||||||
|
|
||||||
|
if alink != sabs and ldst != sabs and not faulty:
|
||||||
|
continue # original symlink OK; leave it be
|
||||||
|
|
||||||
|
except Exception as ex:
|
||||||
|
t = "relink because symlink verification failed: %s; %r"
|
||||||
|
self.log(t % (ex, ex), 3)
|
||||||
|
|
||||||
|
self.log("relinking [%s] to [%s]" % (alink, dabs))
|
||||||
|
try:
|
||||||
lmod = bos.path.getmtime(alink, False)
|
lmod = bos.path.getmtime(alink, False)
|
||||||
bos.unlink(alink)
|
bos.unlink(alink)
|
||||||
except:
|
except:
|
||||||
@@ -3745,13 +3850,16 @@ class Up2k(object):
|
|||||||
self._finish_upload(job["ptop"], job["wark"])
|
self._finish_upload(job["ptop"], job["wark"])
|
||||||
|
|
||||||
def _snapshot(self) -> None:
|
def _snapshot(self) -> None:
|
||||||
slp = self.snap_persist_interval
|
slp = self.args.snap_wri
|
||||||
|
if not slp or self.args.no_snap:
|
||||||
|
return
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
time.sleep(slp)
|
time.sleep(slp)
|
||||||
if self.pp:
|
if self.pp:
|
||||||
slp = 5
|
slp = 5
|
||||||
else:
|
else:
|
||||||
slp = self.snap_persist_interval
|
slp = self.args.snap_wri
|
||||||
self.do_snapshot()
|
self.do_snapshot()
|
||||||
|
|
||||||
def do_snapshot(self) -> None:
|
def do_snapshot(self) -> None:
|
||||||
@@ -3765,11 +3873,8 @@ class Up2k(object):
|
|||||||
if not histpath:
|
if not histpath:
|
||||||
return
|
return
|
||||||
|
|
||||||
rm = [
|
idrop = self.args.snap_drop * 60
|
||||||
x
|
rm = [x for x in reg.values() if x["need"] and now - x["poke"] >= idrop]
|
||||||
for x in reg.values()
|
|
||||||
if x["need"] and now - x["poke"] > self.snap_discard_interval
|
|
||||||
]
|
|
||||||
|
|
||||||
if self.args.nw:
|
if self.args.nw:
|
||||||
lost = []
|
lost = []
|
||||||
@@ -3894,45 +3999,58 @@ class Up2k(object):
|
|||||||
self.n_hashq -= 1
|
self.n_hashq -= 1
|
||||||
# self.log("hashq {}".format(self.n_hashq))
|
# self.log("hashq {}".format(self.n_hashq))
|
||||||
|
|
||||||
ptop, vtop, rd, fn, ip, at, usr, skip_xau = self.hashq.get()
|
task = self.hashq.get()
|
||||||
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
|
if len(task) != 8:
|
||||||
if "e2d" not in self.flags[ptop]:
|
raise Exception("invalid hash task")
|
||||||
continue
|
|
||||||
|
|
||||||
abspath = djoin(ptop, rd, fn)
|
try:
|
||||||
self.log("hashing " + abspath)
|
if not self._hash_t(task):
|
||||||
inf = bos.stat(abspath)
|
|
||||||
if not inf.st_size:
|
|
||||||
wark = up2k_wark_from_metadata(
|
|
||||||
self.salt, inf.st_size, int(inf.st_mtime), rd, fn
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
hashes = self._hashlist_from_file(abspath)
|
|
||||||
if not hashes:
|
|
||||||
return
|
return
|
||||||
|
except Exception as ex:
|
||||||
|
self.log("failed to hash %s: %s" % (task, ex), 1)
|
||||||
|
|
||||||
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
|
def _hash_t(self, task: tuple[str, str, str, str, str, float, str, bool]) -> bool:
|
||||||
|
ptop, vtop, rd, fn, ip, at, usr, skip_xau = task
|
||||||
|
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
|
||||||
|
if "e2d" not in self.flags[ptop]:
|
||||||
|
return True
|
||||||
|
|
||||||
with self.mutex:
|
abspath = djoin(ptop, rd, fn)
|
||||||
self.idx_wark(
|
self.log("hashing " + abspath)
|
||||||
self.flags[ptop],
|
inf = bos.stat(abspath)
|
||||||
rd,
|
if not inf.st_size:
|
||||||
fn,
|
wark = up2k_wark_from_metadata(
|
||||||
inf.st_mtime,
|
self.salt, inf.st_size, int(inf.st_mtime), rd, fn
|
||||||
inf.st_size,
|
)
|
||||||
ptop,
|
else:
|
||||||
vtop,
|
hashes = self._hashlist_from_file(abspath)
|
||||||
wark,
|
if not hashes:
|
||||||
"",
|
return False
|
||||||
usr,
|
|
||||||
ip,
|
|
||||||
at,
|
|
||||||
skip_xau,
|
|
||||||
)
|
|
||||||
|
|
||||||
if at and time.time() - at > 30:
|
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
|
||||||
with self.rescan_cond:
|
|
||||||
self.rescan_cond.notify_all()
|
with self.mutex:
|
||||||
|
self.idx_wark(
|
||||||
|
self.flags[ptop],
|
||||||
|
rd,
|
||||||
|
fn,
|
||||||
|
inf.st_mtime,
|
||||||
|
inf.st_size,
|
||||||
|
ptop,
|
||||||
|
vtop,
|
||||||
|
wark,
|
||||||
|
"",
|
||||||
|
usr,
|
||||||
|
ip,
|
||||||
|
at,
|
||||||
|
skip_xau,
|
||||||
|
)
|
||||||
|
|
||||||
|
if at and time.time() - at > 30:
|
||||||
|
with self.rescan_cond:
|
||||||
|
self.rescan_cond.notify_all()
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
def hash_file(
|
def hash_file(
|
||||||
self,
|
self,
|
||||||
|
|||||||
@@ -25,7 +25,6 @@ import threading
|
|||||||
import time
|
import time
|
||||||
import traceback
|
import traceback
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
from datetime import datetime
|
|
||||||
from email.utils import formatdate
|
from email.utils import formatdate
|
||||||
|
|
||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||||
@@ -35,6 +34,35 @@ from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
|
|||||||
from .__version__ import S_BUILD_DT, S_VERSION
|
from .__version__ import S_BUILD_DT, S_VERSION
|
||||||
from .stolen import surrogateescape
|
from .stolen import surrogateescape
|
||||||
|
|
||||||
|
try:
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
except:
|
||||||
|
from datetime import datetime, timedelta, tzinfo
|
||||||
|
|
||||||
|
TD_ZERO = timedelta(0)
|
||||||
|
|
||||||
|
class _UTC(tzinfo):
|
||||||
|
def utcoffset(self, dt):
|
||||||
|
return TD_ZERO
|
||||||
|
|
||||||
|
def tzname(self, dt):
|
||||||
|
return "UTC"
|
||||||
|
|
||||||
|
def dst(self, dt):
|
||||||
|
return TD_ZERO
|
||||||
|
|
||||||
|
UTC = _UTC()
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 7) or (
|
||||||
|
sys.version_info >= (3, 6) and platform.python_implementation() == "CPython"
|
||||||
|
):
|
||||||
|
ODict = dict
|
||||||
|
else:
|
||||||
|
from collections import OrderedDict as ODict
|
||||||
|
|
||||||
|
|
||||||
def _ens(want: str) -> tuple[int, ...]:
|
def _ens(want: str) -> tuple[int, ...]:
|
||||||
ret: list[int] = []
|
ret: list[int] = []
|
||||||
@@ -87,6 +115,11 @@ if True: # pylint: disable=using-constant-test
|
|||||||
import typing
|
import typing
|
||||||
from typing import Any, Generator, Optional, Pattern, Protocol, Union
|
from typing import Any, Generator, Optional, Pattern, Protocol, Union
|
||||||
|
|
||||||
|
try:
|
||||||
|
from typing import LiteralString
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
class RootLogger(Protocol):
|
class RootLogger(Protocol):
|
||||||
def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
return None
|
return None
|
||||||
@@ -116,15 +149,15 @@ if not PY2:
|
|||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
else:
|
else:
|
||||||
from StringIO import StringIO as BytesIO
|
from StringIO import StringIO as BytesIO # type: ignore
|
||||||
from urllib import quote # pylint: disable=no-name-in-module
|
from urllib import quote # type: ignore # pylint: disable=no-name-in-module
|
||||||
from urllib import unquote # pylint: disable=no-name-in-module
|
from urllib import unquote # type: ignore # pylint: disable=no-name-in-module
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
struct.unpack(b">i", b"idgi")
|
struct.unpack(b">i", b"idgi")
|
||||||
spack = struct.pack
|
spack = struct.pack # type: ignore
|
||||||
sunpack = struct.unpack
|
sunpack = struct.unpack # type: ignore
|
||||||
except:
|
except:
|
||||||
|
|
||||||
def spack(fmt: bytes, *a: Any) -> bytes:
|
def spack(fmt: bytes, *a: Any) -> bytes:
|
||||||
@@ -261,6 +294,13 @@ EXTS["vnd.mozilla.apng"] = "png"
|
|||||||
MAGIC_MAP = {"jpeg": "jpg"}
|
MAGIC_MAP = {"jpeg": "jpg"}
|
||||||
|
|
||||||
|
|
||||||
|
DEF_EXP = "self.ip self.ua self.uname self.host cfg.name cfg.logout vf.scan vf.thsize hdr.cf_ipcountry srv.itime srv.htime"
|
||||||
|
|
||||||
|
DEF_MTE = "circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash"
|
||||||
|
|
||||||
|
DEF_MTH = ".vq,.aq,vc,ac,fmt,res,.fps"
|
||||||
|
|
||||||
|
|
||||||
REKOBO_KEY = {
|
REKOBO_KEY = {
|
||||||
v: ln.split(" ", 1)[0]
|
v: ln.split(" ", 1)[0]
|
||||||
for ln in """
|
for ln in """
|
||||||
@@ -343,6 +383,7 @@ def py_desc() -> str:
|
|||||||
|
|
||||||
|
|
||||||
def _sqlite_ver() -> str:
|
def _sqlite_ver() -> str:
|
||||||
|
assert sqlite3 # type: ignore
|
||||||
try:
|
try:
|
||||||
co = sqlite3.connect(":memory:")
|
co = sqlite3.connect(":memory:")
|
||||||
cur = co.cursor()
|
cur = co.cursor()
|
||||||
@@ -1033,6 +1074,17 @@ def nuprint(msg: str) -> None:
|
|||||||
uprint("{}\n".format(msg))
|
uprint("{}\n".format(msg))
|
||||||
|
|
||||||
|
|
||||||
|
def dedent(txt: str) -> str:
|
||||||
|
pad = 64
|
||||||
|
lns = txt.replace("\r", "").split("\n")
|
||||||
|
for ln in lns:
|
||||||
|
zs = ln.lstrip()
|
||||||
|
pad2 = len(ln) - len(zs)
|
||||||
|
if zs and pad > pad2:
|
||||||
|
pad = pad2
|
||||||
|
return "\n".join([ln[pad:] for ln in lns])
|
||||||
|
|
||||||
|
|
||||||
def rice_tid() -> str:
|
def rice_tid() -> str:
|
||||||
tid = threading.current_thread().ident
|
tid = threading.current_thread().ident
|
||||||
c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
|
c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
|
||||||
@@ -1118,7 +1170,7 @@ def stackmon(fp: str, ival: float, suffix: str) -> None:
|
|||||||
buf = lzma.compress(buf, preset=0)
|
buf = lzma.compress(buf, preset=0)
|
||||||
|
|
||||||
if "%" in fp:
|
if "%" in fp:
|
||||||
dt = datetime.utcnow()
|
dt = datetime.now(UTC)
|
||||||
for fs in "YmdHMS":
|
for fs in "YmdHMS":
|
||||||
fs = "%" + fs
|
fs = "%" + fs
|
||||||
if fs in fp:
|
if fs in fp:
|
||||||
@@ -1528,8 +1580,8 @@ def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
|
|||||||
|
|
||||||
raise Pebkac(
|
raise Pebkac(
|
||||||
400,
|
400,
|
||||||
"protocol error while reading headers:\n"
|
"protocol error while reading headers",
|
||||||
+ ret.decode("utf-8", "replace"),
|
log=ret.decode("utf-8", "replace"),
|
||||||
)
|
)
|
||||||
|
|
||||||
ofs = ret.find(b"\r\n\r\n")
|
ofs = ret.find(b"\r\n\r\n")
|
||||||
@@ -1612,16 +1664,15 @@ def gen_filekey_dbg(
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
|
||||||
v = v.replace("%", "%25").replace(";", "%3B")
|
v = v.replace("%", "%25").replace(";", "%3B")
|
||||||
if dur:
|
if dur:
|
||||||
exp = formatdate(time.time() + dur, usegmt=True)
|
exp = formatdate(time.time() + dur, usegmt=True)
|
||||||
else:
|
else:
|
||||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||||
|
|
||||||
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
|
t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=Lax"
|
||||||
k, v, r, exp, "; Secure" if tls else ""
|
return t % (k, v, r, exp, "; Secure" if tls else "", txt)
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def humansize(sz: float, terse: bool = False) -> str:
|
def humansize(sz: float, terse: bool = False) -> str:
|
||||||
@@ -1738,7 +1789,16 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
|
|||||||
return fn.strip()
|
return fn.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_vpath(vp: str, ok: str, bad: list[str]) -> str:
|
||||||
|
parts = vp.replace(os.sep, "/").split("/")
|
||||||
|
ret = [sanitize_fn(x, ok, bad) for x in parts]
|
||||||
|
return "/".join(ret)
|
||||||
|
|
||||||
|
|
||||||
def relchk(rp: str) -> str:
|
def relchk(rp: str) -> str:
|
||||||
|
if "\x00" in rp:
|
||||||
|
return "[nul]"
|
||||||
|
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
if "\n" in rp or "\r" in rp:
|
if "\n" in rp or "\r" in rp:
|
||||||
return "x\nx"
|
return "x\nx"
|
||||||
@@ -1774,6 +1834,26 @@ def exclude_dotfiles(filepaths: list[str]) -> list[str]:
|
|||||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||||
|
|
||||||
|
|
||||||
|
def odfusion(
|
||||||
|
base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str
|
||||||
|
) -> ODict[str, bool]:
|
||||||
|
# merge an "ordered set" (just a dict really) with another list of keys
|
||||||
|
words0 = [x for x in oth.split(",") if x]
|
||||||
|
words1 = [x for x in oth[1:].split(",") if x]
|
||||||
|
|
||||||
|
ret = base.copy()
|
||||||
|
if oth.startswith("+"):
|
||||||
|
for k in words1:
|
||||||
|
ret[k] = True
|
||||||
|
elif oth[:1] in ("-", "/"):
|
||||||
|
for k in words1:
|
||||||
|
ret.pop(k, None)
|
||||||
|
else:
|
||||||
|
ret = ODict.fromkeys(words0, True)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
def ipnorm(ip: str) -> str:
|
def ipnorm(ip: str) -> str:
|
||||||
if ":" in ip:
|
if ":" in ip:
|
||||||
# assume /64 clients; drop 4 groups
|
# assume /64 clients; drop 4 groups
|
||||||
@@ -1926,10 +2006,10 @@ else:
|
|||||||
# moonrunes become \x3f with bytestrings,
|
# moonrunes become \x3f with bytestrings,
|
||||||
# losing mojibake support is worth
|
# losing mojibake support is worth
|
||||||
def _not_actually_mbcs_enc(txt: str) -> bytes:
|
def _not_actually_mbcs_enc(txt: str) -> bytes:
|
||||||
return txt
|
return txt # type: ignore
|
||||||
|
|
||||||
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
def _not_actually_mbcs_dec(txt: bytes) -> str:
|
||||||
return txt
|
return txt # type: ignore
|
||||||
|
|
||||||
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
|
||||||
fsdec = _not_actually_mbcs_dec
|
fsdec = _not_actually_mbcs_dec
|
||||||
@@ -1988,6 +2068,7 @@ def atomic_move(usrc: str, udst: str) -> None:
|
|||||||
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
|
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
|
||||||
try:
|
try:
|
||||||
# some fuses misbehave
|
# some fuses misbehave
|
||||||
|
assert ctypes
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
bfree = ctypes.c_ulonglong(0)
|
bfree = ctypes.c_ulonglong(0)
|
||||||
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
|
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
|
||||||
@@ -2390,6 +2471,7 @@ def getalive(pids: list[int], pgid: int) -> list[int]:
|
|||||||
alive.append(pid)
|
alive.append(pid)
|
||||||
else:
|
else:
|
||||||
# windows doesn't have pgroups; assume
|
# windows doesn't have pgroups; assume
|
||||||
|
assert psutil
|
||||||
psutil.Process(pid)
|
psutil.Process(pid)
|
||||||
alive.append(pid)
|
alive.append(pid)
|
||||||
except:
|
except:
|
||||||
@@ -2407,6 +2489,7 @@ def killtree(root: int) -> None:
|
|||||||
pgid = 0
|
pgid = 0
|
||||||
|
|
||||||
if HAVE_PSUTIL:
|
if HAVE_PSUTIL:
|
||||||
|
assert psutil
|
||||||
pids = [root]
|
pids = [root]
|
||||||
parent = psutil.Process(root)
|
parent = psutil.Process(root)
|
||||||
for child in parent.children(recursive=True):
|
for child in parent.children(recursive=True):
|
||||||
@@ -2446,9 +2529,34 @@ def killtree(root: int) -> None:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _find_nice() -> str:
|
||||||
|
if WINDOWS:
|
||||||
|
return "" # use creationflags
|
||||||
|
|
||||||
|
try:
|
||||||
|
zs = shutil.which("nice")
|
||||||
|
if zs:
|
||||||
|
return zs
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# busted PATHs and/or py2
|
||||||
|
for zs in ("/bin", "/sbin", "/usr/bin", "/usr/sbin"):
|
||||||
|
zs += "/nice"
|
||||||
|
if os.path.exists(zs):
|
||||||
|
return zs
|
||||||
|
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
NICES = _find_nice()
|
||||||
|
NICEB = NICES.encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
def runcmd(
|
def runcmd(
|
||||||
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
|
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
|
||||||
) -> tuple[int, str, str]:
|
) -> tuple[int, str, str]:
|
||||||
|
isbytes = isinstance(argv[0], (bytes, bytearray))
|
||||||
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
|
||||||
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
|
||||||
|
|
||||||
@@ -2462,13 +2570,22 @@ def runcmd(
|
|||||||
berr: bytes
|
berr: bytes
|
||||||
|
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
if isinstance(argv[0], (bytes, bytearray)):
|
if isbytes:
|
||||||
if argv[0] in CMD_EXEB:
|
if argv[0] in CMD_EXEB:
|
||||||
argv[0] += b".exe"
|
argv[0] += b".exe"
|
||||||
else:
|
else:
|
||||||
if argv[0] in CMD_EXES:
|
if argv[0] in CMD_EXES:
|
||||||
argv[0] += ".exe"
|
argv[0] += ".exe"
|
||||||
|
|
||||||
|
if ka.pop("nice", None):
|
||||||
|
if WINDOWS:
|
||||||
|
ka["creationflags"] = 0x4000
|
||||||
|
elif NICEB:
|
||||||
|
if isbytes:
|
||||||
|
argv = [NICEB] + argv
|
||||||
|
else:
|
||||||
|
argv = [NICES] + argv
|
||||||
|
|
||||||
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
|
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
|
||||||
if not timeout or PY2:
|
if not timeout or PY2:
|
||||||
bout, berr = p.communicate(sin)
|
bout, berr = p.communicate(sin)
|
||||||
@@ -2616,13 +2733,13 @@ def _parsehook(
|
|||||||
|
|
||||||
sp_ka = {
|
sp_ka = {
|
||||||
"env": env,
|
"env": env,
|
||||||
|
"nice": True,
|
||||||
"timeout": tout,
|
"timeout": tout,
|
||||||
"kill": kill,
|
"kill": kill,
|
||||||
"capture": cap,
|
"capture": cap,
|
||||||
}
|
}
|
||||||
|
|
||||||
if cmd.startswith("~"):
|
cmd = os.path.expandvars(os.path.expanduser(cmd))
|
||||||
cmd = os.path.expanduser(cmd)
|
|
||||||
|
|
||||||
return chk, fork, jtxt, wait, sp_ka, cmd
|
return chk, fork, jtxt, wait, sp_ka, cmd
|
||||||
|
|
||||||
@@ -2761,9 +2878,7 @@ def loadpy(ap: str, hot: bool) -> Any:
|
|||||||
depending on what other inconveniently named files happen
|
depending on what other inconveniently named files happen
|
||||||
to be in the same folder
|
to be in the same folder
|
||||||
"""
|
"""
|
||||||
if ap.startswith("~"):
|
ap = os.path.expandvars(os.path.expanduser(ap))
|
||||||
ap = os.path.expanduser(ap)
|
|
||||||
|
|
||||||
mdir, mfile = os.path.split(absreal(ap))
|
mdir, mfile = os.path.split(absreal(ap))
|
||||||
mname = mfile.rsplit(".", 1)[0]
|
mname = mfile.rsplit(".", 1)[0]
|
||||||
sys.path.insert(0, mdir)
|
sys.path.insert(0, mdir)
|
||||||
@@ -2771,7 +2886,7 @@ def loadpy(ap: str, hot: bool) -> Any:
|
|||||||
if PY2:
|
if PY2:
|
||||||
mod = __import__(mname)
|
mod = __import__(mname)
|
||||||
if hot:
|
if hot:
|
||||||
reload(mod)
|
reload(mod) # type: ignore
|
||||||
else:
|
else:
|
||||||
import importlib
|
import importlib
|
||||||
|
|
||||||
@@ -2914,6 +3029,7 @@ def termsize() -> tuple[int, int]:
|
|||||||
def hidedir(dp) -> None:
|
def hidedir(dp) -> None:
|
||||||
if ANYWIN:
|
if ANYWIN:
|
||||||
try:
|
try:
|
||||||
|
assert ctypes
|
||||||
k32 = ctypes.WinDLL("kernel32")
|
k32 = ctypes.WinDLL("kernel32")
|
||||||
attrs = k32.GetFileAttributesW(dp)
|
attrs = k32.GetFileAttributesW(dp)
|
||||||
if attrs >= 0:
|
if attrs >= 0:
|
||||||
@@ -2923,9 +3039,12 @@ def hidedir(dp) -> None:
|
|||||||
|
|
||||||
|
|
||||||
class Pebkac(Exception):
|
class Pebkac(Exception):
|
||||||
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
def __init__(
|
||||||
|
self, code: int, msg: Optional[str] = None, log: Optional[str] = None
|
||||||
|
) -> None:
|
||||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||||
self.code = code
|
self.code = code
|
||||||
|
self.log = log
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
return "Pebkac({}, {})".format(self.code, repr(self.args))
|
||||||
|
|||||||
@@ -1414,14 +1414,17 @@ html.dz input {
|
|||||||
.opview input.i {
|
.opview input.i {
|
||||||
width: calc(100% - 16.2em);
|
width: calc(100% - 16.2em);
|
||||||
}
|
}
|
||||||
|
input.drc_v,
|
||||||
input.eq_gain {
|
input.eq_gain {
|
||||||
width: 3em;
|
width: 3em;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
margin: 0 .6em;
|
margin: 0 .6em;
|
||||||
}
|
}
|
||||||
|
#audio_drc table,
|
||||||
#audio_eq table {
|
#audio_eq table {
|
||||||
border-collapse: collapse;
|
border-collapse: collapse;
|
||||||
}
|
}
|
||||||
|
#audio_drc td,
|
||||||
#audio_eq td {
|
#audio_eq td {
|
||||||
text-align: center;
|
text-align: center;
|
||||||
}
|
}
|
||||||
@@ -1430,11 +1433,15 @@ input.eq_gain {
|
|||||||
display: block;
|
display: block;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
}
|
}
|
||||||
|
#au_drc,
|
||||||
#au_eq {
|
#au_eq {
|
||||||
display: block;
|
display: block;
|
||||||
margin-top: .5em;
|
margin-top: .5em;
|
||||||
padding: 1.3em .3em;
|
padding: 1.3em .3em;
|
||||||
}
|
}
|
||||||
|
#au_drc {
|
||||||
|
padding: .4em .3em;
|
||||||
|
}
|
||||||
#ico1 {
|
#ico1 {
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
}
|
}
|
||||||
@@ -1475,6 +1482,8 @@ input.eq_gain {
|
|||||||
width: calc(100% - 2em);
|
width: calc(100% - 2em);
|
||||||
margin: .3em 0 0 1.4em;
|
margin: .3em 0 0 1.4em;
|
||||||
}
|
}
|
||||||
|
@media (max-width: 130em) { #srch_form.tags #tq_raw { width: calc(100% - 34em) } }
|
||||||
|
@media (max-width: 95em) { #srch_form.tags #tq_raw { width: calc(100% - 2em) } }
|
||||||
#tq_raw td+td {
|
#tq_raw td+td {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
@@ -1777,6 +1786,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
.ghead {
|
.ghead {
|
||||||
|
background: #fff;
|
||||||
background: var(--bg-u2);
|
background: var(--bg-u2);
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
padding: .2em .5em;
|
padding: .2em .5em;
|
||||||
@@ -1807,6 +1817,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
padding: 0;
|
padding: 0;
|
||||||
}
|
}
|
||||||
#rui {
|
#rui {
|
||||||
|
background: #fff;
|
||||||
background: var(--bg);
|
background: var(--bg);
|
||||||
position: fixed;
|
position: fixed;
|
||||||
top: 0;
|
top: 0;
|
||||||
@@ -1863,6 +1874,7 @@ html.y #tree.nowrap .ntree a+a:hover {
|
|||||||
}
|
}
|
||||||
#doc {
|
#doc {
|
||||||
overflow: visible;
|
overflow: visible;
|
||||||
|
background: #fff;
|
||||||
background: var(--bg);
|
background: var(--bg);
|
||||||
margin: -1em 0 .5em 0;
|
margin: -1em 0 .5em 0;
|
||||||
padding: 1em 0 1em 0;
|
padding: 1em 0 1em 0;
|
||||||
@@ -1879,6 +1891,10 @@ html.y #doc {
|
|||||||
text-align: center;
|
text-align: center;
|
||||||
padding: .5em;
|
padding: .5em;
|
||||||
}
|
}
|
||||||
|
#docul li.bn span {
|
||||||
|
font-weight: bold;
|
||||||
|
color: var(--fg-max);
|
||||||
|
}
|
||||||
#doc.prism {
|
#doc.prism {
|
||||||
padding-left: 3em;
|
padding-left: 3em;
|
||||||
}
|
}
|
||||||
@@ -2499,14 +2515,14 @@ html.y #bbox-overlay figcaption a {
|
|||||||
min-width: 24em;
|
min-width: 24em;
|
||||||
}
|
}
|
||||||
#u2cards.w {
|
#u2cards.w {
|
||||||
width: 44em;
|
width: 48em;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
}
|
}
|
||||||
#u2cards.ww {
|
#u2cards.ww {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
#u2etaw.w {
|
#u2etaw.w {
|
||||||
width: 52em;
|
width: 55em;
|
||||||
text-align: right;
|
text-align: right;
|
||||||
margin: 2em auto -2.7em auto;
|
margin: 2em auto -2.7em auto;
|
||||||
}
|
}
|
||||||
@@ -2551,10 +2567,10 @@ html.y #bbox-overlay figcaption a {
|
|||||||
width: 30em;
|
width: 30em;
|
||||||
}
|
}
|
||||||
#u2conf.w {
|
#u2conf.w {
|
||||||
width: 48em;
|
width: 51em;
|
||||||
}
|
}
|
||||||
#u2conf.ww {
|
#u2conf.ww {
|
||||||
width: 78em;
|
width: 82em;
|
||||||
}
|
}
|
||||||
#u2conf.ww #u2c3w {
|
#u2conf.ww #u2c3w {
|
||||||
width: 29em;
|
width: 29em;
|
||||||
@@ -3051,6 +3067,16 @@ html.d #treepar {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@media (max-width: 32em) {
|
||||||
|
#u2conf {
|
||||||
|
font-size: .9em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@media (max-width: 28em) {
|
||||||
|
#u2conf {
|
||||||
|
font-size: .8em;
|
||||||
|
}
|
||||||
|
}
|
||||||
@media (min-width: 70em) {
|
@media (min-width: 70em) {
|
||||||
#barpos,
|
#barpos,
|
||||||
#barbuf {
|
#barbuf {
|
||||||
|
|||||||
@@ -135,35 +135,17 @@
|
|||||||
|
|
||||||
<script>
|
<script>
|
||||||
var SR = {{ r|tojson }},
|
var SR = {{ r|tojson }},
|
||||||
|
CGV = {{ cgv|tojson }},
|
||||||
TS = "{{ ts }}",
|
TS = "{{ ts }}",
|
||||||
acct = "{{ acct }}",
|
|
||||||
perms = {{ perms }},
|
|
||||||
dgrid = {{ dgrid|tojson }},
|
|
||||||
themes = {{ themes }},
|
|
||||||
dtheme = "{{ dtheme }}",
|
dtheme = "{{ dtheme }}",
|
||||||
srvinf = "{{ srv_info }}",
|
srvinf = "{{ srv_info }}",
|
||||||
s_name = "{{ s_name }}",
|
s_name = "{{ s_name }}",
|
||||||
lang = "{{ lang }}",
|
lang = "{{ lang }}",
|
||||||
dfavico = "{{ favico }}",
|
dfavico = "{{ favico }}",
|
||||||
def_hcols = {{ def_hcols|tojson }},
|
|
||||||
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
|
||||||
have_tags_idx = {{ have_tags_idx|tojson }},
|
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||||
have_acode = {{ have_acode|tojson }},
|
|
||||||
have_mv = {{ have_mv|tojson }},
|
|
||||||
have_del = {{ have_del|tojson }},
|
|
||||||
have_unpost = {{ have_unpost }},
|
|
||||||
have_zip = {{ have_zip|tojson }},
|
|
||||||
sb_md = "{{ sb_md }}",
|
|
||||||
sb_lg = "{{ sb_lg }}",
|
sb_lg = "{{ sb_lg }}",
|
||||||
lifetime = {{ lifetime }},
|
|
||||||
turbolvl = {{ turbolvl }},
|
|
||||||
idxh = {{ idxh }},
|
|
||||||
frand = {{ frand|tojson }},
|
|
||||||
u2sort = "{{ u2sort }}",
|
|
||||||
have_emp = {{ have_emp|tojson }},
|
|
||||||
txt_ext = "{{ txt_ext }}",
|
txt_ext = "{{ txt_ext }}",
|
||||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||||
readme = {{ readme|tojson }},
|
|
||||||
ls0 = {{ ls0|tojson }};
|
ls0 = {{ ls0|tojson }};
|
||||||
|
|
||||||
document.documentElement.className = localStorage.cpp_thm || dtheme;
|
document.documentElement.className = localStorage.cpp_thm || dtheme;
|
||||||
|
|||||||
@@ -137,6 +137,7 @@ var Ls = {
|
|||||||
|
|
||||||
"ul_par": "parallel uploads:",
|
"ul_par": "parallel uploads:",
|
||||||
"ut_rand": "randomize filenames",
|
"ut_rand": "randomize filenames",
|
||||||
|
"ut_u2ts": "copy the last-modified timestamp$Nfrom your filesystem to the server",
|
||||||
"ut_mt": "continue hashing other files while uploading$N$Nmaybe disable if your CPU or HDD is a bottleneck",
|
"ut_mt": "continue hashing other files while uploading$N$Nmaybe disable if your CPU or HDD is a bottleneck",
|
||||||
"ut_ask": "ask for confirmation before upload starts",
|
"ut_ask": "ask for confirmation before upload starts",
|
||||||
"ut_pot": "improve upload speed on slow devices$Nby making the UI less complex",
|
"ut_pot": "improve upload speed on slow devices$Nby making the UI less complex",
|
||||||
@@ -168,6 +169,7 @@ var Ls = {
|
|||||||
"utl_prog": "progress",
|
"utl_prog": "progress",
|
||||||
|
|
||||||
"ul_flagblk": "the files were added to the queue</b><br>however there is a busy up2k in another browser tab,<br>so waiting for that to finish first",
|
"ul_flagblk": "the files were added to the queue</b><br>however there is a busy up2k in another browser tab,<br>so waiting for that to finish first",
|
||||||
|
"ul_btnlk": "the server configuration has locked this switch into this state",
|
||||||
|
|
||||||
"udt_up": "Upload",
|
"udt_up": "Upload",
|
||||||
"udt_srch": "Search",
|
"udt_srch": "Search",
|
||||||
@@ -231,6 +233,7 @@ var Ls = {
|
|||||||
"ml_tcode": "transcode",
|
"ml_tcode": "transcode",
|
||||||
"ml_tint": "tint",
|
"ml_tint": "tint",
|
||||||
"ml_eq": "audio equalizer",
|
"ml_eq": "audio equalizer",
|
||||||
|
"ml_drc": "dynamic range compressor",
|
||||||
|
|
||||||
"mt_preload": "start loading the next song near the end for gapless playback\">preload",
|
"mt_preload": "start loading the next song near the end for gapless playback\">preload",
|
||||||
"mt_fullpre": "try to preload the entire song;$N✅ enable on <b>unreliable</b> connections,$N❌ <b>disable</b> on slow connections probably\">full",
|
"mt_fullpre": "try to preload the entire song;$N✅ enable on <b>unreliable</b> connections,$N❌ <b>disable</b> on slow connections probably\">full",
|
||||||
@@ -248,6 +251,7 @@ var Ls = {
|
|||||||
"mt_coth": "convert all others (not mp3) to opus\">oth",
|
"mt_coth": "convert all others (not mp3) to opus\">oth",
|
||||||
"mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting",
|
"mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting",
|
||||||
"mt_eq": "enables the equalizer and gain control;$N$Nboost <code>0</code> = standard 100% volume (unmodified)$N$Nwidth <code>1 </code> = standard stereo (unmodified)$Nwidth <code>0.5</code> = 50% left-right crossfeed$Nwidth <code>0 </code> = mono$N$Nboost <code>-0.8</code> & width <code>10</code> = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that",
|
"mt_eq": "enables the equalizer and gain control;$N$Nboost <code>0</code> = standard 100% volume (unmodified)$N$Nwidth <code>1 </code> = standard stereo (unmodified)$Nwidth <code>0.5</code> = 50% left-right crossfeed$Nwidth <code>0 </code> = mono$N$Nboost <code>-0.8</code> & width <code>10</code> = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that",
|
||||||
|
"mt_drc": "enables the dynamic range compressor (volume flattener / brickwaller); will also enable EQ to balance the spaghetti, so set all EQ fields except for 'width' to 0 if you don't want it$N$Nlowers the volume of audio above THRESHOLD dB; for every RATIO dB past THRESHOLD there is 1 dB of output, so default values of tresh -24 and ratio 12 means it should never get louder than -22 dB and it is safe to increase the equalizer boost to 0.8, or even 1.8 with ATK 0 and a huge RLS like 90 (only works in firefox; RLS is max 1 in other browsers)$N$N(see wikipedia, they explain it much better)",
|
||||||
|
|
||||||
"mb_play": "play",
|
"mb_play": "play",
|
||||||
"mm_hashplay": "play this audio file?",
|
"mm_hashplay": "play this audio file?",
|
||||||
@@ -323,14 +327,15 @@ var Ls = {
|
|||||||
"tv_xe1": "could not load textfile:\n\nerror ",
|
"tv_xe1": "could not load textfile:\n\nerror ",
|
||||||
"tv_xe2": "404, file not found",
|
"tv_xe2": "404, file not found",
|
||||||
"tv_lst": "list of textfiles in",
|
"tv_lst": "list of textfiles in",
|
||||||
"tvt_close": "return to folder view$NHotkey: M\">❌ close",
|
"tvt_close": "return to folder view$NHotkey: M (or Esc)\">❌ close",
|
||||||
"tvt_dl": "download this file\">💾 download",
|
"tvt_dl": "download this file$NHotkey: Y\">💾 download",
|
||||||
"tvt_prev": "show previous document$NHotkey: i\">⬆ prev",
|
"tvt_prev": "show previous document$NHotkey: i\">⬆ prev",
|
||||||
"tvt_next": "show next document$NHotkey: K\">⬇ next",
|
"tvt_next": "show next document$NHotkey: K\">⬇ next",
|
||||||
"tvt_sel": "select file ( for cut / delete / ... )$NHotkey: S\">sel",
|
"tvt_sel": "select file ( for cut / delete / ... )$NHotkey: S\">sel",
|
||||||
"tvt_edit": "open file in text editor$NHotkey: E\">✏️ edit",
|
"tvt_edit": "open file in text editor$NHotkey: E\">✏️ edit",
|
||||||
|
|
||||||
"gt_msel": "enable file selection; ctrl-click a file to override$N$N<em>when active: doubleclick a file / folder to open it</em>$N$NHotkey: S\">multiselect",
|
"gt_msel": "enable file selection; ctrl-click a file to override$N$N<em>when active: doubleclick a file / folder to open it</em>$N$NHotkey: S\">multiselect",
|
||||||
|
"gt_full": "show uncropped thumbnails\">full",
|
||||||
"gt_zoom": "zoom",
|
"gt_zoom": "zoom",
|
||||||
"gt_chop": "chop",
|
"gt_chop": "chop",
|
||||||
"gt_sort": "sort by",
|
"gt_sort": "sort by",
|
||||||
@@ -351,11 +356,14 @@ var Ls = {
|
|||||||
"s_rd": "path",
|
"s_rd": "path",
|
||||||
"s_fn": "name",
|
"s_fn": "name",
|
||||||
"s_ta": "tags",
|
"s_ta": "tags",
|
||||||
|
"s_ua": "up@",
|
||||||
"s_ad": "adv.",
|
"s_ad": "adv.",
|
||||||
"s_s1": "minimum MiB",
|
"s_s1": "minimum MiB",
|
||||||
"s_s2": "maximum MiB",
|
"s_s2": "maximum MiB",
|
||||||
"s_d1": "min. iso8601",
|
"s_d1": "min. iso8601",
|
||||||
"s_d2": "max. iso8601",
|
"s_d2": "max. iso8601",
|
||||||
|
"s_u1": "uploaded after",
|
||||||
|
"s_u2": "and/or before",
|
||||||
"s_r1": "path contains (space-separated)",
|
"s_r1": "path contains (space-separated)",
|
||||||
"s_f1": "name contains (negate with -nope)",
|
"s_f1": "name contains (negate with -nope)",
|
||||||
"s_t1": "tags contains (^=start, end=$)",
|
"s_t1": "tags contains (^=start, end=$)",
|
||||||
@@ -448,6 +456,8 @@ var Ls = {
|
|||||||
"u_expl": "explain",
|
"u_expl": "explain",
|
||||||
"u_tu": '<p class="warn">WARNING: turbo enabled, <span> client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
|
"u_tu": '<p class="warn">WARNING: turbo enabled, <span> client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
|
||||||
"u_ts": '<p class="warn">WARNING: turbo enabled, <span> search results can be incorrect; see turbo-button tooltip</span></p>',
|
"u_ts": '<p class="warn">WARNING: turbo enabled, <span> search results can be incorrect; see turbo-button tooltip</span></p>',
|
||||||
|
"u_turbo_c": "turbo is disabled in server config",
|
||||||
|
"u_turbo_g": "disabling turbo because you don't have\ndirectory listing privileges within this volume",
|
||||||
"u_life_cfg": 'autodelete after <input id="lifem" p="60" /> min (or <input id="lifeh" p="3600" /> hours)',
|
"u_life_cfg": 'autodelete after <input id="lifem" p="60" /> min (or <input id="lifeh" p="3600" /> hours)',
|
||||||
"u_life_est": 'upload will be deleted <span id="lifew" tt="local time">---</span>',
|
"u_life_est": 'upload will be deleted <span id="lifew" tt="local time">---</span>',
|
||||||
"u_life_max": 'this folder enforces a\nmax lifetime of {0}',
|
"u_life_max": 'this folder enforces a\nmax lifetime of {0}',
|
||||||
@@ -549,8 +559,9 @@ var Ls = {
|
|||||||
"dokumentviser",
|
"dokumentviser",
|
||||||
["I/K", "forr./neste fil"],
|
["I/K", "forr./neste fil"],
|
||||||
["M", "lukk tekstdokument"],
|
["M", "lukk tekstdokument"],
|
||||||
["E", "rediger tekstdokument"]
|
["E", "rediger tekstdokument"],
|
||||||
["S", "velg fil (for F2/ctrl-x/...)"]
|
["S", "velg fil (for F2/ctrl-x/...)"],
|
||||||
|
["Y", "last ned tekstfil"],
|
||||||
]
|
]
|
||||||
],
|
],
|
||||||
|
|
||||||
@@ -606,6 +617,7 @@ var Ls = {
|
|||||||
|
|
||||||
"ul_par": "samtidige handl.:",
|
"ul_par": "samtidige handl.:",
|
||||||
"ut_rand": "finn opp nye tilfeldige filnavn",
|
"ut_rand": "finn opp nye tilfeldige filnavn",
|
||||||
|
"ut_u2ts": "gi filen på serveren samme$Ntidsstempel som lokalt hos deg",
|
||||||
"ut_mt": "fortsett å befare køen mens opplastning foregår$N$Nskru denne av dersom du har en$Ntreg prosessor eller harddisk",
|
"ut_mt": "fortsett å befare køen mens opplastning foregår$N$Nskru denne av dersom du har en$Ntreg prosessor eller harddisk",
|
||||||
"ut_ask": "bekreft filutvalg før opplastning starter",
|
"ut_ask": "bekreft filutvalg før opplastning starter",
|
||||||
"ut_pot": "forbedre ytelsen på trege enheter ved å$Nforenkle brukergrensesnittet",
|
"ut_pot": "forbedre ytelsen på trege enheter ved å$Nforenkle brukergrensesnittet",
|
||||||
@@ -637,6 +649,7 @@ var Ls = {
|
|||||||
"utl_prog": "fremdrift",
|
"utl_prog": "fremdrift",
|
||||||
|
|
||||||
"ul_flagblk": "filene har blitt lagt i køen</b><br>men det er en annen nettleserfane som holder på med befaring eller opplastning akkurat nå,<br>så venter til den er ferdig først",
|
"ul_flagblk": "filene har blitt lagt i køen</b><br>men det er en annen nettleserfane som holder på med befaring eller opplastning akkurat nå,<br>så venter til den er ferdig først",
|
||||||
|
"ul_btnlk": "bryteren har blitt låst til denne tilstanden i serverens konfigurasjon",
|
||||||
|
|
||||||
"udt_up": "Last opp",
|
"udt_up": "Last opp",
|
||||||
"udt_srch": "Søk",
|
"udt_srch": "Søk",
|
||||||
@@ -700,6 +713,7 @@ var Ls = {
|
|||||||
"ml_tcode": "konvertering",
|
"ml_tcode": "konvertering",
|
||||||
"ml_tint": "tint",
|
"ml_tint": "tint",
|
||||||
"ml_eq": "audio equalizer (tonejustering)",
|
"ml_eq": "audio equalizer (tonejustering)",
|
||||||
|
"ml_drc": "compressor (volum-utjevning)",
|
||||||
|
|
||||||
"mt_preload": "hent ned litt av neste sang i forkant,$Nslik at pausen i overgangen blir mindre\">forles",
|
"mt_preload": "hent ned litt av neste sang i forkant,$Nslik at pausen i overgangen blir mindre\">forles",
|
||||||
"mt_fullpre": "hent ned hele neste sang, ikke bare litt:$N✅ skru på hvis nettet ditt er <b>ustabilt</b>,$N❌ skru av hvis nettet ditt er <b>tregt</b>\">full",
|
"mt_fullpre": "hent ned hele neste sang, ikke bare litt:$N✅ skru på hvis nettet ditt er <b>ustabilt</b>,$N❌ skru av hvis nettet ditt er <b>tregt</b>\">full",
|
||||||
@@ -717,6 +731,7 @@ var Ls = {
|
|||||||
"mt_coth": "konverter alt annet (men ikke mp3) til opus\">andre",
|
"mt_coth": "konverter alt annet (men ikke mp3) til opus\">andre",
|
||||||
"mt_tint": "nivå av bakgrunnsfarge på søkestripa (0-100),$Ngjør oppdateringer mindre distraherende",
|
"mt_tint": "nivå av bakgrunnsfarge på søkestripa (0-100),$Ngjør oppdateringer mindre distraherende",
|
||||||
"mt_eq": "aktiver tonekontroll og forsterker;$N$Nboost <code>0</code> = normal volumskala$N$Nwidth <code>1 </code> = normal stereo$Nwidth <code>0.5</code> = 50% blanding venstre-høyre$Nwidth <code>0 </code> = mono$N$Nboost <code>-0.8</code> & width <code>10</code> = instrumental :^)$N$Nreduserer også dødtid imellom sangfiler",
|
"mt_eq": "aktiver tonekontroll og forsterker;$N$Nboost <code>0</code> = normal volumskala$N$Nwidth <code>1 </code> = normal stereo$Nwidth <code>0.5</code> = 50% blanding venstre-høyre$Nwidth <code>0 </code> = mono$N$Nboost <code>-0.8</code> & width <code>10</code> = instrumental :^)$N$Nreduserer også dødtid imellom sangfiler",
|
||||||
|
"mt_drc": "aktiver volum-utjevning (dynamic range compressor); vil også aktivere tonejustering, så sett alle EQ-feltene bortsett fra 'width' til 0 hvis du ikke vil ha noe EQ$N$Nfilteret vil dempe volumet på alt som er høyere enn TRESH dB; for hver RATIO dB over grensen er det 1dB som treffer høyttalerne, så standardverdiene tresh -24 og ratio 12 skal bety at volumet ikke går høyere enn -22 dB, slik at man trygt kan øke boost-verdien i equalizer'n til rundt 0.8, eller 1.8 kombinert med ATK 0 og RLS 90 (bare mulig i firefox; andre nettlesere tar ikke høyere RLS enn 1)$N$Nwikipedia forklarer dette mye bedre forresten",
|
||||||
|
|
||||||
"mb_play": "lytt",
|
"mb_play": "lytt",
|
||||||
"mm_hashplay": "spill denne sangen?",
|
"mm_hashplay": "spill denne sangen?",
|
||||||
@@ -792,14 +807,15 @@ var Ls = {
|
|||||||
"tv_xe1": "kunne ikke laste tekstfil:\n\nfeil ",
|
"tv_xe1": "kunne ikke laste tekstfil:\n\nfeil ",
|
||||||
"tv_xe2": "404, Fil ikke funnet",
|
"tv_xe2": "404, Fil ikke funnet",
|
||||||
"tv_lst": "tekstfiler i mappen",
|
"tv_lst": "tekstfiler i mappen",
|
||||||
"tvt_close": "gå tilbake til mappen$NSnarvei: M\">❌ lukk",
|
"tvt_close": "gå tilbake til mappen$NSnarvei: M (eller Esc)\">❌ lukk",
|
||||||
"tvt_dl": "last ned denne filen\">💾 last ned",
|
"tvt_dl": "last ned denne filen$NSnarvei: Y\">💾 last ned",
|
||||||
"tvt_prev": "vis forrige dokument$NSnarvei: i\">⬆ forr.",
|
"tvt_prev": "vis forrige dokument$NSnarvei: i\">⬆ forr.",
|
||||||
"tvt_next": "vis neste dokument$NSnarvei: K\">⬇ neste",
|
"tvt_next": "vis neste dokument$NSnarvei: K\">⬇ neste",
|
||||||
"tvt_sel": "markér filen ( for utklipp / sletting / ... )$NSnarvei: S\">merk",
|
"tvt_sel": "markér filen ( for utklipp / sletting / ... )$NSnarvei: S\">merk",
|
||||||
"tvt_edit": "redigér filen$NSnarvei: E\">✏️ endre",
|
"tvt_edit": "redigér filen$NSnarvei: E\">✏️ endre",
|
||||||
|
|
||||||
"gt_msel": "markér filer istedenfor å åpne dem; ctrl-klikk filer for å overstyre$N$N<em>når aktiv: dobbelklikk en fil / mappe for å åpne</em>$N$NSnarvei: S\">markering",
|
"gt_msel": "markér filer istedenfor å åpne dem; ctrl-klikk filer for å overstyre$N$N<em>når aktiv: dobbelklikk en fil / mappe for å åpne</em>$N$NSnarvei: S\">markering",
|
||||||
|
"gt_full": "ikke beskjær bildene\">full",
|
||||||
"gt_zoom": "zoom",
|
"gt_zoom": "zoom",
|
||||||
"gt_chop": "trim",
|
"gt_chop": "trim",
|
||||||
"gt_sort": "sorter",
|
"gt_sort": "sorter",
|
||||||
@@ -820,11 +836,14 @@ var Ls = {
|
|||||||
"s_rd": "sti",
|
"s_rd": "sti",
|
||||||
"s_fn": "navn",
|
"s_fn": "navn",
|
||||||
"s_ta": "meta",
|
"s_ta": "meta",
|
||||||
|
"s_ua": "up@",
|
||||||
"s_ad": "avns.",
|
"s_ad": "avns.",
|
||||||
"s_s1": "større enn ↓ MiB",
|
"s_s1": "større enn ↓ MiB",
|
||||||
"s_s2": "mindre enn ↓ MiB",
|
"s_s2": "mindre enn ↓ MiB",
|
||||||
"s_d1": "nyere enn <dato>",
|
"s_d1": "nyere enn <dato>",
|
||||||
"s_d2": "eldre enn",
|
"s_d2": "eldre enn",
|
||||||
|
"s_u1": "lastet opp etter",
|
||||||
|
"s_u2": "og/eller før",
|
||||||
"s_r1": "mappenavn inneholder",
|
"s_r1": "mappenavn inneholder",
|
||||||
"s_f1": "filnavn inneholder",
|
"s_f1": "filnavn inneholder",
|
||||||
"s_t1": "sang-info inneholder",
|
"s_t1": "sang-info inneholder",
|
||||||
@@ -917,6 +936,8 @@ var Ls = {
|
|||||||
"u_expl": "forklar",
|
"u_expl": "forklar",
|
||||||
"u_tu": '<p class="warn">ADVARSEL: turbo er på, <span> avbrutte opplastninger vil muligens ikke oppdages og gjenopptas; hold musepekeren over turbo-knappen for mer info</span></p>',
|
"u_tu": '<p class="warn">ADVARSEL: turbo er på, <span> avbrutte opplastninger vil muligens ikke oppdages og gjenopptas; hold musepekeren over turbo-knappen for mer info</span></p>',
|
||||||
"u_ts": '<p class="warn">ADVARSEL: turbo er på, <span> søkeresultater kan være feil; hold musepekeren over turbo-knappen for mer info</span></p>',
|
"u_ts": '<p class="warn">ADVARSEL: turbo er på, <span> søkeresultater kan være feil; hold musepekeren over turbo-knappen for mer info</span></p>',
|
||||||
|
"u_turbo_c": "turbo er deaktivert i serverkonfigurasjonen",
|
||||||
|
"u_turbo_g": 'turbo ble deaktivert fordi du ikke har\ntilgang til å se mappeinnhold i dette volumet',
|
||||||
"u_life_cfg": 'slett opplastning etter <input id="lifem" p="60" /> min (eller <input id="lifeh" p="3600" /> timer)',
|
"u_life_cfg": 'slett opplastning etter <input id="lifem" p="60" /> min (eller <input id="lifeh" p="3600" /> timer)',
|
||||||
"u_life_est": 'opplastningen slettes <span id="lifew" tt="lokal tid">---</span>',
|
"u_life_est": 'opplastningen slettes <span id="lifew" tt="lokal tid">---</span>',
|
||||||
"u_life_max": 'denne mappen tillater ikke å \noppbevare filer i mer enn {0}',
|
"u_life_max": 'denne mappen tillater ikke å \noppbevare filer i mer enn {0}',
|
||||||
@@ -1042,6 +1063,10 @@ ebi('op_up2k').innerHTML = (
|
|||||||
' <input type="checkbox" id="u2rand" />\n' +
|
' <input type="checkbox" id="u2rand" />\n' +
|
||||||
' <label for="u2rand" tt="' + L.ut_rand + '">🎲</label>\n' +
|
' <label for="u2rand" tt="' + L.ut_rand + '">🎲</label>\n' +
|
||||||
' </td>\n' +
|
' </td>\n' +
|
||||||
|
' <td class="c" rowspan="2">\n' +
|
||||||
|
' <input type="checkbox" id="u2ts" />\n' +
|
||||||
|
' <label for="u2ts" tt="' + L.ut_u2ts + '">📅</a>\n' +
|
||||||
|
' </td>\n' +
|
||||||
' <td class="c" data-perm="read" data-dep="idx" rowspan="2">\n' +
|
' <td class="c" data-perm="read" data-dep="idx" rowspan="2">\n' +
|
||||||
' <input type="checkbox" id="fsearch" />\n' +
|
' <input type="checkbox" id="fsearch" />\n' +
|
||||||
' <label for="fsearch" tt="' + L.ut_srch + '">🔎</label>\n' +
|
' <label for="fsearch" tt="' + L.ut_srch + '">🔎</label>\n' +
|
||||||
@@ -1052,7 +1077,7 @@ ebi('op_up2k').innerHTML = (
|
|||||||
' <tr>\n' +
|
' <tr>\n' +
|
||||||
' <td class="c" data-perm="read">\n' +
|
' <td class="c" data-perm="read">\n' +
|
||||||
' <a href="#" class="b" id="nthread_sub">–</a><input\n' +
|
' <a href="#" class="b" id="nthread_sub">–</a><input\n' +
|
||||||
' class="txtbox" id="nthread" value="2" tt="' + L.ut_par + '"/><a\n' +
|
' class="txtbox" id="nthread" value="" tt="' + L.ut_par + '"/><a\n' +
|
||||||
' href="#" class="b" id="nthread_add">+</a><br /> \n' +
|
' href="#" class="b" id="nthread_add">+</a><br /> \n' +
|
||||||
' </td>\n' +
|
' </td>\n' +
|
||||||
' </tr>\n' +
|
' </tr>\n' +
|
||||||
@@ -1366,7 +1391,9 @@ var mpl = (function () {
|
|||||||
'<input type="text" id="pb_tint" value="0" ' + NOAC + ' style="width:2.4em" tt="' + L.mt_tint + '" />' +
|
'<input type="text" id="pb_tint" value="0" ' + NOAC + ' style="width:2.4em" tt="' + L.mt_tint + '" />' +
|
||||||
'</div></div>' +
|
'</div></div>' +
|
||||||
|
|
||||||
'<div><h3>' + L.ml_eq + '</h3><div id="audio_eq"></div></div>');
|
'<div><h3 id="h_drc">' + L.ml_drc + '</h3><div id="audio_drc"></div></div>' +
|
||||||
|
'<div><h3>' + L.ml_eq + '</h3><div id="audio_eq"></div></div>' +
|
||||||
|
'');
|
||||||
|
|
||||||
var r = {
|
var r = {
|
||||||
"pb_mode": (sread('pb_mode', ['loop', 'next']) || 'next').split('-')[0],
|
"pb_mode": (sread('pb_mode', ['loop', 'next']) || 'next').split('-')[0],
|
||||||
@@ -1479,7 +1506,6 @@ var mpl = (function () {
|
|||||||
artist = (np.circle && np.circle != np.artist ? np.circle + ' // ' : '') + (np.artist || (fns.length > 1 ? fns[0] : '')),
|
artist = (np.circle && np.circle != np.artist ? np.circle + ' // ' : '') + (np.artist || (fns.length > 1 ? fns[0] : '')),
|
||||||
title = np.title || fns.pop(),
|
title = np.title || fns.pop(),
|
||||||
cover = '',
|
cover = '',
|
||||||
pcover = '',
|
|
||||||
tags = { title: title };
|
tags = { title: title };
|
||||||
|
|
||||||
if (artist)
|
if (artist)
|
||||||
@@ -1494,20 +1520,14 @@ var mpl = (function () {
|
|||||||
|
|
||||||
for (var a = 0, aa = files.length; a < aa; a++) {
|
for (var a = 0, aa = files.length; a < aa; a++) {
|
||||||
if (/^(cover|folder)\.(jpe?g|png|gif)$/i.test(files[a].textContent)) {
|
if (/^(cover|folder)\.(jpe?g|png|gif)$/i.test(files[a].textContent)) {
|
||||||
cover = noq_href(files[a]);
|
cover = files[a].getAttribute('href');
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (cover) {
|
if (cover) {
|
||||||
cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j';
|
cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j';
|
||||||
pcover = cover;
|
tags.artwork = [{ "src": cover, type: "image/jpeg" }];
|
||||||
|
|
||||||
var pwd = get_pwd();
|
|
||||||
if (pwd)
|
|
||||||
pcover += '&pw=' + uricom_enc(pwd);
|
|
||||||
|
|
||||||
tags.artwork = [{ "src": pcover, type: "image/jpeg" }];
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1519,7 +1539,7 @@ var mpl = (function () {
|
|||||||
ebi('np_dur').textContent = np['.dur'] || '';
|
ebi('np_dur').textContent = np['.dur'] || '';
|
||||||
ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
|
ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
|
||||||
if (!MOBILE)
|
if (!MOBILE)
|
||||||
ebi('np_img').setAttribute('src', cover || ''); // dont give last.fm the pwd
|
ebi('np_img').setAttribute('src', cover || '');
|
||||||
|
|
||||||
navigator.mediaSession.metadata = new MediaMetadata(tags);
|
navigator.mediaSession.metadata = new MediaMetadata(tags);
|
||||||
navigator.mediaSession.setActionHandler('play', mplay);
|
navigator.mediaSession.setActionHandler('play', mplay);
|
||||||
@@ -1697,7 +1717,7 @@ function MPlayer() {
|
|||||||
var t0 = Date.now();
|
var t0 = Date.now();
|
||||||
|
|
||||||
if (mpl.waves)
|
if (mpl.waves)
|
||||||
fetch(url + '&th=p').then(function (x) {
|
fetch(url.replace(/\bth=opus&/, '') + '&th=p').then(function (x) {
|
||||||
x.body.getReader().read();
|
x.body.getReader().read();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -2217,7 +2237,7 @@ function song_skip(n, dirskip) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (tid)
|
if (tid && !dirskip)
|
||||||
play(ofs + n);
|
play(ofs + n);
|
||||||
else
|
else
|
||||||
play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
|
play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
|
||||||
@@ -2250,6 +2270,21 @@ function next_song_cmn(e) {
|
|||||||
mpl.traversals = 0;
|
mpl.traversals = 0;
|
||||||
t_fchg = 0;
|
t_fchg = 0;
|
||||||
}
|
}
|
||||||
|
function last_song(e) {
|
||||||
|
ev(e);
|
||||||
|
if (mp.order.length) {
|
||||||
|
mpl.traversals = 0;
|
||||||
|
return song_skip(-1, true);
|
||||||
|
}
|
||||||
|
if (mpl.traversals++ < 5) {
|
||||||
|
treectl.ls_cb = last_song;
|
||||||
|
return tree_neigh(-1);
|
||||||
|
}
|
||||||
|
toast.inf(10, L.mm_nof);
|
||||||
|
console.log("mm_nof2");
|
||||||
|
mpl.traversals = 0;
|
||||||
|
t_fchg = 0;
|
||||||
|
}
|
||||||
function prev_song(e) {
|
function prev_song(e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
|
|
||||||
@@ -2472,8 +2507,13 @@ function start_actx() {
|
|||||||
var afilt = (function () {
|
var afilt = (function () {
|
||||||
var r = {
|
var r = {
|
||||||
"eqen": false,
|
"eqen": false,
|
||||||
|
"drcen": false,
|
||||||
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
|
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
|
||||||
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
|
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
|
||||||
|
"drcv": [-24, 30, 12, 0.01, 0.25],
|
||||||
|
"drch": ['tresh', 'knee', 'ratio', 'atk', 'rls'],
|
||||||
|
"drck": ['threshold', 'knee', 'ratio', 'attack', 'release'],
|
||||||
|
"drcn": null,
|
||||||
"filters": [],
|
"filters": [],
|
||||||
"filterskip": [],
|
"filterskip": [],
|
||||||
"plugs": [],
|
"plugs": [],
|
||||||
@@ -2483,16 +2523,18 @@ var afilt = (function () {
|
|||||||
"acst": {}
|
"acst": {}
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!ACtx)
|
function setvis(vis) {
|
||||||
ebi('audio_eq').parentNode.style.display = 'none';
|
ebi('audio_eq').parentNode.style.display = ebi('audio_drc').parentNode.style.display = (vis ? '' : 'none');
|
||||||
|
}
|
||||||
|
|
||||||
|
setvis(ACtx);
|
||||||
|
|
||||||
r.init = function () {
|
r.init = function () {
|
||||||
start_actx();
|
start_actx();
|
||||||
if (r.cfg)
|
if (r.cfg)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (!actx)
|
setvis(actx);
|
||||||
ebi('audio_eq').parentNode.style.display = 'none';
|
|
||||||
|
|
||||||
// some browsers have insane high-frequency boost
|
// some browsers have insane high-frequency boost
|
||||||
// (or rather the actual problem is Q but close enough)
|
// (or rather the actual problem is Q but close enough)
|
||||||
@@ -2543,6 +2585,8 @@ var afilt = (function () {
|
|||||||
var gains = jread('au_eq_gain', r.gains);
|
var gains = jread('au_eq_gain', r.gains);
|
||||||
if (r.gains.length == gains.length)
|
if (r.gains.length == gains.length)
|
||||||
r.gains = gains;
|
r.gains = gains;
|
||||||
|
|
||||||
|
r.drcv = jread('au_drcv', r.drcv);
|
||||||
}
|
}
|
||||||
catch (ex) { }
|
catch (ex) { }
|
||||||
|
|
||||||
@@ -2579,12 +2623,20 @@ var afilt = (function () {
|
|||||||
mp.acs = mpo.acs = null;
|
mp.acs = mpo.acs = null;
|
||||||
};
|
};
|
||||||
|
|
||||||
r.apply = function () {
|
r.apply = function (v) {
|
||||||
r.init();
|
r.init();
|
||||||
r.draw();
|
r.draw();
|
||||||
|
|
||||||
if (!actx)
|
if (!actx) {
|
||||||
bcfg_set('au_eq', false);
|
bcfg_set('au_eq', r.eqen = false);
|
||||||
|
bcfg_set('au_drc', r.drcen = false);
|
||||||
|
}
|
||||||
|
else if (v === true && r.drcen && !r.eqen)
|
||||||
|
bcfg_set('au_eq', r.eqen = true);
|
||||||
|
else if (v === false && !r.eqen)
|
||||||
|
bcfg_set('au_drc', r.drcen = false);
|
||||||
|
|
||||||
|
r.drcn = null;
|
||||||
|
|
||||||
var plug = false;
|
var plug = false;
|
||||||
for (var a = 0; a < r.plugs.length; a++)
|
for (var a = 0; a < r.plugs.length; a++)
|
||||||
@@ -2644,6 +2696,28 @@ var afilt = (function () {
|
|||||||
fi.gain.value = r.amp + 0.94; // +.137 dB measured; now -.25 dB and almost bitperfect
|
fi.gain.value = r.amp + 0.94; // +.137 dB measured; now -.25 dB and almost bitperfect
|
||||||
r.filters.push(fi);
|
r.filters.push(fi);
|
||||||
|
|
||||||
|
// wait nevermind, drc goes first
|
||||||
|
timer.rm(showdrc);
|
||||||
|
if (r.drcen) {
|
||||||
|
fi = r.drcn = actx.createDynamicsCompressor();
|
||||||
|
for (var a = 0; a < r.drcv.length; a++)
|
||||||
|
fi[r.drck[a]].value = r.drcv[a];
|
||||||
|
|
||||||
|
if (r.drcv[3] < 0.02) {
|
||||||
|
// avoid static at decode start
|
||||||
|
fi.attack.value = 0.02;
|
||||||
|
setTimeout(function () {
|
||||||
|
try {
|
||||||
|
fi.attack.value = r.drcv[3];
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
}, 200);
|
||||||
|
}
|
||||||
|
|
||||||
|
r.filters.push(fi);
|
||||||
|
timer.add(showdrc);
|
||||||
|
}
|
||||||
|
|
||||||
if (Math.round(r.chw * 25) != 25) {
|
if (Math.round(r.chw * 25) != 25) {
|
||||||
var split = actx.createChannelSplitter(2),
|
var split = actx.createChannelSplitter(2),
|
||||||
merge = actx.createChannelMerger(2),
|
merge = actx.createChannelMerger(2),
|
||||||
@@ -2716,6 +2790,31 @@ var afilt = (function () {
|
|||||||
clmod(that, 'err', err);
|
clmod(that, 'err', err);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function adj_drc() {
|
||||||
|
var err = false;
|
||||||
|
try {
|
||||||
|
var n = this.getAttribute('k'),
|
||||||
|
ov = r.drcv[n],
|
||||||
|
vs = this.value,
|
||||||
|
v = parseFloat(vs);
|
||||||
|
|
||||||
|
if (!isNum(v) || v + '' != vs)
|
||||||
|
throw new Error('inval v');
|
||||||
|
|
||||||
|
if (v == ov)
|
||||||
|
return;
|
||||||
|
|
||||||
|
r.drcv[n] = v;
|
||||||
|
jwrite('au_drcv', r.drcv);
|
||||||
|
if (r.drcn)
|
||||||
|
r.drcn[r.drck[n]].value = v;
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
err = true;
|
||||||
|
}
|
||||||
|
clmod(this, 'err', err);
|
||||||
|
}
|
||||||
|
|
||||||
function eq_mod(e) {
|
function eq_mod(e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
adj_band(this, 0);
|
adj_band(this, 0);
|
||||||
@@ -2727,6 +2826,13 @@ var afilt = (function () {
|
|||||||
adj_band(this, step);
|
adj_band(this, step);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function showdrc() {
|
||||||
|
if (!r.drcn)
|
||||||
|
return timer.rm(showdrc);
|
||||||
|
|
||||||
|
ebi('h_drc').textContent = f2f(r.drcn.reduction, 1);
|
||||||
|
}
|
||||||
|
|
||||||
var html = ['<table><tr><td rowspan="4">',
|
var html = ['<table><tr><td rowspan="4">',
|
||||||
'<a id="au_eq" class="tgl btn" href="#" tt="' + L.mt_eq + '">enable</a></td>'],
|
'<a id="au_eq" class="tgl btn" href="#" tt="' + L.mt_eq + '">enable</a></td>'],
|
||||||
h2 = [], h3 = [], h4 = [];
|
h2 = [], h3 = [], h4 = [];
|
||||||
@@ -2756,6 +2862,18 @@ var afilt = (function () {
|
|||||||
html += h4.join('\n') + '</tr><table>';
|
html += h4.join('\n') + '</tr><table>';
|
||||||
ebi('audio_eq').innerHTML = html;
|
ebi('audio_eq').innerHTML = html;
|
||||||
|
|
||||||
|
h2 = [];
|
||||||
|
html = ['<table><tr><td rowspan="2">',
|
||||||
|
'<a id="au_drc" class="tgl btn" href="#" tt="' + L.mt_drc + '">enable</a></td>'];
|
||||||
|
|
||||||
|
for (var a = 0; a < r.drch.length; a++) {
|
||||||
|
html.push('<td>' + r.drch[a] + '</td>');
|
||||||
|
h2.push('<td><input type="text" class="drc_v" ' + NOAC + ' k="' + a + '" value="' + r.drcv[a] + '" /></td>');
|
||||||
|
}
|
||||||
|
html = html.join('\n') + '</tr><tr>';
|
||||||
|
html += h2.join('\n') + '</tr><table>';
|
||||||
|
ebi('audio_drc').innerHTML = html;
|
||||||
|
|
||||||
var stp = QSA('a.eq_step');
|
var stp = QSA('a.eq_step');
|
||||||
for (var a = 0, aa = stp.length; a < aa; a++)
|
for (var a = 0, aa = stp.length; a < aa; a++)
|
||||||
stp[a].onclick = eq_step;
|
stp[a].onclick = eq_step;
|
||||||
@@ -2765,8 +2883,12 @@ var afilt = (function () {
|
|||||||
txt[a].oninput = eq_mod;
|
txt[a].oninput = eq_mod;
|
||||||
txt[a].onkeydown = eq_keydown;
|
txt[a].onkeydown = eq_keydown;
|
||||||
}
|
}
|
||||||
|
txt = QSA('input.drc_v');
|
||||||
|
for (var a = 0; a < txt.length; a++)
|
||||||
|
txt[a].oninput = txt[a].onkeydown = adj_drc;
|
||||||
|
|
||||||
bcfg_bind(r, 'eqen', 'au_eq', false, r.apply);
|
bcfg_bind(r, 'eqen', 'au_eq', false, r.apply);
|
||||||
|
bcfg_bind(r, 'drcen', 'au_drc', false, r.apply);
|
||||||
|
|
||||||
r.draw();
|
r.draw();
|
||||||
return r;
|
return r;
|
||||||
@@ -2807,7 +2929,7 @@ function play(tid, is_ev, seek) {
|
|||||||
tn = mp.order.length - 1;
|
tn = mp.order.length - 1;
|
||||||
}
|
}
|
||||||
else if (mpl.pb_mode == 'next') {
|
else if (mpl.pb_mode == 'next') {
|
||||||
treectl.ls_cb = prev_song;
|
treectl.ls_cb = last_song;
|
||||||
return tree_neigh(-1);
|
return tree_neigh(-1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2891,7 +3013,7 @@ function play(tid, is_ev, seek) {
|
|||||||
|
|
||||||
pbar.unwave();
|
pbar.unwave();
|
||||||
if (mpl.waves)
|
if (mpl.waves)
|
||||||
pbar.loadwaves(url + '&th=p');
|
pbar.loadwaves(url.replace(/\bth=opus&/, '') + '&th=p');
|
||||||
|
|
||||||
mpui.progress_updater();
|
mpui.progress_updater();
|
||||||
pbar.onresize();
|
pbar.onresize();
|
||||||
@@ -3032,6 +3154,8 @@ function scan_hash(v) {
|
|||||||
|
|
||||||
|
|
||||||
function eval_hash() {
|
function eval_hash() {
|
||||||
|
window.onpopstate = treectl.onpopfun;
|
||||||
|
|
||||||
var v = hash0;
|
var v = hash0;
|
||||||
hash0 = null;
|
hash0 = null;
|
||||||
if (!v)
|
if (!v)
|
||||||
@@ -3133,11 +3257,35 @@ function eval_hash() {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
|
|
||||||
|
function read_dsort(txt) {
|
||||||
|
try {
|
||||||
|
var zt = (('' + txt).trim() || 'href').split(/,+/g);
|
||||||
|
dsort = [];
|
||||||
|
for (var a = 0; a < zt.length; a++) {
|
||||||
|
var z = zt[a].trim(), n = 1, t = "";
|
||||||
|
if (z.startsWith("-")) {
|
||||||
|
z = z.slice(1);
|
||||||
|
n = -1;
|
||||||
|
}
|
||||||
|
if (z == "sz" || z.indexOf('/.') + 1)
|
||||||
|
t = "int";
|
||||||
|
|
||||||
|
dsort.push([z, n, t]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
toast.warn(10, 'failed to apply default sort order [' + txt + ']:\n' + ex);
|
||||||
|
dsort = [['href', 1, '']];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
read_dsort(dsort);
|
||||||
|
|
||||||
|
|
||||||
function sortfiles(nodes) {
|
function sortfiles(nodes) {
|
||||||
if (!nodes.length)
|
if (!nodes.length)
|
||||||
return nodes;
|
return nodes;
|
||||||
|
|
||||||
var sopts = jread('fsort', [["href", 1, ""]]),
|
var sopts = jread('fsort', jcp(dsort)),
|
||||||
dir1st = sread('dir1st') !== '0';
|
dir1st = sread('dir1st') !== '0';
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -3508,7 +3656,7 @@ var fileman = (function () {
|
|||||||
(function (a) {
|
(function (a) {
|
||||||
f[a].inew.onkeydown = function (e) {
|
f[a].inew.onkeydown = function (e) {
|
||||||
rn_ok(a, true);
|
rn_ok(a, true);
|
||||||
if (e.key == 'Enter')
|
if (e.key.endsWith('Enter'))
|
||||||
return rn_apply();
|
return rn_apply();
|
||||||
};
|
};
|
||||||
QS('.rn_dec' + k).onclick = function (e) {
|
QS('.rn_dec' + k).onclick = function (e) {
|
||||||
@@ -3603,7 +3751,7 @@ var fileman = (function () {
|
|||||||
if (e.key == 'Escape')
|
if (e.key == 'Escape')
|
||||||
return rn_cancel();
|
return rn_cancel();
|
||||||
|
|
||||||
if (e.key == 'Enter')
|
if (e.key.endsWith('Enter'))
|
||||||
return rn_apply();
|
return rn_apply();
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -3658,7 +3806,7 @@ var fileman = (function () {
|
|||||||
|
|
||||||
function rename_cb() {
|
function rename_cb() {
|
||||||
if (this.status !== 201) {
|
if (this.status !== 201) {
|
||||||
var msg = this.responseText;
|
var msg = unpre(this.responseText);
|
||||||
toast.err(9, L.fr_efail + msg);
|
toast.err(9, L.fr_efail + msg);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -3707,7 +3855,7 @@ var fileman = (function () {
|
|||||||
}
|
}
|
||||||
function delete_cb() {
|
function delete_cb() {
|
||||||
if (this.status !== 200) {
|
if (this.status !== 200) {
|
||||||
var msg = this.responseText;
|
var msg = unpre(this.responseText);
|
||||||
toast.err(9, L.fd_err + msg);
|
toast.err(9, L.fd_err + msg);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -3828,7 +3976,7 @@ var fileman = (function () {
|
|||||||
}
|
}
|
||||||
function paste_cb() {
|
function paste_cb() {
|
||||||
if (this.status !== 201) {
|
if (this.status !== 201) {
|
||||||
var msg = this.responseText;
|
var msg = unpre(this.responseText);
|
||||||
toast.err(9, L.fp_err + msg);
|
toast.err(9, L.fp_err + msg);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -3979,7 +4127,10 @@ var showfile = (function () {
|
|||||||
if (lang == 'md' && td.textContent != '-')
|
if (lang == 'md' && td.textContent != '-')
|
||||||
continue;
|
continue;
|
||||||
|
|
||||||
td.innerHTML = '<a href="#" class="doc bri" hl="' + link.id + '">-txt-</a>';
|
td.innerHTML = '<a href="#" id="t' +
|
||||||
|
link.id + '" class="doc bri" hl="' +
|
||||||
|
link.id + '">-txt-</a>';
|
||||||
|
|
||||||
td.getElementsByTagName('a')[0].setAttribute('href', '?doc=' + fn);
|
td.getElementsByTagName('a')[0].setAttribute('href', '?doc=' + fn);
|
||||||
}
|
}
|
||||||
r.mktree();
|
r.mktree();
|
||||||
@@ -4073,6 +4224,9 @@ var showfile = (function () {
|
|||||||
el.textContent = txt;
|
el.textContent = txt;
|
||||||
el.innerHTML = '<code>' + el.innerHTML + '</code>';
|
el.innerHTML = '<code>' + el.innerHTML + '</code>';
|
||||||
if (!window.no_prism) {
|
if (!window.no_prism) {
|
||||||
|
if ((lang == 'conf' || lang == 'cfg') && ('\n' + txt).indexOf('\n# -*- mode: yaml -*-') + 1)
|
||||||
|
lang = 'yaml';
|
||||||
|
|
||||||
el.className = 'prism linkable-line-numbers line-numbers language-' + lang;
|
el.className = 'prism linkable-line-numbers line-numbers language-' + lang;
|
||||||
if (!defer)
|
if (!defer)
|
||||||
fun(el.firstChild);
|
fun(el.firstChild);
|
||||||
@@ -4161,7 +4315,7 @@ var showfile = (function () {
|
|||||||
};
|
};
|
||||||
|
|
||||||
r.mktree = function () {
|
r.mktree = function () {
|
||||||
var html = ['<li class="bn">' + L.tv_lst + '<br />' + linksplit(get_vpath()).join('') + '</li>'];
|
var html = ['<li class="bn">' + L.tv_lst + '<br />' + linksplit(get_vpath()).join('<span>/</span>') + '</li>'];
|
||||||
for (var a = 0; a < r.files.length; a++) {
|
for (var a = 0; a < r.files.length; a++) {
|
||||||
var file = r.files[a];
|
var file = r.files[a];
|
||||||
html.push('<li><a href="?doc=' +
|
html.push('<li><a href="?doc=' +
|
||||||
@@ -4234,7 +4388,8 @@ var thegrid = (function () {
|
|||||||
gfiles.style.display = 'none';
|
gfiles.style.display = 'none';
|
||||||
gfiles.innerHTML = (
|
gfiles.innerHTML = (
|
||||||
'<div id="ghead" class="ghead">' +
|
'<div id="ghead" class="ghead">' +
|
||||||
'<a href="#" class="tgl btn" id="gridsel" tt="' + L.gt_msel + '</a> <span>' + L.gt_zoom + ': ' +
|
'<a href="#" class="tgl btn" id="gridsel" tt="' + L.gt_msel + '</a> ' +
|
||||||
|
'<a href="#" class="tgl btn" id="gridfull" tt="' + L.gt_full + '</a> <span>' + L.gt_zoom + ': ' +
|
||||||
'<a href="#" class="btn" z="-1.2" tt="Hotkey: shift-A">–</a> ' +
|
'<a href="#" class="btn" z="-1.2" tt="Hotkey: shift-A">–</a> ' +
|
||||||
'<a href="#" class="btn" z="1.2" tt="Hotkey: shift-D">+</a></span> <span>' + L.gt_chop + ': ' +
|
'<a href="#" class="btn" z="1.2" tt="Hotkey: shift-D">+</a></span> <span>' + L.gt_chop + ': ' +
|
||||||
'<a href="#" class="btn" l="-1" tt="' + L.gt_c1 + '">–</a> ' +
|
'<a href="#" class="btn" l="-1" tt="' + L.gt_c1 + '">–</a> ' +
|
||||||
@@ -4307,6 +4462,7 @@ var thegrid = (function () {
|
|||||||
filecols.uivis();
|
filecols.uivis();
|
||||||
|
|
||||||
aligngriditems();
|
aligngriditems();
|
||||||
|
restore_scroll();
|
||||||
};
|
};
|
||||||
|
|
||||||
r.setdirty = function () {
|
r.setdirty = function () {
|
||||||
@@ -4362,29 +4518,39 @@ var thegrid = (function () {
|
|||||||
function gclick(e, dbl) {
|
function gclick(e, dbl) {
|
||||||
var oth = ebi(this.getAttribute('ref')),
|
var oth = ebi(this.getAttribute('ref')),
|
||||||
href = noq_href(this),
|
href = noq_href(this),
|
||||||
aplay = ebi('a' + oth.getAttribute('id')),
|
fid = oth.getAttribute('id'),
|
||||||
|
aplay = ebi('a' + fid),
|
||||||
|
atext = ebi('t' + fid),
|
||||||
|
is_txt = atext && showfile.getlang(href),
|
||||||
is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href),
|
is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href),
|
||||||
is_dir = href.endsWith('/'),
|
is_dir = href.endsWith('/'),
|
||||||
|
is_srch = !!ebi('unsearch'),
|
||||||
in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)),
|
in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)),
|
||||||
have_sel = QS('#files tr.sel'),
|
have_sel = QS('#files tr.sel'),
|
||||||
td = oth.closest('td').nextSibling,
|
td = oth.closest('td').nextSibling,
|
||||||
tr = td.parentNode;
|
tr = td.parentNode;
|
||||||
|
|
||||||
if ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e)))) {
|
if (!is_srch && ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e))))) {
|
||||||
td.onclick.call(td, e);
|
td.onclick.call(td, e);
|
||||||
if (e.shiftKey)
|
if (e.shiftKey)
|
||||||
return r.loadsel();
|
return r.loadsel();
|
||||||
clmod(this, 'sel', clgot(tr, 'sel'));
|
clmod(this, 'sel', clgot(tr, 'sel'));
|
||||||
}
|
}
|
||||||
else if (widget.is_open && aplay)
|
|
||||||
aplay.click();
|
|
||||||
|
|
||||||
else if (in_tree && !have_sel)
|
else if (in_tree && !have_sel)
|
||||||
in_tree.click();
|
in_tree.click();
|
||||||
|
|
||||||
|
else if (oth.hasAttribute('download'))
|
||||||
|
oth.click();
|
||||||
|
|
||||||
|
else if (widget.is_open && aplay)
|
||||||
|
aplay.click();
|
||||||
|
|
||||||
else if (is_dir && !have_sel)
|
else if (is_dir && !have_sel)
|
||||||
treectl.reqls(href, true);
|
treectl.reqls(href, true);
|
||||||
|
|
||||||
|
else if (is_txt && !has(['md', 'htm', 'html'], is_txt))
|
||||||
|
atext.click();
|
||||||
|
|
||||||
else if (!is_img && have_sel)
|
else if (!is_img && have_sel)
|
||||||
window.open(href, '_blank');
|
window.open(href, '_blank');
|
||||||
|
|
||||||
@@ -4484,6 +4650,9 @@ var thegrid = (function () {
|
|||||||
if (!r.dirty)
|
if (!r.dirty)
|
||||||
return r.loadsel();
|
return r.loadsel();
|
||||||
|
|
||||||
|
if (dfull != r.full && !sread('gridfull'))
|
||||||
|
bcfg_upd_ui('gridfull', r.full = dfull);
|
||||||
|
|
||||||
var html = [],
|
var html = [],
|
||||||
svgs = new Set(),
|
svgs = new Set(),
|
||||||
max_svgs = CHROME ? 500 : 5000,
|
max_svgs = CHROME ? 500 : 5000,
|
||||||
@@ -4501,8 +4670,10 @@ var thegrid = (function () {
|
|||||||
|
|
||||||
if (r.thumbs) {
|
if (r.thumbs) {
|
||||||
ihref += '?th=' + (have_webp ? 'w' : 'j');
|
ihref += '?th=' + (have_webp ? 'w' : 'j');
|
||||||
|
if (r.full)
|
||||||
|
ihref += 'f'
|
||||||
if (href == "#")
|
if (href == "#")
|
||||||
ihref = SR + '/.cpr/ico/⏏️';
|
ihref = SR + '/.cpr/ico/' + (ref == 'moar' ? '++' : 'exit');
|
||||||
}
|
}
|
||||||
else if (isdir) {
|
else if (isdir) {
|
||||||
ihref = SR + '/.cpr/ico/folder';
|
ihref = SR + '/.cpr/ico/folder';
|
||||||
@@ -4587,6 +4758,7 @@ var thegrid = (function () {
|
|||||||
};
|
};
|
||||||
|
|
||||||
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
|
bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
|
||||||
|
bcfg_bind(r, 'full', 'gridfull', false, r.setdirty);
|
||||||
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
|
bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
|
||||||
bcfg_bind(r, 'en', 'griden', dgrid, function (v) {
|
bcfg_bind(r, 'en', 'griden', dgrid, function (v) {
|
||||||
v ? loadgrid() : r.setvis(true);
|
v ? loadgrid() : r.setvis(true);
|
||||||
@@ -4828,7 +5000,7 @@ document.onkeydown = function (e) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (k == 'Enter' && ae && (ae.onclick || ae.hasAttribute('tabIndex')))
|
if (k.endsWith('Enter') && ae && (ae.onclick || ae.hasAttribute('tabIndex')))
|
||||||
return ev(e) && ae.click() || true;
|
return ev(e) && ae.click() || true;
|
||||||
|
|
||||||
if (aet && aet != 'a' && aet != 'tr' && aet != 'pre')
|
if (aet && aet != 'a' && aet != 'tr' && aet != 'pre')
|
||||||
@@ -4904,6 +5076,13 @@ document.onkeydown = function (e) {
|
|||||||
return QS('#twobytwo').click();
|
return QS('#twobytwo').click();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (showfile.active()) {
|
||||||
|
if (k == 'KeyS')
|
||||||
|
showfile.tglsel();
|
||||||
|
if (k == 'KeyE' && ebi('editdoc').style.display != 'none')
|
||||||
|
ebi('editdoc').click();
|
||||||
|
}
|
||||||
|
|
||||||
if (thegrid.en) {
|
if (thegrid.en) {
|
||||||
if (k == 'KeyS')
|
if (k == 'KeyS')
|
||||||
return ebi('gridsel').click();
|
return ebi('gridsel').click();
|
||||||
@@ -4914,13 +5093,6 @@ document.onkeydown = function (e) {
|
|||||||
if (k == 'KeyD')
|
if (k == 'KeyD')
|
||||||
return QSA('#ghead a[z]')[1].click();
|
return QSA('#ghead a[z]')[1].click();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (showfile.active()) {
|
|
||||||
if (k == 'KeyS')
|
|
||||||
showfile.tglsel();
|
|
||||||
if (k == 'KeyE' && ebi('editdoc').style.display != 'none')
|
|
||||||
ebi('editdoc').click();
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
@@ -4952,6 +5124,11 @@ document.onkeydown = function (e) {
|
|||||||
[
|
[
|
||||||
L.s_ad,
|
L.s_ad,
|
||||||
["adv", "adv", L.s_a1, "30", "key>=1A key<=2B .bpm>165"]
|
["adv", "adv", L.s_a1, "30", "key>=1A key<=2B .bpm>165"]
|
||||||
|
],
|
||||||
|
[
|
||||||
|
L.s_ua,
|
||||||
|
["utl", "ut_min", L.s_u1, "14", "2007-04-08"],
|
||||||
|
["utu", "ut_max", L.s_u2, "14", "2038-01-19"]
|
||||||
]
|
]
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -5021,7 +5198,7 @@ document.onkeydown = function (e) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function ev_search_keydown(e) {
|
function ev_search_keydown(e) {
|
||||||
if (e.key == 'Enter')
|
if (e.key.endsWith('Enter'))
|
||||||
do_search();
|
do_search();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -5096,7 +5273,7 @@ document.onkeydown = function (e) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (k.length == 3) {
|
if (k.length == 3) {
|
||||||
q += k.replace(/sz/, 'size').replace(/dt/, 'date').replace(/l$/, ' >= ').replace(/u$/, ' <= ') + tv;
|
q += k.replace(/l$/, ' >= ').replace(/u$/, ' <= ').replace(/^sz/, 'size').replace(/^dt/, 'date').replace(/^ut/, 'up_at') + tv;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -5149,10 +5326,7 @@ document.onkeydown = function (e) {
|
|||||||
|
|
||||||
function xhr_search_results() {
|
function xhr_search_results() {
|
||||||
if (this.status !== 200) {
|
if (this.status !== 200) {
|
||||||
var msg = this.responseText;
|
var msg = unpre(this.responseText);
|
||||||
if (msg.indexOf('<pre>') === 0)
|
|
||||||
msg = msg.slice(5);
|
|
||||||
|
|
||||||
srch_msg(true, "http " + this.status + ": " + msg);
|
srch_msg(true, "http " + this.status + ": " + msg);
|
||||||
search_in_progress = 0;
|
search_in_progress = 0;
|
||||||
return;
|
return;
|
||||||
@@ -5191,7 +5365,7 @@ document.onkeydown = function (e) {
|
|||||||
if (ext.length > 8)
|
if (ext.length > 8)
|
||||||
ext = '%';
|
ext = '%';
|
||||||
|
|
||||||
var links = linksplit(r.rp + '', id).join(''),
|
var links = linksplit(r.rp + '', id).join('<span>/</span>'),
|
||||||
nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
|
nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
|
||||||
|
|
||||||
for (var b = 0; b < tagord.length; b++) {
|
for (var b = 0; b < tagord.length; b++) {
|
||||||
@@ -5791,6 +5965,9 @@ var treectl = (function () {
|
|||||||
if (res.files[a].tags === undefined)
|
if (res.files[a].tags === undefined)
|
||||||
res.files[a].tags = {};
|
res.files[a].tags = {};
|
||||||
|
|
||||||
|
read_dsort(res.dsort);
|
||||||
|
dfull = res.dfull;
|
||||||
|
|
||||||
srvinf = res.srvinf;
|
srvinf = res.srvinf;
|
||||||
try {
|
try {
|
||||||
ebi('srv_info').innerHTML = ebi('srv_info2').innerHTML = '<span>' + res.srvinf + '</span>';
|
ebi('srv_info').innerHTML = ebi('srv_info2').innerHTML = '<span>' + res.srvinf + '</span>';
|
||||||
@@ -5910,7 +6087,8 @@ var treectl = (function () {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (tn.lead == '-')
|
if (tn.lead == '-')
|
||||||
tn.lead = '<a href="?doc=' + tn.href + '" class="doc' + (lang ? ' bri' : '') +
|
tn.lead = '<a href="?doc=' + bhref + '" id="t' + id +
|
||||||
|
'" class="doc' + (lang ? ' bri' : '') +
|
||||||
'" hl="' + id + '" name="' + hname + '">-txt-</a>';
|
'" hl="' + id + '" name="' + hname + '">-txt-</a>';
|
||||||
|
|
||||||
var ln = ['<tr><td>' + tn.lead + '</td><td><a href="' +
|
var ln = ['<tr><td>' + tn.lead + '</td><td><a href="' +
|
||||||
@@ -5944,6 +6122,7 @@ var treectl = (function () {
|
|||||||
setTimeout(r.tscroll, 100);
|
setTimeout(r.tscroll, 100);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
else ebi('lazy').innerHTML = '';
|
||||||
|
|
||||||
function asdf() {
|
function asdf() {
|
||||||
showfile.mktree();
|
showfile.mktree();
|
||||||
@@ -6127,7 +6306,7 @@ var treectl = (function () {
|
|||||||
if (cs == 'tree' || (cs != 'na' && vw >= 60))
|
if (cs == 'tree' || (cs != 'na' && vw >= 60))
|
||||||
r.entree(null, true);
|
r.entree(null, true);
|
||||||
|
|
||||||
window.onpopstate = function (e) {
|
r.onpopfun = function (e) {
|
||||||
console.log("h-pop " + e.state);
|
console.log("h-pop " + e.state);
|
||||||
if (!e.state)
|
if (!e.state)
|
||||||
return;
|
return;
|
||||||
@@ -6274,6 +6453,7 @@ function apply_perms(res) {
|
|||||||
if (res.frand)
|
if (res.frand)
|
||||||
ebi('u2rand').parentNode.style.display = 'none';
|
ebi('u2rand').parentNode.style.display = 'none';
|
||||||
|
|
||||||
|
u2ts = res.u2ts;
|
||||||
if (up2k)
|
if (up2k)
|
||||||
up2k.set_fsearch();
|
up2k.set_fsearch();
|
||||||
|
|
||||||
@@ -6354,6 +6534,7 @@ var filecols = (function () {
|
|||||||
toh = ths[a].outerHTML, // !ff10
|
toh = ths[a].outerHTML, // !ff10
|
||||||
ttv = L.cols[ths[a].textContent];
|
ttv = L.cols[ths[a].textContent];
|
||||||
|
|
||||||
|
ttv = (ttv ? ttv + '; ' : '') + 'id=<code>' + th.getAttribute('name') + '</code>';
|
||||||
if (!MOBILE && toh) {
|
if (!MOBILE && toh) {
|
||||||
th.innerHTML = '<div class="cfg"><a href="#">-</a></div>' + toh;
|
th.innerHTML = '<div class="cfg"><a href="#">-</a></div>' + toh;
|
||||||
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
|
th.getElementsByTagName('a')[0].onclick = ev_row_tgl;
|
||||||
@@ -7011,16 +7192,17 @@ var msel = (function () {
|
|||||||
form.onsubmit = function (e) {
|
form.onsubmit = function (e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
clmod(sf, 'vis', 1);
|
clmod(sf, 'vis', 1);
|
||||||
sf.textContent = 'creating "' + tb.value + '"...';
|
var dn = tb.value;
|
||||||
|
sf.textContent = 'creating "' + dn + '"...';
|
||||||
|
|
||||||
var fd = new FormData();
|
var fd = new FormData();
|
||||||
fd.append("act", "mkdir");
|
fd.append("act", "mkdir");
|
||||||
fd.append("name", tb.value);
|
fd.append("name", dn);
|
||||||
|
|
||||||
var xhr = new XHR();
|
var xhr = new XHR();
|
||||||
xhr.vp = get_evpath();
|
xhr.vp = get_evpath();
|
||||||
xhr.dn = tb.value;
|
xhr.dn = dn;
|
||||||
xhr.open('POST', xhr.vp, true);
|
xhr.open('POST', dn.startsWith('/') ? (SR || '/') : xhr.vp, true);
|
||||||
xhr.onload = xhr.onerror = cb;
|
xhr.onload = xhr.onerror = cb;
|
||||||
xhr.responseType = 'text';
|
xhr.responseType = 'text';
|
||||||
xhr.send(fd);
|
xhr.send(fd);
|
||||||
@@ -7037,7 +7219,7 @@ var msel = (function () {
|
|||||||
xhrchk(this, L.fd_xe1, L.fd_xe2);
|
xhrchk(this, L.fd_xe1, L.fd_xe2);
|
||||||
|
|
||||||
if (this.status !== 201) {
|
if (this.status !== 201) {
|
||||||
sf.textContent = 'error: ' + this.responseText;
|
sf.textContent = 'error: ' + unpre(this.responseText);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -7046,8 +7228,9 @@ var msel = (function () {
|
|||||||
sf.textContent = '';
|
sf.textContent = '';
|
||||||
|
|
||||||
var dn = this.getResponseHeader('X-New-Dir');
|
var dn = this.getResponseHeader('X-New-Dir');
|
||||||
dn = dn || uricom_enc(this.dn);
|
dn = dn ? '/' + dn + '/' : uricom_enc(this.dn);
|
||||||
treectl.goto(this.vp + dn + '/', true);
|
treectl.goto(dn, true);
|
||||||
|
tree_scrollto();
|
||||||
}
|
}
|
||||||
})();
|
})();
|
||||||
|
|
||||||
@@ -7084,7 +7267,7 @@ var msel = (function () {
|
|||||||
xhrchk(this, L.fsm_xe1, L.fsm_xe2);
|
xhrchk(this, L.fsm_xe1, L.fsm_xe2);
|
||||||
|
|
||||||
if (this.status < 200 || this.status > 201) {
|
if (this.status < 200 || this.status > 201) {
|
||||||
sf.textContent = 'error: ' + this.responseText;
|
sf.textContent = 'error: ' + unpre(this.responseText);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -7284,7 +7467,7 @@ function sandbox(tgt, rules, cls, html) {
|
|||||||
html = '<html class="iframe ' + document.documentElement.className + '"><head><style>' + globalcss() +
|
html = '<html class="iframe ' + document.documentElement.className + '"><head><style>' + globalcss() +
|
||||||
'</style><base target="_parent"></head><body id="b" class="logue ' + cls + '">' + html +
|
'</style><base target="_parent"></head><body id="b" class="logue ' + cls + '">' + html +
|
||||||
'<script>' + env + '</script>' + sandboxjs() +
|
'<script>' + env + '</script>' + sandboxjs() +
|
||||||
'<script>var d=document.documentElement,' +
|
'<script>var d=document.documentElement,TS="' + TS + '",' +
|
||||||
'loc=new URL("' + location.href.split('?')[0] + '");' +
|
'loc=new URL("' + location.href.split('?')[0] + '");' +
|
||||||
'function say(m){window.parent.postMessage(m,"*")};' +
|
'function say(m){window.parent.postMessage(m,"*")};' +
|
||||||
'setTimeout(function(){var its=0,pih=-1,f=function(){' +
|
'setTimeout(function(){var its=0,pih=-1,f=function(){' +
|
||||||
@@ -7429,7 +7612,7 @@ var unpost = (function () {
|
|||||||
'<tr><td><a me="' + me + '" class="n' + a + '" href="#">' + L.un_del + '</a></td>' +
|
'<tr><td><a me="' + me + '" class="n' + a + '" href="#">' + L.un_del + '</a></td>' +
|
||||||
'<td>' + unix2iso(res[a].at) + '</td>' +
|
'<td>' + unix2iso(res[a].at) + '</td>' +
|
||||||
'<td>' + res[a].sz + '</td>' +
|
'<td>' + res[a].sz + '</td>' +
|
||||||
'<td>' + linksplit(res[a].vp).join(' ') + '</td></tr>');
|
'<td>' + linksplit(res[a].vp).join('<span> / </span>') + '</td></tr>');
|
||||||
}
|
}
|
||||||
|
|
||||||
html.push("</tbody></table>");
|
html.push("</tbody></table>");
|
||||||
@@ -7462,7 +7645,7 @@ var unpost = (function () {
|
|||||||
|
|
||||||
function unpost_delete_cb() {
|
function unpost_delete_cb() {
|
||||||
if (this.status !== 200) {
|
if (this.status !== 200) {
|
||||||
var msg = this.responseText;
|
var msg = unpre(this.responseText);
|
||||||
toast.err(9, L.un_derr + msg);
|
toast.err(9, L.un_derr + msg);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -7585,10 +7768,36 @@ ebi('path').onclick = function (e) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
var scroll_y = -1;
|
||||||
|
var scroll_vp = '\n';
|
||||||
|
var scroll_obj = null;
|
||||||
|
function persist_scroll() {
|
||||||
|
var obj = scroll_obj;
|
||||||
|
if (!obj) {
|
||||||
|
var o1 = document.getElementsByTagName('html')[0];
|
||||||
|
var o2 = document.body;
|
||||||
|
obj = o1.scrollTop > o2.scrollTop ? o1 : o2;
|
||||||
|
}
|
||||||
|
var y = obj.scrollTop;
|
||||||
|
if (y > 0)
|
||||||
|
scroll_obj = obj;
|
||||||
|
|
||||||
|
scroll_y = y;
|
||||||
|
scroll_vp = get_evpath();
|
||||||
|
}
|
||||||
|
function restore_scroll() {
|
||||||
|
if (get_evpath() == scroll_vp && scroll_obj && scroll_obj.scrollTop < 1)
|
||||||
|
scroll_obj.scrollTop = scroll_y;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
ebi('files').onclick = ebi('docul').onclick = function (e) {
|
ebi('files').onclick = ebi('docul').onclick = function (e) {
|
||||||
if (!treectl.csel && e && (ctrl(e) || e.shiftKey))
|
if (!treectl.csel && e && (ctrl(e) || e.shiftKey))
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
|
if (!showfile.active())
|
||||||
|
persist_scroll();
|
||||||
|
|
||||||
var tgt = e.target.closest('a[id]');
|
var tgt = e.target.closest('a[id]');
|
||||||
if (tgt && tgt.getAttribute('id').indexOf('f-') === 0 && tgt.textContent.endsWith('/')) {
|
if (tgt && tgt.getAttribute('id').indexOf('f-') === 0 && tgt.textContent.endsWith('/')) {
|
||||||
var el = treectl.find(tgt.textContent.slice(0, -1));
|
var el = treectl.find(tgt.textContent.slice(0, -1));
|
||||||
|
|||||||
@@ -931,7 +931,13 @@ var set_lno = (function () {
|
|||||||
// hotkeys / toolbar
|
// hotkeys / toolbar
|
||||||
(function () {
|
(function () {
|
||||||
var keydown = function (ev) {
|
var keydown = function (ev) {
|
||||||
ev = ev || window.event;
|
if (!ev && window.event) {
|
||||||
|
ev = window.event;
|
||||||
|
if (localStorage.dev_fbw == 1) {
|
||||||
|
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||||
|
console.error('using window.event');
|
||||||
|
}
|
||||||
|
}
|
||||||
var kc = ev.code || ev.keyCode || ev.which,
|
var kc = ev.code || ev.keyCode || ev.which,
|
||||||
editing = document.activeElement == dom_src;
|
editing = document.activeElement == dom_src;
|
||||||
|
|
||||||
@@ -1003,7 +1009,7 @@ var set_lno = (function () {
|
|||||||
md_home(ev.shiftKey);
|
md_home(ev.shiftKey);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
if (!ev.shiftKey && (ev.code.endsWith("Enter") || kc == 13)) {
|
||||||
return md_newline();
|
return md_newline();
|
||||||
}
|
}
|
||||||
if (!ev.shiftKey && kc == 8) {
|
if (!ev.shiftKey && kc == 8) {
|
||||||
|
|||||||
@@ -10,6 +10,7 @@
|
|||||||
{{ html_head }}
|
{{ html_head }}
|
||||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||||
|
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
@@ -48,9 +49,13 @@
|
|||||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
||||||
</pre>
|
</pre>
|
||||||
{% if s %}
|
<ul>
|
||||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
{% if s %}
|
||||||
{% endif %}
|
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||||
|
{% endif %}
|
||||||
|
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||||
|
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||||
<pre>
|
<pre>
|
||||||
@@ -73,10 +78,13 @@
|
|||||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||||
</pre>
|
</pre>
|
||||||
{% if s %}
|
<ul>
|
||||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
{% if s %}
|
||||||
{% endif %}
|
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
|
||||||
|
{% endif %}
|
||||||
|
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||||
|
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||||
|
</ul>
|
||||||
<p>or the emergency alternative (gnome/gui-only):</p>
|
<p>or the emergency alternative (gnome/gui-only):</p>
|
||||||
<!-- gnome-bug: ignores vp -->
|
<!-- gnome-bug: ignores vp -->
|
||||||
<pre>
|
<pre>
|
||||||
@@ -123,8 +131,14 @@
|
|||||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
|
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
|
||||||
</pre>
|
</pre>
|
||||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
<ul>
|
||||||
|
{% if args.ftps %}
|
||||||
|
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||||
|
{% endif %}
|
||||||
|
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||||
|
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||||
|
</ul>
|
||||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||||
<pre>
|
<pre>
|
||||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||||
@@ -145,8 +159,14 @@
|
|||||||
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
|
||||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
|
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
|
||||||
</pre>
|
</pre>
|
||||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
<ul>
|
||||||
|
{% if args.ftps %}
|
||||||
|
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
|
||||||
|
{% endif %}
|
||||||
|
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
|
||||||
|
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
|
||||||
|
</ul>
|
||||||
<p>emergency alternative (gnome/gui-only):</p>
|
<p>emergency alternative (gnome/gui-only):</p>
|
||||||
<!-- gnome-bug: ignores vp -->
|
<!-- gnome-bug: ignores vp -->
|
||||||
<pre>
|
<pre>
|
||||||
@@ -178,7 +198,7 @@
|
|||||||
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
||||||
</pre>
|
</pre>
|
||||||
{% if s %}
|
{% if s %}
|
||||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
|
<ul><li>if you are on LAN (or just dont have valid certificates), add <code>-td</code></li></ul>
|
||||||
{% endif %}
|
{% endif %}
|
||||||
<p>
|
<p>
|
||||||
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
|
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)
|
||||||
|
|||||||
@@ -1,3 +1,18 @@
|
|||||||
|
:root {
|
||||||
|
--fg: #ccc;
|
||||||
|
--fg-max: #fff;
|
||||||
|
--bg-u2: #2b2b2b;
|
||||||
|
--bg-u5: #444;
|
||||||
|
}
|
||||||
|
html.y {
|
||||||
|
--fg: #222;
|
||||||
|
--fg-max: #000;
|
||||||
|
--bg-u2: #f7f7f7;
|
||||||
|
--bg-u5: #ccc;
|
||||||
|
}
|
||||||
|
html.bz {
|
||||||
|
--bg-u2: #202231;
|
||||||
|
}
|
||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'scp';
|
font-family: 'scp';
|
||||||
font-display: swap;
|
font-display: swap;
|
||||||
@@ -14,7 +29,9 @@ html {
|
|||||||
max-width: min(34em, 90%);
|
max-width: min(34em, 90%);
|
||||||
max-width: min(34em, calc(100% - 7em));
|
max-width: min(34em, calc(100% - 7em));
|
||||||
color: #ddd;
|
color: #ddd;
|
||||||
|
color: var(--fg);
|
||||||
background: #333;
|
background: #333;
|
||||||
|
background: var(--bg-u2);
|
||||||
border: 0 solid #777;
|
border: 0 solid #777;
|
||||||
box-shadow: 0 .2em .5em #111;
|
box-shadow: 0 .2em .5em #111;
|
||||||
border-radius: .4em;
|
border-radius: .4em;
|
||||||
@@ -159,9 +176,10 @@ html {
|
|||||||
#modalc code,
|
#modalc code,
|
||||||
#tt code {
|
#tt code {
|
||||||
color: #eee;
|
color: #eee;
|
||||||
|
color: var(--fg-max);
|
||||||
background: #444;
|
background: #444;
|
||||||
|
background: var(--bg-u5);
|
||||||
padding: .1em .3em;
|
padding: .1em .3em;
|
||||||
border-top: 1px solid #777;
|
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
line-height: 1.7em;
|
line-height: 1.7em;
|
||||||
}
|
}
|
||||||
@@ -169,22 +187,15 @@ html {
|
|||||||
color: #f6a;
|
color: #f6a;
|
||||||
}
|
}
|
||||||
html.y #tt {
|
html.y #tt {
|
||||||
color: #333;
|
|
||||||
background: #fff;
|
|
||||||
border-color: #888 #000 #777 #000;
|
border-color: #888 #000 #777 #000;
|
||||||
}
|
}
|
||||||
html.bz #tt {
|
html.bz #tt {
|
||||||
background: #202231;
|
|
||||||
border-color: #3b3f58;
|
border-color: #3b3f58;
|
||||||
}
|
}
|
||||||
html.y #tt,
|
html.y #tt,
|
||||||
html.y #toast {
|
html.y #toast {
|
||||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||||
}
|
}
|
||||||
html.y #tt code {
|
|
||||||
background: #060;
|
|
||||||
color: #fff;
|
|
||||||
}
|
|
||||||
#modalc code {
|
#modalc code {
|
||||||
color: #060;
|
color: #060;
|
||||||
background: transparent;
|
background: transparent;
|
||||||
@@ -322,6 +333,9 @@ html.y .btn:focus {
|
|||||||
box-shadow: 0 .1em .2em #037 inset;
|
box-shadow: 0 .1em .2em #037 inset;
|
||||||
outline: #037 solid .1em;
|
outline: #037 solid .1em;
|
||||||
}
|
}
|
||||||
|
input[type="submit"] {
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
input[type="text"]:focus,
|
input[type="text"]:focus,
|
||||||
input:not([type]):focus,
|
input:not([type]):focus,
|
||||||
textarea:focus {
|
textarea:focus {
|
||||||
|
|||||||
@@ -852,7 +852,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
setmsg(suggest_up2k, 'msg');
|
setmsg(suggest_up2k, 'msg');
|
||||||
|
|
||||||
var parallel_uploads = icfg_get('nthread'),
|
var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
|
||||||
uc = {},
|
uc = {},
|
||||||
fdom_ctr = 0,
|
fdom_ctr = 0,
|
||||||
biggest_file = 0;
|
biggest_file = 0;
|
||||||
@@ -861,6 +861,7 @@ function up2k_init(subtle) {
|
|||||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||||
|
bcfg_bind(uc, 'u2ts', 'u2ts', !u2ts.endsWith('u'), set_u2ts, false);
|
||||||
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||||
|
|
||||||
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||||
@@ -1042,7 +1043,7 @@ function up2k_init(subtle) {
|
|||||||
clmod(ebi(v), 'hl', 1);
|
clmod(ebi(v), 'hl', 1);
|
||||||
}
|
}
|
||||||
function offdrag(e) {
|
function offdrag(e) {
|
||||||
ev(e);
|
noope(e);
|
||||||
|
|
||||||
var v = this.getAttribute('v');
|
var v = this.getAttribute('v');
|
||||||
if (v)
|
if (v)
|
||||||
@@ -1339,6 +1340,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
function up_them(good_files) {
|
function up_them(good_files) {
|
||||||
start_actx();
|
start_actx();
|
||||||
|
draw_turbo();
|
||||||
var evpath = get_evpath(),
|
var evpath = get_evpath(),
|
||||||
draw_each = good_files.length < 50;
|
draw_each = good_files.length < 50;
|
||||||
|
|
||||||
@@ -1361,7 +1363,7 @@ function up2k_init(subtle) {
|
|||||||
name = good_files[a][1],
|
name = good_files[a][1],
|
||||||
fdir = evpath,
|
fdir = evpath,
|
||||||
now = Date.now(),
|
now = Date.now(),
|
||||||
lmod = fobj.lastModified || now,
|
lmod = uc.u2ts ? (fobj.lastModified || now) : 0,
|
||||||
ofs = name.lastIndexOf('/') + 1;
|
ofs = name.lastIndexOf('/') + 1;
|
||||||
|
|
||||||
if (ofs) {
|
if (ofs) {
|
||||||
@@ -1405,7 +1407,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
pvis.addfile([
|
pvis.addfile([
|
||||||
uc.fsearch ? esc(entry.name) : linksplit(
|
uc.fsearch ? esc(entry.name) : linksplit(
|
||||||
entry.purl + uricom_enc(entry.name)).join(' '),
|
entry.purl + uricom_enc(entry.name)).join(' / '),
|
||||||
'📐 ' + L.u_hashing,
|
'📐 ' + L.u_hashing,
|
||||||
''
|
''
|
||||||
], entry.size, draw_each);
|
], entry.size, draw_each);
|
||||||
@@ -2282,7 +2284,7 @@ function up2k_init(subtle) {
|
|||||||
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
|
||||||
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
|
||||||
|
|
||||||
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
|
||||||
}
|
}
|
||||||
msg = msg.join('<br />\n');
|
msg = msg.join('<br />\n');
|
||||||
}
|
}
|
||||||
@@ -2316,7 +2318,7 @@ function up2k_init(subtle) {
|
|||||||
url += '?k=' + fk;
|
url += '?k=' + fk;
|
||||||
}
|
}
|
||||||
|
|
||||||
pvis.seth(t.n, 0, linksplit(url).join(' '));
|
pvis.seth(t.n, 0, linksplit(url).join(' / '));
|
||||||
}
|
}
|
||||||
|
|
||||||
var chunksize = get_chunksize(t.size),
|
var chunksize = get_chunksize(t.size),
|
||||||
@@ -2400,15 +2402,12 @@ function up2k_init(subtle) {
|
|||||||
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
pvis.seth(t.n, 2, L.u_ehstmp, t);
|
||||||
|
|
||||||
var err = "",
|
var err = "",
|
||||||
rsp = (xhr.responseText + ''),
|
rsp = unpre(xhr.responseText),
|
||||||
ofs = rsp.lastIndexOf('\nURL: ');
|
ofs = rsp.lastIndexOf('\nURL: ');
|
||||||
|
|
||||||
if (ofs !== -1)
|
if (ofs !== -1)
|
||||||
rsp = rsp.slice(0, ofs);
|
rsp = rsp.slice(0, ofs);
|
||||||
|
|
||||||
if (rsp.indexOf('<pre>') === 0)
|
|
||||||
rsp = rsp.slice(5);
|
|
||||||
|
|
||||||
if (rsp.indexOf('rate-limit ') !== -1) {
|
if (rsp.indexOf('rate-limit ') !== -1) {
|
||||||
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
|
||||||
console.log("rate-limit: " + penalty);
|
console.log("rate-limit: " + penalty);
|
||||||
@@ -2427,7 +2426,7 @@ function up2k_init(subtle) {
|
|||||||
err = rsp;
|
err = rsp;
|
||||||
ofs = err.indexOf('\n/');
|
ofs = err.indexOf('\n/');
|
||||||
if (ofs !== -1) {
|
if (ofs !== -1) {
|
||||||
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
|
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' / ');
|
||||||
}
|
}
|
||||||
if (!t.rechecks && (err_pend || err_srcb)) {
|
if (!t.rechecks && (err_pend || err_srcb)) {
|
||||||
t.rechecks = 0;
|
t.rechecks = 0;
|
||||||
@@ -2534,7 +2533,7 @@ function up2k_init(subtle) {
|
|||||||
cdr = t.size;
|
cdr = t.size;
|
||||||
|
|
||||||
var orz = function (xhr) {
|
var orz = function (xhr) {
|
||||||
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
|
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
|
||||||
if (txt.indexOf('upload blocked by x') + 1) {
|
if (txt.indexOf('upload blocked by x') + 1) {
|
||||||
apop(st.busy.upload, upt);
|
apop(st.busy.upload, upt);
|
||||||
apop(t.postlist, npart);
|
apop(t.postlist, npart);
|
||||||
@@ -2620,7 +2619,7 @@ function up2k_init(subtle) {
|
|||||||
wpx = window.innerWidth,
|
wpx = window.innerWidth,
|
||||||
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
fpx = parseInt(getComputedStyle(bar)['font-size']),
|
||||||
wem = wpx * 1.0 / fpx,
|
wem = wpx * 1.0 / fpx,
|
||||||
wide = wem > 54 ? 'w' : '',
|
wide = wem > 57 ? 'w' : '',
|
||||||
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
|
||||||
btn = ebi('u2btn');
|
btn = ebi('u2btn');
|
||||||
|
|
||||||
@@ -2629,7 +2628,7 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
|
||||||
}
|
}
|
||||||
|
|
||||||
wide = wem > 82 ? 'ww' : wide;
|
wide = wem > 86 ? 'ww' : wide;
|
||||||
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
parent = ebi(wide == 'ww' ? 'u2c3w' : 'u2c3t');
|
||||||
var its = [ebi('u2etaw'), ebi('u2cards')];
|
var its = [ebi('u2etaw'), ebi('u2cards')];
|
||||||
if (its[0].parentNode !== parent) {
|
if (its[0].parentNode !== parent) {
|
||||||
@@ -2686,7 +2685,11 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
parallel_uploads = v;
|
parallel_uploads = v;
|
||||||
swrite('nthread', v);
|
if (v == u2j)
|
||||||
|
localStorage.removeItem('nthread');
|
||||||
|
else
|
||||||
|
swrite('nthread', v);
|
||||||
|
|
||||||
clmod(obj, 'err');
|
clmod(obj, 'err');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -2700,7 +2703,7 @@ function up2k_init(subtle) {
|
|||||||
parallel_uploads = 16;
|
parallel_uploads = 16;
|
||||||
|
|
||||||
obj.value = parallel_uploads;
|
obj.value = parallel_uploads;
|
||||||
bumpthread({ "target": 1 })
|
bumpthread({ "target": 1 });
|
||||||
}
|
}
|
||||||
|
|
||||||
function tgl_fsearch() {
|
function tgl_fsearch() {
|
||||||
@@ -2710,7 +2713,12 @@ function up2k_init(subtle) {
|
|||||||
function draw_turbo() {
|
function draw_turbo() {
|
||||||
if (turbolvl < 0 && uc.turbo) {
|
if (turbolvl < 0 && uc.turbo) {
|
||||||
bcfg_set('u2turbo', uc.turbo = false);
|
bcfg_set('u2turbo', uc.turbo = false);
|
||||||
toast.err(10, "turbo is disabled in server config");
|
toast.err(10, L.u_turbo_c);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (uc.turbo && !has(perms, 'read')) {
|
||||||
|
bcfg_set('u2turbo', uc.turbo = false);
|
||||||
|
toast.warn(30, L.u_turbo_g);
|
||||||
}
|
}
|
||||||
|
|
||||||
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
|
||||||
@@ -2811,6 +2819,8 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
function set_fsearch(new_state) {
|
function set_fsearch(new_state) {
|
||||||
var fixed = false,
|
var fixed = false,
|
||||||
|
persist = new_state !== undefined,
|
||||||
|
preferred = bcfg_get('fsearch', undefined),
|
||||||
can_write = false;
|
can_write = false;
|
||||||
|
|
||||||
if (!ebi('fsearch')) {
|
if (!ebi('fsearch')) {
|
||||||
@@ -2827,8 +2837,14 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (new_state === undefined)
|
||||||
|
new_state = preferred;
|
||||||
|
|
||||||
if (new_state !== undefined)
|
if (new_state !== undefined)
|
||||||
bcfg_set('fsearch', uc.fsearch = new_state);
|
if (persist)
|
||||||
|
bcfg_set('fsearch', uc.fsearch = new_state);
|
||||||
|
else
|
||||||
|
bcfg_upd_ui('fsearch', uc.fsearch = new_state);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
clmod(ebi('u2c3w'), 's', !can_write);
|
clmod(ebi('u2c3w'), 's', !can_write);
|
||||||
@@ -2851,6 +2867,9 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
|
||||||
ebi('u2mu').style.display = potato ? '' : 'none';
|
ebi('u2mu').style.display = potato ? '' : 'none';
|
||||||
|
|
||||||
|
if (u2ts.startsWith('f') || !sread('u2ts'))
|
||||||
|
uc.u2ts = bcfg_upd_ui('u2ts', !u2ts.endsWith('u'));
|
||||||
|
|
||||||
draw_turbo();
|
draw_turbo();
|
||||||
draw_life();
|
draw_life();
|
||||||
onresize();
|
onresize();
|
||||||
@@ -2875,12 +2894,24 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function set_u2sort() {
|
function set_u2sort(en) {
|
||||||
if (u2sort.indexOf('f') < 0)
|
if (u2sort.indexOf('f') < 0)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
bcfg_set('u2sort', uc.az = u2sort.indexOf('n') + 1);
|
var fen = uc.az = u2sort.indexOf('n') + 1;
|
||||||
localStorage.removeItem('u2sort');
|
bcfg_upd_ui('u2sort', fen);
|
||||||
|
if (en != fen)
|
||||||
|
toast.warn(10, L.ul_btnlk);
|
||||||
|
}
|
||||||
|
|
||||||
|
function set_u2ts(en) {
|
||||||
|
if (u2ts.indexOf('f') < 0)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var fen = !u2ts.endsWith('u');
|
||||||
|
bcfg_upd_ui('u2ts', fen);
|
||||||
|
if (en != fen)
|
||||||
|
toast.warn(10, L.ul_btnlk);
|
||||||
}
|
}
|
||||||
|
|
||||||
function set_hashw() {
|
function set_hashw() {
|
||||||
@@ -2978,7 +3009,7 @@ ebi('ico1').onclick = function () {
|
|||||||
if (QS('#op_up2k.act'))
|
if (QS('#op_up2k.act'))
|
||||||
goto_up2k();
|
goto_up2k();
|
||||||
|
|
||||||
apply_perms({ "perms": perms, "frand": frand });
|
apply_perms({ "perms": perms, "frand": frand, "u2ts": u2ts });
|
||||||
|
|
||||||
|
|
||||||
(function () {
|
(function () {
|
||||||
|
|||||||
@@ -6,6 +6,11 @@ if (!window.console || !console.log)
|
|||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
if (window.CGV)
|
||||||
|
for (var k in CGV)
|
||||||
|
window[k] = CGV[k];
|
||||||
|
|
||||||
|
|
||||||
var wah = '',
|
var wah = '',
|
||||||
NOAC = 'autocorrect="off" autocapitalize="off"',
|
NOAC = 'autocorrect="off" autocapitalize="off"',
|
||||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||||
@@ -272,7 +277,13 @@ function anymod(e, shift_ok) {
|
|||||||
|
|
||||||
|
|
||||||
function ev(e) {
|
function ev(e) {
|
||||||
e = e || window.event;
|
if (!e && window.event) {
|
||||||
|
e = window.event;
|
||||||
|
if (localStorage.dev_fbw == 1) {
|
||||||
|
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
|
||||||
|
console.error('using window.event');
|
||||||
|
}
|
||||||
|
}
|
||||||
if (!e)
|
if (!e)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
@@ -291,7 +302,7 @@ function ev(e) {
|
|||||||
|
|
||||||
|
|
||||||
function noope(e) {
|
function noope(e) {
|
||||||
ev(e);
|
try { ev(e); } catch (ex) { }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -364,7 +375,7 @@ function import_js(url, cb, ecb) {
|
|||||||
var head = document.head || document.getElementsByTagName('head')[0];
|
var head = document.head || document.getElementsByTagName('head')[0];
|
||||||
var script = mknod('script');
|
var script = mknod('script');
|
||||||
script.type = 'text/javascript';
|
script.type = 'text/javascript';
|
||||||
script.src = url;
|
script.src = url + '?_=' + (window.TS || 'a');
|
||||||
script.onload = cb;
|
script.onload = cb;
|
||||||
script.onerror = ecb || function () {
|
script.onerror = ecb || function () {
|
||||||
var m = 'Failed to load module:\n' + url;
|
var m = 'Failed to load module:\n' + url;
|
||||||
@@ -478,7 +489,7 @@ function yscroll() {
|
|||||||
|
|
||||||
function showsort(tab) {
|
function showsort(tab) {
|
||||||
var v, vn, v1, v2, th = tab.tHead,
|
var v, vn, v1, v2, th = tab.tHead,
|
||||||
sopts = jread('fsort', [["href", 1, ""]]);
|
sopts = jread('fsort', jcp(dsort));
|
||||||
|
|
||||||
th && (th = th.rows[0]) && (th = th.cells);
|
th && (th = th.rows[0]) && (th = th.cells);
|
||||||
|
|
||||||
@@ -617,9 +628,8 @@ function linksplit(rp, id) {
|
|||||||
}
|
}
|
||||||
var vlink = esc(uricom_dec(link));
|
var vlink = esc(uricom_dec(link));
|
||||||
|
|
||||||
if (link.indexOf('/') !== -1) {
|
if (link.indexOf('/') !== -1)
|
||||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
vlink = vlink.slice(0, -1);
|
||||||
}
|
|
||||||
|
|
||||||
if (!rp) {
|
if (!rp) {
|
||||||
if (q)
|
if (q)
|
||||||
@@ -751,17 +761,6 @@ function noq_href(el) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
function get_pwd() {
|
|
||||||
var k = HTTPS ? 's=' : 'd=',
|
|
||||||
pwd = ('; ' + document.cookie).split('; cppw' + k);
|
|
||||||
|
|
||||||
if (pwd.length < 2)
|
|
||||||
return null;
|
|
||||||
|
|
||||||
return decodeURIComponent(pwd[1].split(';')[0]);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function unix2iso(ts) {
|
function unix2iso(ts) {
|
||||||
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
|
||||||
}
|
}
|
||||||
@@ -932,7 +931,7 @@ function fcfg_get(name, defval) {
|
|||||||
val = parseFloat(sread(name));
|
val = parseFloat(sread(name));
|
||||||
|
|
||||||
if (!isNum(val))
|
if (!isNum(val))
|
||||||
return parseFloat(o ? o.value : defval);
|
return parseFloat(o && o.value !== '' ? o.value : defval);
|
||||||
|
|
||||||
if (o)
|
if (o)
|
||||||
o.value = val;
|
o.value = val;
|
||||||
@@ -977,13 +976,14 @@ function bcfg_set(name, val) {
|
|||||||
function bcfg_upd_ui(name, val) {
|
function bcfg_upd_ui(name, val) {
|
||||||
var o = ebi(name);
|
var o = ebi(name);
|
||||||
if (!o)
|
if (!o)
|
||||||
return;
|
return val;
|
||||||
|
|
||||||
if (o.getAttribute('type') == 'checkbox')
|
if (o.getAttribute('type') == 'checkbox')
|
||||||
o.checked = val;
|
o.checked = val;
|
||||||
else if (o) {
|
else if (o) {
|
||||||
clmod(o, 'on', val);
|
clmod(o, 'on', val);
|
||||||
}
|
}
|
||||||
|
return val;
|
||||||
}
|
}
|
||||||
|
|
||||||
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||||
@@ -1351,6 +1351,11 @@ function lf2br(txt) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function unpre(txt) {
|
||||||
|
return ('' + txt).replace(/^<pre>/, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
var toast = (function () {
|
var toast = (function () {
|
||||||
var r = {},
|
var r = {},
|
||||||
te = null,
|
te = null,
|
||||||
@@ -1463,6 +1468,7 @@ var modal = (function () {
|
|||||||
r.load();
|
r.load();
|
||||||
|
|
||||||
r.busy = false;
|
r.busy = false;
|
||||||
|
r.nofocus = 0;
|
||||||
|
|
||||||
r.show = function (html) {
|
r.show = function (html) {
|
||||||
o = mknod('div', 'modal');
|
o = mknod('div', 'modal');
|
||||||
@@ -1476,6 +1482,7 @@ var modal = (function () {
|
|||||||
a.onclick = ng;
|
a.onclick = ng;
|
||||||
|
|
||||||
a = ebi('modal-ok');
|
a = ebi('modal-ok');
|
||||||
|
a.addEventListener('blur', onblur);
|
||||||
a.onclick = ok;
|
a.onclick = ok;
|
||||||
|
|
||||||
var inp = ebi('modali');
|
var inp = ebi('modali');
|
||||||
@@ -1486,6 +1493,7 @@ var modal = (function () {
|
|||||||
}, 0);
|
}, 0);
|
||||||
|
|
||||||
document.addEventListener('focus', onfocus);
|
document.addEventListener('focus', onfocus);
|
||||||
|
document.addEventListener('selectionchange', onselch);
|
||||||
timer.add(onfocus);
|
timer.add(onfocus);
|
||||||
if (cb_up)
|
if (cb_up)
|
||||||
setTimeout(cb_up, 1);
|
setTimeout(cb_up, 1);
|
||||||
@@ -1493,6 +1501,11 @@ var modal = (function () {
|
|||||||
|
|
||||||
r.hide = function () {
|
r.hide = function () {
|
||||||
timer.rm(onfocus);
|
timer.rm(onfocus);
|
||||||
|
try {
|
||||||
|
ebi('modal-ok').removeEventListener('blur', onblur);
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
document.removeEventListener('selectionchange', onselch);
|
||||||
document.removeEventListener('focus', onfocus);
|
document.removeEventListener('focus', onfocus);
|
||||||
document.removeEventListener('keydown', onkey);
|
document.removeEventListener('keydown', onkey);
|
||||||
o.parentNode.removeChild(o);
|
o.parentNode.removeChild(o);
|
||||||
@@ -1514,17 +1527,35 @@ var modal = (function () {
|
|||||||
cb_ng(null);
|
cb_ng(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var onselch = function () {
|
||||||
|
try {
|
||||||
|
if (window.getSelection() + '')
|
||||||
|
r.nofocus = 15;
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
};
|
||||||
|
|
||||||
|
var onblur = function () {
|
||||||
|
r.nofocus = 3;
|
||||||
|
};
|
||||||
|
|
||||||
var onfocus = function (e) {
|
var onfocus = function (e) {
|
||||||
|
if (MOBILE)
|
||||||
|
return;
|
||||||
|
|
||||||
var ctr = ebi('modalc');
|
var ctr = ebi('modalc');
|
||||||
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
|
||||||
return;
|
return;
|
||||||
|
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
|
if (--r.nofocus >= 0)
|
||||||
|
return;
|
||||||
|
|
||||||
if (ctr = ebi('modal-ok'))
|
if (ctr = ebi('modal-ok'))
|
||||||
ctr.focus();
|
ctr.focus();
|
||||||
}, 20);
|
}, 20);
|
||||||
ev(e);
|
ev(e);
|
||||||
}
|
};
|
||||||
|
|
||||||
var onkey = function (e) {
|
var onkey = function (e) {
|
||||||
var k = e.code,
|
var k = e.code,
|
||||||
@@ -1535,18 +1566,18 @@ var modal = (function () {
|
|||||||
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
if (k == 'Space' && ae && (ae === eok || ae === eng))
|
||||||
k = 'Enter';
|
k = 'Enter';
|
||||||
|
|
||||||
if (k == 'Enter') {
|
if (k.endsWith('Enter')) {
|
||||||
if (ae && ae == eng)
|
if (ae && ae == eng)
|
||||||
return ng();
|
return ng(e);
|
||||||
|
|
||||||
return ok();
|
return ok(e);
|
||||||
}
|
}
|
||||||
|
|
||||||
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
|
||||||
return (ae == eok ? eng : eok).focus() || ev(e);
|
return (ae == eok ? eng : eok).focus() || ev(e);
|
||||||
|
|
||||||
if (k == 'Escape')
|
if (k == 'Escape')
|
||||||
return ng();
|
return ng(e);
|
||||||
}
|
}
|
||||||
|
|
||||||
var next = function () {
|
var next = function () {
|
||||||
|
|||||||
@@ -1,3 +1,323 @@
|
|||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1208-2133 `v1.9.27` another dedup bug
|
||||||
|
|
||||||
|
so [v1.9.26](https://github.com/9001/copyparty/releases/tag/v1.9.26) fixed how moving a symlink could break other related symlinks, and then it turns out symlinks themselves could also die when moving them to another location, and somehow nobody encountered any of these until now... surely there are no more deduplication-related issues left at this point, yeah?
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
|
||||||
|
* #65 moving deduplicated copies of files (symlinks) from one location to another could make them disappear (break the symlinks)
|
||||||
|
|
||||||
|
* don't worry, we are **not** talking about data loss! but see the [release notes for v1.9.26](https://github.com/9001/copyparty/releases/tag/v1.9.26) which explain how to deal with this issue (how to find, diagnose, and repair broken symlinks)
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
## regarding fedora packages
|
||||||
|
|
||||||
|
[copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) (fedora's build service) is not building at the moment; ***if you installed copyparty from copr-pypi,*** you can upgrade to this release by running one of the following:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
dnf install https://ocv.me/copyparty/fedora/37/python3-copyparty.fc37.noarch.rpm
|
||||||
|
dnf install https://ocv.me/copyparty/fedora/38/python3-copyparty.fc38.noarch.rpm
|
||||||
|
dnf install https://ocv.me/copyparty/fedora/39/python3-copyparty.fc39.noarch.rpm
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1208-0136 `v1.9.26` dont break symlinks
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* *tumbleweed*
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
|
||||||
|
* deleting files from the server could make some duplicates of that file unavailable (by breaking nested symlinks)
|
||||||
|
|
||||||
|
* don't worry, we are **not** talking about data loss! but such broken links would disappear from the directory listing and would need to be remedied by replacing the broken links manually, either by using a file explorer or commandline
|
||||||
|
|
||||||
|
* **only** affected linux/macos, did **not** affect servers with `--hardlink` or `--never-symlink` or `--no-dedup`, and **mainly** affected servers with lots of duplicate files (with some dupes in the same folder and some elsewhere)
|
||||||
|
|
||||||
|
* if you want to check for such broken symlinks, the following unix command will find all of them: `find -L -type l`
|
||||||
|
|
||||||
|
* to repair a broken link, first remove it and then replace it: `rm thelink.opus; ln -s /mnt/music/realfile.opus thelink.opus`
|
||||||
|
|
||||||
|
* if you are left with a mystery file and want to know where its duplicates are, you can grep for the filename in the logs and you'll find something like the following line, where the `wark` is the file identifier; grep for that to find all the other copies of that file -- `purl` is the folder/URL which that copy of the file was uploaded to:
|
||||||
|
```json
|
||||||
|
{"name": "04. GHOST.opus", "purl": "/mu/vt/suisei/still-still-stellar/", "size": 4520986, "lmod": 1697091772, "sprs": true, "hash": [], "wark": "SJMASMtWOa0UZnc002nn5unO5iCBMa-krt2CDcq8eJe9"}
|
||||||
|
```
|
||||||
|
|
||||||
|
* the server would throw an error if you tried to delete a broken symlink
|
||||||
|
* prevent warnings about duplicate file entries in the database by preventing that from happening in the first place
|
||||||
|
* `u2c.py` (commandline uploader) would fail to delete files from the server if there's more than ~10'000 files to be deleted
|
||||||
|
* and forgot to bump the version number... `1.11 (2nd season)`
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* `--help` was slightly improved
|
||||||
|
* docker images are now based on alpine v3.19
|
||||||
|
* `copyparty.exe` is now based on python v3.11.7
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1201-2326 `v1.9.25` focus
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* remember and restore scroll position when leaving the textfile viewer
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* the request-smuggling detetcor was too strict, blocking access to textfiles with newlines / control-codes in the filename
|
||||||
|
* focus and text selection in messageboxes was still jank, mainly in firefox and especially phones
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* the banhammer now applies on attempts at request-smuggling and path traversals
|
||||||
|
* these were merely detected and rejected before, might as well bonk them
|
||||||
|
* reject bad requests with a terse 500 instead of abruptly disconnecting in some cases
|
||||||
|
* stops firefox from rapidly spamming additional attempts
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1201-0210 `v1.9.24` header auth
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* initial work on #62 (support identity providers, oauth/SSO/...); see [readme](https://github.com/9001/copyparty#identity-providers)
|
||||||
|
* only authentication so far; no authorization yet, and users must exist in the copyparty config with bogus passwords
|
||||||
|
* new option `--ipa` rejects connections from clients outside of a given allowlist of IP prefixes
|
||||||
|
* environment variables can be used almost everywhere that takes a filesystem path; should make it way more comfy to write configs for docker / systemd
|
||||||
|
* #59 added a basic [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose) and an example config
|
||||||
|
* probably much room for improvement on everything docker still
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* the nftables-based port-forwarding in the [systemd example](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd) was buggy; replaced with CAP_NET_BIND_SERVICE
|
||||||
|
* palemoon-specific js crash if a text selection was dragged
|
||||||
|
* text selection in messageboxes was jank
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* improved [systemd example](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd) with hardening and a better example config
|
||||||
|
* logfiles are flushed for every line written; can be disabled with `--no-logflush` for ~3% more performance best-case
|
||||||
|
* iphones probably won't broadcast cover-art to car stereos over bluetooth anymore since the thingamajig in iOS that's in charge of that doesn't have cookie-access, and strapping in the auth is too funky so let's stop doing that b7723ac245b8b3e38d6410891ef1aa92d4772114
|
||||||
|
* can be remedied by enabling filekeys and granting unauthenticated people access that way, but that's too much effort for anyone to bother with I'm sure
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1125-1417 `v1.9.21` in a bind
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* #63 the grid-view will open textfiles in the textfile viewer
|
||||||
|
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) now accepts user/group names (in addition to IDs)
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* the `Y` hotkey (which turns all links into download links) didn't affect the grid-view
|
||||||
|
* on some servers with unusual filesystem layouts (especially ubuntu-zfs), [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) would make an unholy mess of recursive bind-mounts, quickly running out of inodes and requiring a server reboot
|
||||||
|
* added several safeguards to avoid anything like this in the future
|
||||||
|
* mutex around jail setup/teardown to prevent racing other instances
|
||||||
|
* verify jail status by inspecting /proc/mounts between each folder to bind
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1121-2325 `v1.9.20` nice
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* expensive subprocesses (ffmpeg, parsers, hooks) will run with `nice` to reduce cpu priority
|
||||||
|
* ...so listening to flacs won't grind everything else to a halt
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* the "load more" search results button didn't disappear if you hit the serverside limit
|
||||||
|
* the "show all" button for huge folders didn't disappear when navigating into a smaller folder
|
||||||
|
* trying to play the previous track when you're already playing the first track in a folder would send you on a wild adventure
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1119-1229 `v1.9.19` shadow filter
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* #61 Mk.II: filter search results to also handle this issue in volumes where reindexing is disabled, or (spoiler warning:) a bug in the directory indexer prevents shadowed files from being forgotten
|
||||||
|
* filekeys didn't always get included in the up2k UI for world-readable folders
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1118-2106 `v1.9.18` cache invalidation
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* #61 search results could contain stale records from overlapping volumes:
|
||||||
|
* if volume `/foo` is indexed and then volume `/foo/bar` is later created, any files inside the `bar` subfolder would not become forgotten in `/foo`'s database until something in `/foo` changes, which could be never
|
||||||
|
* as a result, search results could show stale metadata from `/foo`'s database regarding files in `/foo/bar`
|
||||||
|
* fix this by dropping caches and reindexing if copyparty is started with a different list of volumes than last time
|
||||||
|
* #60 client error when ctrl-clicking search results
|
||||||
|
* icons for the close/more buttons in search results are now pillow-10.x compatible
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* `u2c.exe`: upgraded certifi to version `2023.11.17`
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1111-1738 `v1.9.17` 11-11
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* `u2c.py` / `u2c.exe` (the commandline uploader):
|
||||||
|
* `-x` is now case-insensitive
|
||||||
|
* if a file fails to upload after 30 attempts, give up (bitflips)
|
||||||
|
* add 5 sec delay before reattempts (configurable with `--cd`)
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* clients could crash the file indexer by uploading and then instantly deleting files (as some webdav clients tend to do)
|
||||||
|
* and fix some upload errorhandling which broke during a refactoring in v1.9.16
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* upgraded pyftpdlib to v1.5.9
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1104-2158 `v1.9.16` windedup
|
||||||
|
|
||||||
|
## breaking changes
|
||||||
|
* two of the prometheus metrics have changed slightly; see the [breaking changes readme section](https://github.com/9001/copyparty#breaking-changes)
|
||||||
|
* (i'm not familiar with prometheus so i'm not sure if this is a big deal)
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* #58 versioned docker images! no longer just `latest`
|
||||||
|
* browser: the mkdir feature now accepts `foo/bar/qux` and `../foo` and `/bar`
|
||||||
|
* add 14 more prometheus metrics; see [readme](https://github.com/9001/copyparty#prometheus) for details
|
||||||
|
* connections, requests, malicious requests, volume state, file hashing/analyzation queues
|
||||||
|
* catch some more malicious requests in the autoban filters
|
||||||
|
* some malicious requests are now answered with HTTP 422, so that they count against `--ban-422`
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* windows: fix symlink-based upload deduplication
|
||||||
|
* MS decided to make symlinks relative to working-directory rather than destination-path...
|
||||||
|
* `--stats` would produce invalid metrics if a volume was offline
|
||||||
|
* minor improvements to password hashing ux:
|
||||||
|
* properly warn if `--ah-cli` or `--ah-gen` is used without `--ah-alg`
|
||||||
|
* support `^D` during `--ah-cli`
|
||||||
|
* browser-ux / cosmetics:
|
||||||
|
* fix toast/tooltip colors on splashpage
|
||||||
|
* easier to do partial text selection inside links (search results, breadcrumbs, uploads)
|
||||||
|
* more rclone-related hints on the connect-page
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* malformed http headers from clients are no longer included in the client error-message
|
||||||
|
* just in case there are deployments with a reverse-proxy inserting interesting stuff on the way in
|
||||||
|
* the serverlog still contains all the necessary info to debug your own clients
|
||||||
|
* updated [example nginx config](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nginx/copyparty.conf) to recover faster from brief server outages
|
||||||
|
* the default value of `fail_timeout` (10sec) makes nginx cache the outage for longer than necessary
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1024-1643 `v1.9.15` expand placeholder
|
||||||
|
|
||||||
|
[made it just in time!](https://a.ocv.me/pub/g/nerd-stuff/PXL_20231024_170348367.jpg) (EDIT: nevermind, three of the containers didn't finish uploading to ghcr before takeoff ;_; all up now)
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* #56 placeholder variables in markdown documents and prologue/epilogue html files
|
||||||
|
* default-disabled; must be enabled globally with `--exp` or per-volume with volflag `exp`
|
||||||
|
* `{{self.ip}}` becomes the client IP; see [/srv/expand/README.md](https://github.com/9001/copyparty/blob/hovudstraum/srv/expand/README.md) for more examples
|
||||||
|
* dynamic-range-compressor: reduced volume jumps between songs when enabled
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* v1.9.14 broke the `scan` volflag, causing volume rescans to happen every 10sec if enabled
|
||||||
|
* its global counterpart `--re-maxage` was not affected
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1021-1443 `v1.9.14` uptime
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* search for files by upload time
|
||||||
|
* option to display upload time in directory listings
|
||||||
|
* enable globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at`
|
||||||
|
* has a ~17% performance impact on directory listings
|
||||||
|
* [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression) in the audioplayer settings
|
||||||
|
* `--ban-404` is now default-enabled
|
||||||
|
* the turbo-uploader will now un-turbo when necessary to avoid banning itself
|
||||||
|
* this only affects accounts with permissions `g`, `G`, or `h`
|
||||||
|
* accounts with read-access (which are able to see directory listings anyways) and accounts with write-only access are no longer affected by `--ban-404` or `--ban-url`
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* #55 clients could hit the `--url-ban` filter when uploading over webdav
|
||||||
|
* fixed by limiting `--ban-404` and `--ban-url` to accounts with permission `g`, `G`, or `h`
|
||||||
|
* fixed 20% performance drop in python 3.12 due to utcfromtimestamp deprecation
|
||||||
|
* but 3.12.0 is still 5% slower than 3.11.6 for some reason
|
||||||
|
* volume listing on startup would display some redundant info
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* timeout for unfinished uploads increased from 6 to 24 hours
|
||||||
|
* and is now configurable with `--snap-drop`
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1015-2006 `v1.9.12` more buttons
|
||||||
|
|
||||||
|
just adding requested features, nothing important
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* button `📅` in the uploader (default-enabled) sends your local last-modified timestamps to the server
|
||||||
|
* when deselected, the files on the server will have the upload time as their timestamps instead
|
||||||
|
* `--u2ts` specifies the default setting, `c` client-last-modified or `u` upload-time, or `fc` and `fu` to force
|
||||||
|
* button `full` in the gridview decides if thumbnails should be center-cropped or not
|
||||||
|
* `--no-crop` and the `nocrop` volflag now sets the default value of this instead of forcing the setting
|
||||||
|
* thumbnail cleanup is now more granular, cleaning full-jpg separately from cropped-webp for example
|
||||||
|
* set default sort order with `--sort` or volflag `sort`
|
||||||
|
* one or more comma-separated values; `tags/Cirle,tags/.tn,tags/Artist,tags/Title,href`
|
||||||
|
* see the column header tooltips in the browser to know what names (`id`) to use
|
||||||
|
* prefix a column name with `-` for descending sort
|
||||||
|
* specifying a sort order in the client will override all server-defined ones
|
||||||
|
* when visiting a read-only folder, the upload-or-filesearch toggle will remember its previous state and restore it when leaving the folder
|
||||||
|
* much more intuitive, if anything about this UI can be called that...
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* iPhone: rare javascript panic when switching between safari and another app
|
||||||
|
* ie9: file-rename ui was borked
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* copyparty.exe: upgrade to pillow 10.1 (which adds a new font for thumbnails in chrome)
|
||||||
|
* still based on python 3.11.6 because 3.12 is currently slower than 3.11
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1009-0036 `v1.9.11` bustin'
|
||||||
|
|
||||||
|
okay, i swear this is the last version for weeks! probably
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* cachebuster didn't apply to dynamically loaded javascript files
|
||||||
|
* READMEs could fail to render with `ReferenceError: DOMPurify is not defined` after upgrading from a copyparty older than v1.9.2
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1008-2051 `v1.9.10` badpwd
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* argument `--log-badpwd` specifies how to log invalid login attempts;
|
||||||
|
* `0` = just a warning with no further information
|
||||||
|
* `1` = log incorrect password in plaintext (default)
|
||||||
|
* `2` = log sha512 hash of the incorrect password
|
||||||
|
* `1` and `2` are convenient for stuff like setting up autoban triggers for common passwords using fail2ban or similar
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* none!
|
||||||
|
* the formerly mentioned caching-directives bug turned out to be unreachable... oh well, better safe than sorry
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-1007-2229 `v1.9.9` fix cross-volume dedup moves
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* v1.6.2 introduced a bug which, when moving files between volumes, could cause the move operation to abort when it encounters a deduplicated file
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
# 2023-1006-1750 `v1.9.8` static filekeys
|
# 2023-1006-1750 `v1.9.8` static filekeys
|
||||||
|
|
||||||
|
|||||||
33
docs/examples/docker/basic-docker-compose/copyparty.conf
Normal file
33
docs/examples/docker/basic-docker-compose/copyparty.conf
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# not actually YAML but lets pretend:
|
||||||
|
# -*- mode: yaml -*-
|
||||||
|
# vim: ft=yaml:
|
||||||
|
|
||||||
|
|
||||||
|
[global]
|
||||||
|
e2dsa # enable file indexing and filesystem scanning
|
||||||
|
e2ts # enable multimedia indexing
|
||||||
|
ansi # enable colors in log messages
|
||||||
|
|
||||||
|
# q, lo: /cfg/log/%Y-%m%d.log # log to file instead of docker
|
||||||
|
|
||||||
|
# ftp: 3921 # enable ftp server on port 3921
|
||||||
|
# p: 3939 # listen on another port
|
||||||
|
# ipa: 10.89. # only allow connections from 10.89.*
|
||||||
|
# df: 16 # stop accepting uploads if less than 16 GB free disk space
|
||||||
|
# ver # show copyparty version in the controlpanel
|
||||||
|
# grid # show thumbnails/grid-view by default
|
||||||
|
# theme: 2 # monokai
|
||||||
|
# name: datasaver # change the server-name that's displayed in the browser
|
||||||
|
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
|
||||||
|
# no-robots, force-js # make it harder for search engines to read your server
|
||||||
|
|
||||||
|
|
||||||
|
[accounts]
|
||||||
|
ed: wark # username: password
|
||||||
|
|
||||||
|
|
||||||
|
[/] # create a volume at "/" (the webroot), which will
|
||||||
|
/w # share /w (the docker data volume)
|
||||||
|
accs:
|
||||||
|
rw: * # everyone gets read-write access, but
|
||||||
|
rwmda: ed # the user "ed" gets read-write-move-delete-admin
|
||||||
20
docs/examples/docker/basic-docker-compose/docker-compose.yml
Normal file
20
docs/examples/docker/basic-docker-compose/docker-compose.yml
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
version: '3'
|
||||||
|
services:
|
||||||
|
|
||||||
|
copyparty:
|
||||||
|
image: copyparty/ac:latest
|
||||||
|
container_name: copyparty
|
||||||
|
user: "1000:1000"
|
||||||
|
ports:
|
||||||
|
- 3923:3923
|
||||||
|
volumes:
|
||||||
|
- ./:/cfg:z
|
||||||
|
- /path/to/your/fileshare/top/folder:/w:z
|
||||||
|
|
||||||
|
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
|
||||||
|
interval: 1m
|
||||||
|
timeout: 2s
|
||||||
|
retries: 5
|
||||||
|
start_period: 15s
|
||||||
72
docs/examples/docker/idp/copyparty.conf
Normal file
72
docs/examples/docker/idp/copyparty.conf
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
# not actually YAML but lets pretend:
|
||||||
|
# -*- mode: yaml -*-
|
||||||
|
# vim: ft=yaml:
|
||||||
|
|
||||||
|
|
||||||
|
# example config for how copyparty can be used with an identity
|
||||||
|
# provider, replacing the built-in authentication/authorization
|
||||||
|
# mechanism, and instead expecting the reverse-proxy to provide
|
||||||
|
# the requester's username (and possibly a group-name, for
|
||||||
|
# optional group-based access control)
|
||||||
|
#
|
||||||
|
# the filesystem-path `/w` is used as the storage location
|
||||||
|
# because that is the data-volume in the docker containers,
|
||||||
|
# because a deployment like this (with an IdP) is more commonly
|
||||||
|
# seen in containerized environments -- but this is not required
|
||||||
|
|
||||||
|
|
||||||
|
[global]
|
||||||
|
e2dsa # enable file indexing and filesystem scanning
|
||||||
|
e2ts # enable multimedia indexing
|
||||||
|
ansi # enable colors in log messages
|
||||||
|
|
||||||
|
# enable IdP support by expecting username/groupname in
|
||||||
|
# http-headers provided by the reverse-proxy; header "X-IdP-User"
|
||||||
|
# will contain the username, "X-IdP-Group" the groupname
|
||||||
|
idp-h-usr: x-idp-user
|
||||||
|
idp-h-grp: x-idp-group
|
||||||
|
|
||||||
|
|
||||||
|
[/] # create a volume at "/" (the webroot), which will
|
||||||
|
/w # share /w (the docker data volume)
|
||||||
|
accs:
|
||||||
|
rw: * # everyone gets read-access, but
|
||||||
|
rwmda: %su # the group "su" gets read-write-move-delete-admin
|
||||||
|
|
||||||
|
|
||||||
|
[/u/${u}] # each user gets their own home-folder at /u/username
|
||||||
|
/w/u/${u} # which will be "u/username" in the docker data volume
|
||||||
|
accs:
|
||||||
|
r: * # read-access for anyone, and
|
||||||
|
rwmda: ${u}, %su # read-write-move-delete-admin for that username + the "su" group
|
||||||
|
|
||||||
|
|
||||||
|
[/u/${u}/priv] # each user also gets a private area at /u/username/priv
|
||||||
|
/w/u/${u}/priv # stored at DATAVOLUME/u/username/priv
|
||||||
|
accs:
|
||||||
|
rwmda: ${u}, %su # read-write-move-delete-admin for that username + the "su" group
|
||||||
|
|
||||||
|
|
||||||
|
[/lounge/${g}] # each group gets their own shared volume
|
||||||
|
/w/lounge/${g} # stored at DATAVOLUME/lounge/groupname
|
||||||
|
accs:
|
||||||
|
r: * # read-access for anyone, and
|
||||||
|
rwmda: %${g}, %su # read-write-move-delete-admin for that group + the "su" group
|
||||||
|
|
||||||
|
|
||||||
|
[/lounge/${g}/priv] # and a private area for each group too
|
||||||
|
/w/lounge/${g}/priv # stored at DATAVOLUME/lounge/groupname/priv
|
||||||
|
accs:
|
||||||
|
rwmda: %${g}, %su # read-write-move-delete-admin for that group + the "su" group
|
||||||
|
|
||||||
|
|
||||||
|
# and create some strategic volumes to prevent anyone from gaining
|
||||||
|
# unintended access to priv folders if the users/groups db is lost
|
||||||
|
[/u]
|
||||||
|
/w/u
|
||||||
|
accs:
|
||||||
|
rwmda: %su
|
||||||
|
[/lounge]
|
||||||
|
/w/lounge
|
||||||
|
accs:
|
||||||
|
rwmda: %su
|
||||||
@@ -28,10 +28,6 @@ https://github.com/nayuki/QR-Code-generator/
|
|||||||
C: Project Nayuki
|
C: Project Nayuki
|
||||||
L: MIT
|
L: MIT
|
||||||
|
|
||||||
https://github.com/python/cpython/blob/3.10/Lib/asyncore.py
|
|
||||||
C: 1996 Sam Rushing
|
|
||||||
L: ISC
|
|
||||||
|
|
||||||
https://github.com/ahupp/python-magic/
|
https://github.com/ahupp/python-magic/
|
||||||
C: 2001-2014 Adam Hupp
|
C: 2001-2014 Adam Hupp
|
||||||
L: MIT
|
L: MIT
|
||||||
|
|||||||
@@ -100,6 +100,10 @@ include_trailing_comma = true
|
|||||||
[tool.bandit]
|
[tool.bandit]
|
||||||
skips = ["B104", "B110", "B112"]
|
skips = ["B104", "B110", "B112"]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
line-length = 120
|
||||||
|
ignore = ["E402", "E722"]
|
||||||
|
|
||||||
# =====================================================================
|
# =====================================================================
|
||||||
|
|
||||||
[tool.pylint.MAIN]
|
[tool.pylint.MAIN]
|
||||||
|
|||||||
@@ -17,11 +17,15 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
|
|||||||
* if you are using rootless podman, remove `-u 1000`
|
* if you are using rootless podman, remove `-u 1000`
|
||||||
* if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`)
|
* if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`)
|
||||||
|
|
||||||
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
|
this example is also available as a podman-compatible [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose); example usage: `docker-compose up` (you may need to `systemctl enable --now podman.socket` or similar)
|
||||||
|
|
||||||
|
i'm not very familiar with containers, so let me know if this section could be better 🙏
|
||||||
|
|
||||||
|
|
||||||
## configuration
|
## configuration
|
||||||
|
|
||||||
|
> this section basically explains how the [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose) works, so you may look there instead
|
||||||
|
|
||||||
the container has the same default config as the sfx and the pypi module, meaning it will listen on port 3923 and share the "current folder" (`/w` inside the container) as read-write for anyone
|
the container has the same default config as the sfx and the pypi module, meaning it will listen on port 3923 and share the "current folder" (`/w` inside the container) as read-write for anyone
|
||||||
|
|
||||||
the recommended way to configure copyparty inside a container is to mount a folder which has one or more [config files](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) inside; `-v /your/config/folder:/cfg`
|
the recommended way to configure copyparty inside a container is to mount a folder which has one or more [config files](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) inside; `-v /your/config/folder:/cfg`
|
||||||
|
|||||||
@@ -141,12 +141,25 @@ filt=
|
|||||||
}
|
}
|
||||||
|
|
||||||
[ $push ] && {
|
[ $push ] && {
|
||||||
|
ver=$(
|
||||||
|
python3 ../../dist/copyparty-sfx.py --version 2>/dev/null |
|
||||||
|
awk '/^copyparty v/{sub(/-.*/,"");sub(/v/,"");print$2;exit}'
|
||||||
|
)
|
||||||
|
echo $ver | grep -E '[0-9]\.[0-9]' || {
|
||||||
|
echo no ver
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
for i in $dhub_order; do
|
for i in $dhub_order; do
|
||||||
|
printf '\ndockerhub %s\n' $i
|
||||||
|
podman manifest push --all copyparty-$i copyparty/$i:$ver
|
||||||
podman manifest push --all copyparty-$i copyparty/$i:latest
|
podman manifest push --all copyparty-$i copyparty/$i:latest
|
||||||
done
|
done &
|
||||||
for i in $ghcr_order; do
|
for i in $ghcr_order; do
|
||||||
|
printf '\nghcr %s\n' $i
|
||||||
|
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:$ver
|
||||||
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest
|
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest
|
||||||
done
|
done &
|
||||||
|
wait
|
||||||
}
|
}
|
||||||
|
|
||||||
echo ok
|
echo ok
|
||||||
|
|||||||
@@ -205,26 +205,22 @@ necho() {
|
|||||||
mv {markupsafe,jinja2} j2/
|
mv {markupsafe,jinja2} j2/
|
||||||
|
|
||||||
necho collecting pyftpdlib
|
necho collecting pyftpdlib
|
||||||
f="../build/pyftpdlib-1.5.8.tar.gz"
|
f="../build/pyftpdlib-1.5.9.tar.gz"
|
||||||
[ -e "$f" ] ||
|
[ -e "$f" ] ||
|
||||||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.8.tar.gz;
|
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.9.tar.gz;
|
||||||
wget -O$f "$url" || curl -L "$url" >$f)
|
wget -O$f "$url" || curl -L "$url" >$f)
|
||||||
|
|
||||||
tar -zxf $f
|
tar -zxf $f
|
||||||
mv pyftpdlib-release-*/pyftpdlib .
|
mv pyftpdlib-release-*/pyftpdlib .
|
||||||
rm -rf pyftpdlib-release-* pyftpdlib/test
|
rm -rf pyftpdlib-release-* pyftpdlib/test
|
||||||
|
for f in pyftpdlib/_async{hat,ore}.py; do
|
||||||
|
[ -e "$f" ] || continue;
|
||||||
|
iawk 'NR<4||NR>27||!/^#/;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' $f
|
||||||
|
done
|
||||||
|
|
||||||
mkdir ftp/
|
mkdir ftp/
|
||||||
mv pyftpdlib ftp/
|
mv pyftpdlib ftp/
|
||||||
|
|
||||||
necho collecting asyncore, asynchat
|
|
||||||
for n in asyncore.py asynchat.py; do
|
|
||||||
f=../build/$n
|
|
||||||
[ -e "$f" ] ||
|
|
||||||
(url=https://raw.githubusercontent.com/python/cpython/c4d45ee670c09d4f6da709df072ec80cb7dfad22/Lib/$n;
|
|
||||||
wget -O$f "$url" || curl -L "$url" >$f)
|
|
||||||
done
|
|
||||||
|
|
||||||
necho collecting python-magic
|
necho collecting python-magic
|
||||||
v=0.4.27
|
v=0.4.27
|
||||||
f="../build/python-magic-$v.tar.gz"
|
f="../build/python-magic-$v.tar.gz"
|
||||||
@@ -293,12 +289,6 @@ necho() {
|
|||||||
(cd "${x%/*}"; cp -p "../$(cat "${x##*/}")" ${x##*/})
|
(cd "${x%/*}"; cp -p "../$(cat "${x##*/}")" ${x##*/})
|
||||||
done
|
done
|
||||||
|
|
||||||
# insert asynchat
|
|
||||||
mkdir copyparty/vend
|
|
||||||
for n in asyncore.py asynchat.py; do
|
|
||||||
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
|
|
||||||
done
|
|
||||||
|
|
||||||
rm -f copyparty/stolen/*/README.md
|
rm -f copyparty/stolen/*/README.md
|
||||||
|
|
||||||
# remove type hints before build instead
|
# remove type hints before build instead
|
||||||
@@ -419,17 +409,14 @@ iawk '/^ {0,4}[^ ]/{s=0}/^ {4}def (serve_forever|_loop)/{s=1}!s' ftp/pyftpdlib/s
|
|||||||
rm -f ftp/pyftpdlib/{__main__,prefork}.py
|
rm -f ftp/pyftpdlib/{__main__,prefork}.py
|
||||||
|
|
||||||
[ $no_ftp ] &&
|
[ $no_ftp ] &&
|
||||||
rm -rf copyparty/ftpd.py ftp asyncore.py asynchat.py &&
|
rm -rf copyparty/ftpd.py ftp &&
|
||||||
sed -ri '/add_argument\("--ftp/d' copyparty/__main__.py &&
|
|
||||||
sed -ri '/\.ftp/d' copyparty/svchub.py
|
sed -ri '/\.ftp/d' copyparty/svchub.py
|
||||||
|
|
||||||
[ $no_smb ] &&
|
[ $no_smb ] &&
|
||||||
rm -f copyparty/smbd.py &&
|
rm -f copyparty/smbd.py
|
||||||
sed -ri '/add_argument\("--smb/d' copyparty/__main__.py
|
|
||||||
|
|
||||||
[ $no_zm ] &&
|
[ $no_zm ] &&
|
||||||
rm -rf copyparty/mdns.py copyparty/stolen/dnslib &&
|
rm -rf copyparty/mdns.py copyparty/stolen/dnslib
|
||||||
sed -ri '/add_argument\("--zm/d' copyparty/__main__.py
|
|
||||||
|
|
||||||
[ $no_cm ] && {
|
[ $no_cm ] && {
|
||||||
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
|
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
|
||||||
@@ -579,8 +566,8 @@ nf=$(ls -1 "$zdir"/arc.* 2>/dev/null | wc -l)
|
|||||||
cat ../$bdir/COPYING.txt) >> copyparty/res/COPYING.txt ||
|
cat ../$bdir/COPYING.txt) >> copyparty/res/COPYING.txt ||
|
||||||
echo "copying.txt 404 pls rebuild"
|
echo "copying.txt 404 pls rebuild"
|
||||||
|
|
||||||
mv ftp/* j2/* copyparty/vend/* .
|
mv ftp/* j2/* .
|
||||||
rm -rf ftp j2 py2 py37 copyparty/vend
|
rm -rf ftp j2 py2 py37
|
||||||
(cd copyparty; tar -cvf z.tar $t; rm -rf $t)
|
(cd copyparty; tar -cvf z.tar $t; rm -rf $t)
|
||||||
cd ..
|
cd ..
|
||||||
pyoxidizer build --release --target-triple $tgt
|
pyoxidizer build --release --target-triple $tgt
|
||||||
|
|||||||
@@ -9,10 +9,13 @@ tee build2.sh | cmp build.sh && rm build2.sh || {
|
|||||||
[[ $r =~ [yY] ]] && mv build{2,}.sh && exec ./build.sh
|
[[ $r =~ [yY] ]] && mv build{2,}.sh && exec ./build.sh
|
||||||
}
|
}
|
||||||
|
|
||||||
[ -e up2k.sh ] && [ ! "$1" ] && ./up2k.sh
|
clean=--clean
|
||||||
|
[ "$1" = f ] && clean= && shift
|
||||||
|
|
||||||
uname -s | grep WOW64 && m=64 || m=32
|
uname -s | grep WOW64 && m=64 || m=32
|
||||||
uname -s | grep NT-10 && w10=1 || w7=1
|
uname -s | grep NT-10 && w10=1 || w7=1
|
||||||
|
[ $w7 ] && [ -e up2k.sh ] && [ ! "$1" ] && ./up2k.sh
|
||||||
|
|
||||||
[ $w7 ] && pyv=37 || pyv=311
|
[ $w7 ] && pyv=37 || pyv=311
|
||||||
esuf=
|
esuf=
|
||||||
[ $w7 ] && [ $m = 32 ] && esuf=32
|
[ $w7 ] && [ $m = 32 ] && esuf=32
|
||||||
@@ -65,12 +68,18 @@ sed -r 's/1,2,3,0/'$a,$b,$c,$d'/;s/1\.2\.3/'$a.$b.$c/ <loader.rc >loader.rc2
|
|||||||
sed -ri s/copyparty.exe/copyparty$esuf.exe/ loader.rc2
|
sed -ri s/copyparty.exe/copyparty$esuf.exe/ loader.rc2
|
||||||
|
|
||||||
excl=(
|
excl=(
|
||||||
|
asyncio
|
||||||
copyparty.broker_mp
|
copyparty.broker_mp
|
||||||
copyparty.broker_mpw
|
copyparty.broker_mpw
|
||||||
|
copyparty.smbd
|
||||||
ctypes.macholib
|
ctypes.macholib
|
||||||
curses
|
curses
|
||||||
|
email._header_value_parser
|
||||||
|
email.header
|
||||||
|
email.parser
|
||||||
inspect
|
inspect
|
||||||
multiprocessing
|
multiprocessing
|
||||||
|
packaging
|
||||||
pdb
|
pdb
|
||||||
pickle
|
pickle
|
||||||
PIL.EpsImagePlugin
|
PIL.EpsImagePlugin
|
||||||
@@ -85,6 +94,7 @@ excl=(
|
|||||||
PIL.ImageShow
|
PIL.ImageShow
|
||||||
PIL.ImageTk
|
PIL.ImageTk
|
||||||
PIL.ImageWin
|
PIL.ImageWin
|
||||||
|
PIL.PdfParser
|
||||||
) || excl+=(
|
) || excl+=(
|
||||||
PIL
|
PIL
|
||||||
PIL.ExifTags
|
PIL.ExifTags
|
||||||
@@ -95,7 +105,7 @@ excl=(
|
|||||||
excl=( "${excl[@]/#/--exclude-module }" )
|
excl=( "${excl[@]/#/--exclude-module }" )
|
||||||
|
|
||||||
$APPDATA/python/python$pyv/scripts/pyinstaller \
|
$APPDATA/python/python$pyv/scripts/pyinstaller \
|
||||||
-y --clean -p mods --upx-dir=. \
|
-y $clean -p mods --upx-dir=. \
|
||||||
${excl[*]} \
|
${excl[*]} \
|
||||||
--version-file loader.rc2 -i loader.ico -n copyparty -c -F loader.py \
|
--version-file loader.rc2 -i loader.ico -n copyparty -c -F loader.py \
|
||||||
--add-data 'mods/copyparty/res;copyparty/res' \
|
--add-data 'mods/copyparty/res;copyparty/res' \
|
||||||
|
|||||||
@@ -1,22 +1,23 @@
|
|||||||
d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl
|
f117016b1e6a7d7e745db30d3e67f1acf7957c443a0dd301b6c5e10b8368f2aa4db6be9782d2d3f84beadd139bfeef4982e40f21ca5d9065cb794eeb0e473e82 altgraph-0.17.4-py2.py3-none-any.whl
|
||||||
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
|
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
|
||||||
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
|
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
|
||||||
f298e34356b5590dde7477d7b3a88ad39c622a2bcf3fcd7c53870ce8384dd510f690af81b8f42e121a22d3968a767d2e07595036b2ed7049c8ef4d112bcf3a61 pyinstaller-5.13.2-py3-none-win32.whl
|
f298e34356b5590dde7477d7b3a88ad39c622a2bcf3fcd7c53870ce8384dd510f690af81b8f42e121a22d3968a767d2e07595036b2ed7049c8ef4d112bcf3a61 pyinstaller-5.13.2-py3-none-win32.whl
|
||||||
ea73aa54cc6d5db20dfb127e54562dabf890e4cd6171a91b10a51af2bcfc76e1d64cbdce4546df2dcfe42b624724c85b1cd05934be2413425b1f880222727b4f pyinstaller-5.13.2-py3-none-win_amd64.whl
|
f23615c522ed58b9a05978ba4c69c06224590f3a6adbd8e89b31838b181a57160739ceff1fc2ba6f4239b8fee46f92ce02910b2debda2710558ed42cff1ce3f1 pyinstaller-6.1.0-py3-none-win_amd64.whl
|
||||||
2f4e3927a38cf7757bc9a1c06370d79209669a285a80f1b09cf9917137825c7022a50a56b351807e6e687e2c3a7bd7b2c5cc6daeb4d90e11920284c1a04a1cc3 pyinstaller_hooks_contrib-2023.8-py2.py3-none-any.whl
|
5747b3b119629c4cf956f0eaa85f29218bb3680d3a4a262fa6e976e56b35067302e153d2c0a001505f2cb642b1f78752567889b3b82e342d6cd29aac8b70e92e pyinstaller_hooks_contrib-2023.10-py2.py3-none-any.whl
|
||||||
749a473646c6d4c7939989649733d4c7699fd1c359c27046bf5bc9c070d1a4b8b986bbc65f60d7da725baf16dbfdd75a4c2f5bb8335f2cb5685073f5fee5c2d1 pywin32_ctypes-0.2.2-py3-none-any.whl
|
749a473646c6d4c7939989649733d4c7699fd1c359c27046bf5bc9c070d1a4b8b986bbc65f60d7da725baf16dbfdd75a4c2f5bb8335f2cb5685073f5fee5c2d1 pywin32_ctypes-0.2.2-py3-none-any.whl
|
||||||
|
6e0d854040baff861e1647d2bece7d090bc793b2bd9819c56105b94090df54881a6a9b43ebd82578cd7c76d47181571b671e60672afd9def389d03c9dae84fcf setuptools-68.2.2-py3-none-any.whl
|
||||||
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
|
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
|
||||||
8d16a967a0a7872a7575b1005cf66915deacda6ee8611fbb52f42fc3e3beb2f901a5140c942a5d146bd412b92bfa9cbadd82beeba83df6d70930c6dc26608a5b upx-4.1.0-win32.zip
|
8d16a967a0a7872a7575b1005cf66915deacda6ee8611fbb52f42fc3e3beb2f901a5140c942a5d146bd412b92bfa9cbadd82beeba83df6d70930c6dc26608a5b upx-4.1.0-win32.zip
|
||||||
# u2c (win7)
|
# u2c (win7)
|
||||||
a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl
|
f3390290b896019b2fa169932390e4930d1c03c014e1f6db2405ca2eb1f51f5f5213f725885853805b742997b0edb369787e5c0069d217bc4e8b957f847f58b6 certifi-2023.11.17-py3-none-any.whl
|
||||||
2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl
|
904eb57b13bea80aea861de86987e618665d37fa9ea0856e0125a9ba767a53e5064de0b9c4735435a2ddf4f16f7f7d2c75a682e1de83d9f57922bdca8e29988c charset_normalizer-3.3.0-cp37-cp37m-win32.whl
|
||||||
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
|
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
|
||||||
220e0e122d5851aaccf633224dd7fbd3ba8c8d2720944d8019d6a276ed818d83e3426fe21807f22d673b5428f19fcf9a6b4e645f69bbecd967c568bb6aeb7c8d requests-2.28.2-py3-none-any.whl
|
b795abb26ba2f04f1afcfb196f21f638014b26c8186f8f488f1c2d91e8e0220962fbd259dbc9c3875222eb47fc95c73fc0606aaa6602b9ebc524809c9ba3501f requests-2.31.0-py3-none-any.whl
|
||||||
8770011f4ad1fe40a3062e6cdf1fda431530c59ee7de3fc5f8c57db54bfdb71c3aa220ca0e0bb1874fc6700e9ebb57defbae54ac84938bc9ad8f074910106681 urllib3-1.26.14-py2.py3-none-any.whl
|
5a25cb9b79bb6107f9055dc3e9f62ebc6d4d9ca2c730d824985c93cd82406b723c200d6300c5064e42ee9fc7a2853d6ec6661394f3ed7bac03750e1f2a6840d1 urllib3-1.26.17-py2.py3-none-any.whl
|
||||||
# win7
|
# win7
|
||||||
91c025f7d94bcdf93df838fab67053165a414fc84e8496f92ecbb910dd55f6b6af5e360bbd051444066880c5a6877e75157bd95e150ead46e5c605930dfc50f2 future-0.18.2.tar.gz
|
91c025f7d94bcdf93df838fab67053165a414fc84e8496f92ecbb910dd55f6b6af5e360bbd051444066880c5a6877e75157bd95e150ead46e5c605930dfc50f2 future-0.18.2.tar.gz
|
||||||
c06b3295d1d0b0f0a6f9a6cd0be861b9b643b4a5ea37857f0bd41c45deaf27bb927b71922dab74e633e43d75d04a9bd0d1c4ad875569740b0f2a98dd2bfa5113 importlib_metadata-5.0.0-py3-none-any.whl
|
c06b3295d1d0b0f0a6f9a6cd0be861b9b643b4a5ea37857f0bd41c45deaf27bb927b71922dab74e633e43d75d04a9bd0d1c4ad875569740b0f2a98dd2bfa5113 importlib_metadata-5.0.0-py3-none-any.whl
|
||||||
4e71295da5d1a26c71a0baa8905fdccb522bb16d56bc964db636de68688c5bf703f3b2880cdeea07138789e0eb4506e06f9ccd0da906c89d2cb6d55ad64659ea pip-22.3-py3-none-any.whl
|
016a8cbd09384f1a9a44cb0e8274df75a8bcb2f3966bb5d708c62145289efaa5db98f75256c97e4f8046735ce2e529fbb076f284a46cdb716e89a75660200ad9 pip-23.2.1-py3-none-any.whl
|
||||||
6bb73cc2db795c59c92f2115727f5c173cacc9465af7710db9ff2f2aec2d73130d0992d0f16dcb3fac222dc15c0916562d0813b2337401022020673a4461df3d python-3.7.9-amd64.exe
|
6bb73cc2db795c59c92f2115727f5c173cacc9465af7710db9ff2f2aec2d73130d0992d0f16dcb3fac222dc15c0916562d0813b2337401022020673a4461df3d python-3.7.9-amd64.exe
|
||||||
500747651c87f59f2436c5ab91207b5b657856e43d10083f3ce27efb196a2580fadd199a4209519b409920c562aaaa7dcbdfb83ed2072a43eaccae6e2d056f31 python-3.7.9.exe
|
500747651c87f59f2436c5ab91207b5b657856e43d10083f3ce27efb196a2580fadd199a4209519b409920c562aaaa7dcbdfb83ed2072a43eaccae6e2d056f31 python-3.7.9.exe
|
||||||
68e1b618d988be56aaae4e2eb92bc0093627a00441c1074ebe680c41aa98a6161e52733ad0c59888c643a33fe56884e4f935178b2557fbbdd105e92e0d993df6 windows6.1-kb2533623-x64.msu
|
68e1b618d988be56aaae4e2eb92bc0093627a00441c1074ebe680c41aa98a6161e52733ad0c59888c643a33fe56884e4f935178b2557fbbdd105e92e0d993df6 windows6.1-kb2533623-x64.msu
|
||||||
@@ -26,5 +27,6 @@ ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a250675
|
|||||||
00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl
|
00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl
|
||||||
7f8f4daa4f4f2dbf24cdd534b2952ee3fba6334eb42b37465ccda3aa1cccc3d6204aa6bfffb8a83bf42ec59c702b5b5247d4c8ee0d4df906334ae53072ef8c4c MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
|
7f8f4daa4f4f2dbf24cdd534b2952ee3fba6334eb42b37465ccda3aa1cccc3d6204aa6bfffb8a83bf42ec59c702b5b5247d4c8ee0d4df906334ae53072ef8c4c MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
|
||||||
8a6e2b13a2ec4ef914a5d62aad3db6464d45e525a82e07f6051ed10474eae959069e165dba011aefb8207cdfd55391d73d6f06362c7eb247b08763106709526e mutagen-1.47.0-py3-none-any.whl
|
8a6e2b13a2ec4ef914a5d62aad3db6464d45e525a82e07f6051ed10474eae959069e165dba011aefb8207cdfd55391d73d6f06362c7eb247b08763106709526e mutagen-1.47.0-py3-none-any.whl
|
||||||
08a033202b5c51e50609b2700dd69cbae30edb367f34762fd1633aae08b35949b4f67f12c75f25868a5b62b4956190d0cc8d201b170758d9c04a523bc8442b9b Pillow-10.0.1-cp311-cp311-win_amd64.whl
|
656015f5cc2c04aa0653ee5609c39a7e5f0b6a58c84fe26b20bd070c52d20b4effb810132f7fb771168483e9fd975cc3302837dd7a1a687ee058b0460c857cc4 packaging-23.2-py3-none-any.whl
|
||||||
c86bbeacad3ae3c7bde747f5b4f09c11eced841add14e79ec4a064e5e29ebca35460e543ba735b11bfb882837d5ff4371ce64492d28d096b4686233c9a8cda6d python-3.11.5-amd64.exe
|
6401616fdfdd720d1aaa9a0ed1398d00664b28b6d84517dff8d1f9c416452610c6afa64cfb012a78e61d1cf4f6d0784eca6e7610957859e511f15bc6f3b3bd53 Pillow-10.1.0-cp311-cp311-win_amd64.whl
|
||||||
|
2e6a57bab45b5a825a2073780c73980cbf5aafd99dc3b28660ea3f5f658f04668cd0f01c7de0bb79e362ff4e3b8f01dd4f671d3a2e054d3071baefdcf0b0e4ba python-3.11.7-amd64.exe
|
||||||
|
|||||||
@@ -106,20 +106,19 @@ def meichk():
|
|||||||
if filt not in sys.executable:
|
if filt not in sys.executable:
|
||||||
filt = os.path.basename(sys.executable)
|
filt = os.path.basename(sys.executable)
|
||||||
|
|
||||||
pids = []
|
hits = []
|
||||||
ptn = re.compile(r"^([^\s]+)\s+([0-9]+)")
|
|
||||||
try:
|
try:
|
||||||
procs = sp.check_output("tasklist").decode("utf-8", "replace")
|
cmd = "tasklist /fo csv".split(" ")
|
||||||
|
procs = sp.check_output(cmd).decode("utf-8", "replace")
|
||||||
except:
|
except:
|
||||||
procs = "" # winpe
|
procs = "" # winpe
|
||||||
|
|
||||||
for ln in procs.splitlines():
|
for ln in procs.split("\n"):
|
||||||
m = ptn.match(ln)
|
if filt in ln.split('"')[:2][-1]:
|
||||||
if m and filt in m.group(1).lower():
|
hits.append(ln)
|
||||||
pids.append(int(m.group(2)))
|
|
||||||
|
|
||||||
mod = os.path.dirname(os.path.realpath(__file__))
|
mod = os.path.dirname(os.path.realpath(__file__))
|
||||||
if os.path.basename(mod).startswith("_MEI") and len(pids) == 2:
|
if os.path.basename(mod).startswith("_MEI") and len(hits) == 2:
|
||||||
meicln(mod)
|
meicln(mod)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -15,20 +15,25 @@ uname -s | grep NT-10 && w10=1 || {
|
|||||||
w7=1; uname -s | grep WOW64 && w7x64=1 || w7x32=1
|
w7=1; uname -s | grep WOW64 && w7x64=1 || w7x32=1
|
||||||
}
|
}
|
||||||
fns=(
|
fns=(
|
||||||
altgraph-0.17.3-py2.py3-none-any.whl
|
altgraph-0.17.4-py2.py3-none-any.whl
|
||||||
pefile-2023.2.7-py3-none-any.whl
|
pefile-2023.2.7-py3-none-any.whl
|
||||||
pyinstaller-5.13.2-py3-none-win_amd64.whl
|
pyinstaller_hooks_contrib-2023.10-py2.py3-none-any.whl
|
||||||
pyinstaller_hooks_contrib-2023.7-py2.py3-none-any.whl
|
|
||||||
pywin32_ctypes-0.2.2-py3-none-any.whl
|
pywin32_ctypes-0.2.2-py3-none-any.whl
|
||||||
|
setuptools-68.2.2-py3-none-any.whl
|
||||||
upx-4.1.0-win32.zip
|
upx-4.1.0-win32.zip
|
||||||
)
|
)
|
||||||
[ $w10 ] && fns+=(
|
[ $w10 ] && fns+=(
|
||||||
|
pyinstaller-6.1.0-py3-none-win_amd64.whl
|
||||||
|
Jinja2-3.1.2-py3-none-any.whl
|
||||||
|
MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl
|
||||||
mutagen-1.47.0-py3-none-any.whl
|
mutagen-1.47.0-py3-none-any.whl
|
||||||
Pillow-10.0.1-cp311-cp311-win_amd64.whl
|
packaging-23.2-py3-none-any.whl
|
||||||
python-3.11.3-amd64.exe
|
Pillow-10.1.0-cp311-cp311-win_amd64.whl
|
||||||
}
|
python-3.11.7-amd64.exe
|
||||||
|
)
|
||||||
[ $w7 ] && fns+=(
|
[ $w7 ] && fns+=(
|
||||||
certifi-2022.12.7-py3-none-any.whl
|
pyinstaller-5.13.2-py3-none-win32.whl
|
||||||
|
certifi-2023.11.17-py3-none-any.whl
|
||||||
chardet-5.1.0-py3-none-any.whl
|
chardet-5.1.0-py3-none-any.whl
|
||||||
idna-3.4-py3-none-any.whl
|
idna-3.4-py3-none-any.whl
|
||||||
requests-2.28.2-py3-none-any.whl
|
requests-2.28.2-py3-none-any.whl
|
||||||
@@ -37,7 +42,7 @@ fns=(
|
|||||||
[ $w7 ] && fns+=(
|
[ $w7 ] && fns+=(
|
||||||
future-0.18.2.tar.gz
|
future-0.18.2.tar.gz
|
||||||
importlib_metadata-5.0.0-py3-none-any.whl
|
importlib_metadata-5.0.0-py3-none-any.whl
|
||||||
pip-22.3-py3-none-any.whl
|
pip-23.2.1-py3-none-any.whl
|
||||||
typing_extensions-4.4.0-py3-none-any.whl
|
typing_extensions-4.4.0-py3-none-any.whl
|
||||||
zipp-3.10.0-py3-none-any.whl
|
zipp-3.10.0-py3-none-any.whl
|
||||||
)
|
)
|
||||||
@@ -67,31 +72,26 @@ uname -s | grep NT-10 && w10=1 || w7=1
|
|||||||
[ $w7 ] && pyv=37 || pyv=311
|
[ $w7 ] && pyv=37 || pyv=311
|
||||||
appd=$(cygpath.exe "$APPDATA")
|
appd=$(cygpath.exe "$APPDATA")
|
||||||
cd ~/Downloads &&
|
cd ~/Downloads &&
|
||||||
unzip upx-*-win32.zip &&
|
yes | unzip upx-*-win32.zip &&
|
||||||
mv upx-*/upx.exe . &&
|
mv upx-*/upx.exe . &&
|
||||||
python -m ensurepip &&
|
python -m ensurepip &&
|
||||||
python -m pip install --user -U pip-*.whl &&
|
{ [ $w10 ] || python -m pip install --user -U pip-*.whl; } &&
|
||||||
{ [ $w7 ] || python -m pip install --user -U mutagen-*.whl Pillow-*.whl; } &&
|
{ [ $w7 ] || python -m pip install --user -U {packaging,setuptools,mutagen,Pillow,Jinja2,MarkupSafe}-*.whl; } &&
|
||||||
{ [ $w10 ] || python -m pip install --user -U {requests,urllib3,charset_normalizer,certifi,idna}-*.whl; } &&
|
{ [ $w10 ] || python -m pip install --user -U {requests,urllib3,charset_normalizer,certifi,idna}-*.whl; } &&
|
||||||
{ [ $w10 ] || python -m pip install --user -U future-*.tar.gz importlib_metadata-*.whl typing_extensions-*.whl zipp-*.whl; } &&
|
{ [ $w10 ] || python -m pip install --user -U future-*.tar.gz importlib_metadata-*.whl typing_extensions-*.whl zipp-*.whl; } &&
|
||||||
python -m pip install --user -U pyinstaller-*.whl pefile-*.whl pywin32_ctypes-*.whl pyinstaller_hooks_contrib-*.whl altgraph-*.whl &&
|
python -m pip install --user -U pyinstaller-*.whl pefile-*.whl pywin32_ctypes-*.whl pyinstaller_hooks_contrib-*.whl altgraph-*.whl &&
|
||||||
sed -ri 's/--lzma/--best/' $appd/Python/Python$pyv/site-packages/pyinstaller/building/utils.py &&
|
sed -ri 's/--lzma/--best/' $appd/Python/Python$pyv/site-packages/pyinstaller/building/utils.py &&
|
||||||
curl -fkLO https://192.168.123.1:3923/cpp/scripts/uncomment.py &&
|
curl -fkLO https://192.168.123.1:3923/cpp/scripts/uncomment.py &&
|
||||||
python uncomment.py $(for d in $appd/Python/Python$pyv/site-packages/{requests,urllib3,charset_normalizer,certifi,idna}; do find $d -name \*.py; done) &&
|
python uncomment.py 1 $(for d in $appd/Python/Python$pyv/site-packages/{requests,urllib3,charset_normalizer,certifi,idna,mutagen,PIL,jinja2,markupsafe}; do find $d -name \*.py; done) &&
|
||||||
cd &&
|
cd &&
|
||||||
rm -f build.sh &&
|
rm -f build.sh &&
|
||||||
curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/build.sh &&
|
curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/build.sh &&
|
||||||
curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.sh &&
|
{ [ $w10 ] || curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.sh; } &&
|
||||||
echo ok
|
echo ok
|
||||||
# python -m pip install --user -U Pillow-9.2.0-cp37-cp37m-win32.whl
|
# python -m pip install --user -U Pillow-9.2.0-cp37-cp37m-win32.whl
|
||||||
# sed -ri 's/, bestopt, /]+bestopt+[/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
|
# sed -ri 's/, bestopt, /]+bestopt+[/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
|
||||||
# sed -ri 's/(^\s+bestopt = ).*/\1["--best","--lzma","--ultra-brute"]/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
|
# sed -ri 's/(^\s+bestopt = ).*/\1["--best","--lzma","--ultra-brute"]/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
|
||||||
|
|
||||||
===[ win10: copy-paste into git-bash ]=========================
|
|
||||||
#for f in $appd/Python/Python311/site-packages/mutagen/*.py; do awk -i inplace '/^\s*def _?(save|write)/{sub(/d.*/," ");s=$0;ns=length(s)} ns&&/[^ ]/&&substr($0,0,ns)!=s{ns=0} !ns' "$f"; done &&
|
|
||||||
python uncomment.py $appd/Python/Python311/site-packages/{mutagen,PIL,jinja2,markupsafe}/*.py &&
|
|
||||||
echo ok
|
|
||||||
|
|
||||||
|
|
||||||
## ============================================================
|
## ============================================================
|
||||||
## notes
|
## notes
|
||||||
|
|||||||
@@ -1,6 +1,17 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -ex
|
set -ex
|
||||||
|
|
||||||
|
# osx support
|
||||||
|
gtar=$(command -v gtar || command -v gnutar) || true
|
||||||
|
[ ! -z "$gtar" ] && command -v gfind >/dev/null && {
|
||||||
|
tar() { $gtar "$@"; }
|
||||||
|
sed() { gsed "$@"; }
|
||||||
|
find() { gfind "$@"; }
|
||||||
|
sort() { gsort "$@"; }
|
||||||
|
command -v grealpath >/dev/null &&
|
||||||
|
realpath() { grealpath "$@"; }
|
||||||
|
}
|
||||||
|
|
||||||
rm -rf unt
|
rm -rf unt
|
||||||
mkdir -p unt/srv
|
mkdir -p unt/srv
|
||||||
cp -pR copyparty tests unt/
|
cp -pR copyparty tests unt/
|
||||||
@@ -30,9 +41,11 @@ for py in python{2,3}; do
|
|||||||
[ "${1:0:6}" = python ] && [ "$1" != $py ] && continue
|
[ "${1:0:6}" = python ] && [ "$1" != $py ] && continue
|
||||||
|
|
||||||
PYTHONPATH=
|
PYTHONPATH=
|
||||||
[ $py = python2 ] && PYTHONPATH=../scripts/py2:../sfx/py37
|
[ $py = python2 ] && PYTHONPATH=../scripts/py2:../sfx/py37:../sfx/j2
|
||||||
export PYTHONPATH
|
export PYTHONPATH
|
||||||
|
|
||||||
|
[ $py = python2 ] && py=$(command -v python2.7 || echo $py)
|
||||||
|
|
||||||
nice $py -m unittest discover -s tests >/dev/null &
|
nice $py -m unittest discover -s tests >/dev/null &
|
||||||
pids+=($!)
|
pids+=($!)
|
||||||
done
|
done
|
||||||
|
|||||||
@@ -59,9 +59,6 @@ copyparty/th_srv.py,
|
|||||||
copyparty/u2idx.py,
|
copyparty/u2idx.py,
|
||||||
copyparty/up2k.py,
|
copyparty/up2k.py,
|
||||||
copyparty/util.py,
|
copyparty/util.py,
|
||||||
copyparty/vend,
|
|
||||||
copyparty/vend/asynchat.py,
|
|
||||||
copyparty/vend/asyncore.py,
|
|
||||||
copyparty/web,
|
copyparty/web,
|
||||||
copyparty/web/a,
|
copyparty/web/a,
|
||||||
copyparty/web/a/__init__.py,
|
copyparty/web/a/__init__.py,
|
||||||
|
|||||||
@@ -295,7 +295,10 @@ def unpack():
|
|||||||
# the only possible input is a single tar.bz2
|
# the only possible input is a single tar.bz2
|
||||||
# which gets hardcoded into this script at build stage
|
# which gets hardcoded into this script at build stage
|
||||||
# skip 0
|
# skip 0
|
||||||
tf.extractall(mine)
|
try:
|
||||||
|
tf.extractall(mine, filter="tar")
|
||||||
|
except TypeError:
|
||||||
|
tf.extractall(mine)
|
||||||
|
|
||||||
os.remove(tar)
|
os.remove(tar)
|
||||||
|
|
||||||
|
|||||||
@@ -97,6 +97,7 @@ def tc1(vflags):
|
|||||||
ovid = f.read()
|
ovid = f.read()
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
|
"-q",
|
||||||
"-p4321",
|
"-p4321",
|
||||||
"-e2dsa",
|
"-e2dsa",
|
||||||
"-e2tsr",
|
"-e2tsr",
|
||||||
|
|||||||
@@ -16,16 +16,11 @@ def uncomment(fpath):
|
|||||||
orig = f.read().decode("utf-8")
|
orig = f.read().decode("utf-8")
|
||||||
|
|
||||||
out = ""
|
out = ""
|
||||||
for ln in orig.split("\n"):
|
|
||||||
if not ln.startswith("#"):
|
|
||||||
break
|
|
||||||
|
|
||||||
out += ln + "\n"
|
|
||||||
|
|
||||||
io_obj = io.StringIO(orig)
|
io_obj = io.StringIO(orig)
|
||||||
prev_toktype = tokenize.INDENT
|
prev_toktype = tokenize.INDENT
|
||||||
last_lineno = -1
|
last_lineno = -1
|
||||||
last_col = 0
|
last_col = 0
|
||||||
|
code = False
|
||||||
for tok in tokenize.generate_tokens(io_obj.readline):
|
for tok in tokenize.generate_tokens(io_obj.readline):
|
||||||
# print(repr(tok))
|
# print(repr(tok))
|
||||||
token_type = tok[0]
|
token_type = tok[0]
|
||||||
@@ -53,7 +48,11 @@ def uncomment(fpath):
|
|||||||
out += token_string
|
out += token_string
|
||||||
else:
|
else:
|
||||||
out += '"a"'
|
out += '"a"'
|
||||||
elif token_type != tokenize.COMMENT or is_legalese:
|
elif token_type != tokenize.COMMENT:
|
||||||
|
out += token_string
|
||||||
|
if not code and token_string.strip():
|
||||||
|
code = True
|
||||||
|
elif is_legalese or (not start_col and not code):
|
||||||
out += token_string
|
out += token_string
|
||||||
else:
|
else:
|
||||||
if out.rstrip(" ").endswith("\n"):
|
if out.rstrip(" ").endswith("\n"):
|
||||||
|
|||||||
26
srv/expand/README.md
Normal file
26
srv/expand/README.md
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
## text expansion
|
||||||
|
|
||||||
|
enable expansion of placeholder variables in `README.md` and prologue/epilogue files with `--exp` and customize the list of allowed placeholders to expand using `--exp-md` and `--exp-lg`
|
||||||
|
|
||||||
|
| explanation | placeholder |
|
||||||
|
| -------------------- | -------------------- |
|
||||||
|
| your ip address | {{self.ip}} |
|
||||||
|
| your user-agent | {{self.ua}} |
|
||||||
|
| your username | {{self.uname}} |
|
||||||
|
| the `Host` you see | {{self.host}} |
|
||||||
|
| server unix time | {{srv.itime}} |
|
||||||
|
| server datetime | {{srv.htime}} |
|
||||||
|
| server name | {{cfg.name}} |
|
||||||
|
| logout after | {{cfg.logout}} hours |
|
||||||
|
| vol reindex interval | {{vf.scan}} |
|
||||||
|
| thumbnail size | {{vf.thsize}} |
|
||||||
|
| your country | {{hdr.cf_ipcountry}} |
|
||||||
|
|
||||||
|
placeholders starting with...
|
||||||
|
* `self.` are grabbed from copyparty's internal state; anything in `httpcli.py` is fair game
|
||||||
|
* `cfg.` are the global server settings
|
||||||
|
* `vf.` are the volflags of the current volume
|
||||||
|
* `hdr.` are grabbed from the client headers; any header is supported, just add it (in lowercase) to the allowlist
|
||||||
|
* `srv.` are processed inside the `_expand` function in httpcli
|
||||||
|
|
||||||
|
for example (bad example), `hdr_cf_ipcountry` maps to the header `CF-IPCountry` (which is generated by cloudflare before the request is passed on to your server / copyparty)
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import itertools
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import itertools
|
|
||||||
|
|
||||||
from . import util as tu
|
|
||||||
from .util import Cfg
|
|
||||||
|
|
||||||
from copyparty.authsrv import AuthSrv
|
from copyparty.authsrv import AuthSrv
|
||||||
from copyparty.httpcli import HttpCli
|
from copyparty.httpcli import HttpCli
|
||||||
|
|
||||||
|
from . import util as tu
|
||||||
|
from .util import Cfg
|
||||||
|
|
||||||
atlas = ["%", "25", "2e", "2f", ".", "/"]
|
atlas = ["%", "25", "2e", "2f", ".", "/"]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import sys
|
|
||||||
import runpy
|
import runpy
|
||||||
|
import sys
|
||||||
|
|
||||||
host = sys.argv[1]
|
host = sys.argv[1]
|
||||||
sys.argv = sys.argv[:1] + sys.argv[2:]
|
sys.argv = sys.argv[:1] + sys.argv[2:]
|
||||||
|
|||||||
111
tests/test_dots.py
Normal file
111
tests/test_dots.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# coding: utf-8
|
||||||
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import tarfile
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from copyparty.authsrv import AuthSrv
|
||||||
|
from copyparty.httpcli import HttpCli
|
||||||
|
from copyparty.up2k import Up2k
|
||||||
|
from copyparty.u2idx import U2idx
|
||||||
|
from tests import util as tu
|
||||||
|
from tests.util import Cfg
|
||||||
|
|
||||||
|
|
||||||
|
def hdr(query, uname):
|
||||||
|
h = "GET /%s HTTP/1.1\r\nPW: %s\r\nConnection: close\r\n\r\n"
|
||||||
|
return (h % (query, uname)).encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
class TestHttpCli(unittest.TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self.td = tu.get_ramdisk()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
os.chdir(tempfile.gettempdir())
|
||||||
|
shutil.rmtree(self.td)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
td = os.path.join(self.td, "vfs")
|
||||||
|
os.mkdir(td)
|
||||||
|
os.chdir(td)
|
||||||
|
|
||||||
|
# topDir volA volA/*dirA .volB .volB/*dirB
|
||||||
|
spaths = " t .t a a/da a/.da .b .b/db .b/.db"
|
||||||
|
for n, dirpath in enumerate(spaths.split(" ")):
|
||||||
|
if dirpath:
|
||||||
|
os.makedirs(dirpath)
|
||||||
|
|
||||||
|
for pfx in "f", ".f":
|
||||||
|
filepath = pfx + str(n)
|
||||||
|
if dirpath:
|
||||||
|
filepath = os.path.join(dirpath, filepath)
|
||||||
|
|
||||||
|
with open(filepath, "wb") as f:
|
||||||
|
f.write(filepath.encode("utf-8"))
|
||||||
|
|
||||||
|
vcfg = [
|
||||||
|
".::r,u1:r.,u2",
|
||||||
|
"a:a:r,u1:r,u2",
|
||||||
|
".b:.b:r.,u1:r,u2"
|
||||||
|
]
|
||||||
|
self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"], e2dsa=True)
|
||||||
|
self.asrv = AuthSrv(self.args, self.log)
|
||||||
|
|
||||||
|
self.assertEqual(self.tardir("", "u1"), "f0 t/f1 a/f3 a/da/f4")
|
||||||
|
self.assertEqual(self.tardir(".t", "u1"), "f2")
|
||||||
|
self.assertEqual(self.tardir(".b", "u1"), ".f6 f6 .db/.f8 .db/f8 db/.f7 db/f7")
|
||||||
|
|
||||||
|
zs = ".f0 f0 .t/.f2 .t/f2 t/.f1 t/f1 .b/f6 .b/db/f7 a/f3 a/da/f4"
|
||||||
|
self.assertEqual(self.tardir("", "u2"), zs)
|
||||||
|
|
||||||
|
self.assertEqual(self.curl("?tar", "x")[1][:17], "\nJ2EOT")
|
||||||
|
|
||||||
|
# search
|
||||||
|
up2k = Up2k(self)
|
||||||
|
u2idx = U2idx(self)
|
||||||
|
allvols = list(self.asrv.vfs.all_vols.values())
|
||||||
|
|
||||||
|
x = u2idx.search("u1", allvols, "", 999)
|
||||||
|
x = " ".join(sorted([x["rp"] for x in x[0]]))
|
||||||
|
# u1 can see dotfiles in volB so they should be included
|
||||||
|
xe = ".b/.db/.f8 .b/.db/f8 .b/.f6 .b/db/.f7 .b/db/f7 .b/f6 a/da/f4 a/f3 f0 t/f1"
|
||||||
|
self.assertEqual(x, xe)
|
||||||
|
|
||||||
|
x = u2idx.search("u2", allvols, "", 999)
|
||||||
|
x = " ".join(sorted([x["rp"] for x in x[0]]))
|
||||||
|
self.assertEqual(x, ".f0 .t/.f2 .t/f2 a/da/f4 a/f3 f0 t/.f1 t/f1")
|
||||||
|
|
||||||
|
self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"], dotsrch=False)
|
||||||
|
self.asrv = AuthSrv(self.args, self.log)
|
||||||
|
u2idx = U2idx(self)
|
||||||
|
|
||||||
|
x = u2idx.search("u1", self.asrv.vfs.all_vols.values(), "", 999)
|
||||||
|
x = " ".join(sorted([x["rp"] for x in x[0]]))
|
||||||
|
# u1 can see dotfiles in volB so they should be included
|
||||||
|
xe = "a/da/f4 a/f3 f0 t/f1"
|
||||||
|
self.assertEqual(x, xe)
|
||||||
|
|
||||||
|
def tardir(self, url, uname):
|
||||||
|
h, b = self.curl("/" + url + "?tar", uname, True)
|
||||||
|
tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames()
|
||||||
|
top = ("top" if not url else url.lstrip(".").split("/")[0]) + "/"
|
||||||
|
assert len(tar) == len([x for x in tar if x.startswith(top)])
|
||||||
|
return " ".join([x[len(top):] for x in tar])
|
||||||
|
|
||||||
|
def curl(self, url, uname, binary=False):
|
||||||
|
conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url, uname))
|
||||||
|
HttpCli(conn).run()
|
||||||
|
if binary:
|
||||||
|
h, b = conn.s._reply.split(b"\r\n\r\n", 1)
|
||||||
|
return [h.decode("utf-8"), b]
|
||||||
|
|
||||||
|
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
|
||||||
|
|
||||||
|
def log(self, src, msg, c=0):
|
||||||
|
print(msg)
|
||||||
@@ -4,9 +4,9 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from xml.etree import ElementTree as ET
|
from xml.etree import ElementTree as ET
|
||||||
from copyparty.dxml import parse_xml, BadXML, mkenod, mktnod
|
|
||||||
|
from copyparty.dxml import BadXML, mkenod, mktnod, parse_xml
|
||||||
|
|
||||||
ET.register_namespace("D", "DAV:")
|
ET.register_namespace("D", "DAV:")
|
||||||
|
|
||||||
|
|||||||
@@ -4,18 +4,17 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
import time
|
|
||||||
import shutil
|
|
||||||
import pprint
|
import pprint
|
||||||
|
import shutil
|
||||||
import tarfile
|
import tarfile
|
||||||
import tempfile
|
import tempfile
|
||||||
|
import time
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from tests import util as tu
|
|
||||||
from tests.util import Cfg, eprint
|
|
||||||
|
|
||||||
from copyparty.authsrv import AuthSrv
|
from copyparty.authsrv import AuthSrv
|
||||||
from copyparty.httpcli import HttpCli
|
from copyparty.httpcli import HttpCli
|
||||||
|
from tests import util as tu
|
||||||
|
from tests.util import Cfg, eprint
|
||||||
|
|
||||||
|
|
||||||
def hdr(query):
|
def hdr(query):
|
||||||
|
|||||||
@@ -2,19 +2,17 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import os
|
|
||||||
import json
|
import json
|
||||||
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
import unittest
|
import unittest
|
||||||
from textwrap import dedent
|
|
||||||
|
|
||||||
|
from copyparty import util
|
||||||
|
from copyparty.authsrv import VFS, AuthSrv
|
||||||
from tests import util as tu
|
from tests import util as tu
|
||||||
from tests.util import Cfg
|
from tests.util import Cfg
|
||||||
|
|
||||||
from copyparty.authsrv import AuthSrv, VFS
|
|
||||||
from copyparty import util
|
|
||||||
|
|
||||||
|
|
||||||
class TestVFS(unittest.TestCase):
|
class TestVFS(unittest.TestCase):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
@@ -176,11 +174,11 @@ class TestVFS(unittest.TestCase):
|
|||||||
self.assertEqual(len(vfs.nodes), 1)
|
self.assertEqual(len(vfs.nodes), 1)
|
||||||
self.assertEqual(n.vpath, "a")
|
self.assertEqual(n.vpath, "a")
|
||||||
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
self.assertEqual(n.realpath, os.path.join(td, "a"))
|
||||||
self.assertAxs(n.axs.uread, ["*"])
|
self.assertAxs(n.axs.uread, ["*", "k"])
|
||||||
self.assertAxs(n.axs.uwrite, [])
|
self.assertAxs(n.axs.uwrite, [])
|
||||||
perm_na = (False, False, False, False, False, False, False)
|
perm_na = (False, False, False, False, False, False, False, False)
|
||||||
perm_rw = (True, True, False, False, False, False, False)
|
perm_rw = (True, True, False, False, False, False, False, False)
|
||||||
perm_ro = (True, False, False, False, False, False, False)
|
perm_ro = (True, False, False, False, False, False, False, False)
|
||||||
self.assertEqual(vfs.can_access("/", "*"), perm_na)
|
self.assertEqual(vfs.can_access("/", "*"), perm_na)
|
||||||
self.assertEqual(vfs.can_access("/", "k"), perm_rw)
|
self.assertEqual(vfs.can_access("/", "k"), perm_rw)
|
||||||
self.assertEqual(vfs.can_access("/a", "*"), perm_ro)
|
self.assertEqual(vfs.can_access("/a", "*"), perm_ro)
|
||||||
@@ -233,7 +231,7 @@ class TestVFS(unittest.TestCase):
|
|||||||
cfg_path = os.path.join(self.td, "test.cfg")
|
cfg_path = os.path.join(self.td, "test.cfg")
|
||||||
with open(cfg_path, "wb") as f:
|
with open(cfg_path, "wb") as f:
|
||||||
f.write(
|
f.write(
|
||||||
dedent(
|
util.dedent(
|
||||||
"""
|
"""
|
||||||
u a:123
|
u a:123
|
||||||
u asd:fgh:jkl
|
u asd:fgh:jkl
|
||||||
|
|||||||
110
tests/util.py
110
tests/util.py
@@ -3,23 +3,23 @@
|
|||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import shutil
|
|
||||||
import jinja2
|
|
||||||
import threading
|
|
||||||
import tempfile
|
|
||||||
import platform
|
import platform
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
from argparse import Namespace
|
from argparse import Namespace
|
||||||
|
|
||||||
|
import jinja2
|
||||||
|
|
||||||
WINDOWS = platform.system() == "Windows"
|
WINDOWS = platform.system() == "Windows"
|
||||||
ANYWIN = WINDOWS or sys.platform in ["msys"]
|
ANYWIN = WINDOWS or sys.platform in ["msys"]
|
||||||
MACOS = platform.system() == "Darwin"
|
MACOS = platform.system() == "Darwin"
|
||||||
|
|
||||||
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader)
|
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader) # type: ignore
|
||||||
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}\nJ2EOT")
|
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}\nJ2EOT")
|
||||||
|
|
||||||
|
|
||||||
@@ -43,7 +43,8 @@ if MACOS:
|
|||||||
|
|
||||||
from copyparty.__init__ import E
|
from copyparty.__init__ import E
|
||||||
from copyparty.__main__ import init_E
|
from copyparty.__main__ import init_E
|
||||||
from copyparty.util import Unrecv, FHC, Garda
|
from copyparty.util import FHC, Garda, Unrecv
|
||||||
|
from copyparty.u2idx import U2idx
|
||||||
|
|
||||||
init_E(E)
|
init_E(E)
|
||||||
|
|
||||||
@@ -83,8 +84,8 @@ def get_ramdisk():
|
|||||||
for _ in range(10):
|
for _ in range(10):
|
||||||
try:
|
try:
|
||||||
_, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname])
|
_, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname])
|
||||||
with open("/Volumes/cptd/.metadata_never_index", "w") as f:
|
with open("/Volumes/cptd/.metadata_never_index", "wb") as f:
|
||||||
f.write("orz")
|
f.write(b"orz")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
shutil.rmtree("/Volumes/cptd/.fseventsd")
|
shutil.rmtree("/Volumes/cptd/.fseventsd")
|
||||||
@@ -99,61 +100,75 @@ def get_ramdisk():
|
|||||||
raise Exception("ramdisk creation failed")
|
raise Exception("ramdisk creation failed")
|
||||||
|
|
||||||
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
|
ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
|
||||||
try:
|
if not os.path.isdir(ret):
|
||||||
os.mkdir(ret)
|
os.mkdir(ret)
|
||||||
finally:
|
|
||||||
return subdir(ret)
|
return subdir(ret)
|
||||||
|
|
||||||
|
|
||||||
class Cfg(Namespace):
|
class Cfg(Namespace):
|
||||||
def __init__(self, a=None, v=None, c=None):
|
def __init__(self, a=None, v=None, c=None, **ka0):
|
||||||
ka = {}
|
ka = {}
|
||||||
|
|
||||||
ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw rand smb th_no_crop vague_403 vc ver xdev xlink xvol"
|
ex = "daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp exp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_lifetime no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw rand smb srch_dbg stats th_no_crop vague_403 vc ver xdev xlink xvol"
|
||||||
ka.update(**{k: False for k in ex.split()})
|
ka.update(**{k: False for k in ex.split()})
|
||||||
|
|
||||||
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
|
ex = "dotpart dotsrch no_dhash no_fastboot no_rescan no_sendfile no_voldump re_dhash plain_ip"
|
||||||
ka.update(**{k: True for k in ex.split()})
|
ka.update(**{k: True for k in ex.split()})
|
||||||
|
|
||||||
ex = "css_browser hist js_browser no_forget no_hash no_idx nonsus_urls"
|
ex = "ah_cli ah_gen css_browser hist ipa_re js_browser no_forget no_hash no_idx nonsus_urls"
|
||||||
ka.update(**{k: None for k in ex.split()})
|
ka.update(**{k: None for k in ex.split()})
|
||||||
|
|
||||||
ex = "s_thead s_tbody th_convt"
|
ex = "hash_mt srch_time u2j"
|
||||||
|
ka.update(**{k: 1 for k in ex.split()})
|
||||||
|
|
||||||
|
ex = "reg_cap s_thead s_tbody th_convt"
|
||||||
ka.update(**{k: 9 for k in ex.split()})
|
ka.update(**{k: 9 for k in ex.split()})
|
||||||
|
|
||||||
ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
|
ex = "db_act df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo"
|
||||||
ka.update(**{k: 0 for k in ex.split()})
|
ka.update(**{k: 0 for k in ex.split()})
|
||||||
|
|
||||||
ex = "ah_alg bname doctitle favico html_head lg_sbf log_fk md_sbf mth name textfiles unlist vname R RS SR"
|
ex = "ah_alg bname doctitle exit favico idp_h_usr html_head lg_sbf log_fk md_sbf name textfiles unlist vname R RS SR"
|
||||||
ka.update(**{k: "" for k in ex.split()})
|
ka.update(**{k: "" for k in ex.split()})
|
||||||
|
|
||||||
ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
|
ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
|
||||||
ka.update(**{k: [] for k in ex.split()})
|
ka.update(**{k: [] for k in ex.split()})
|
||||||
|
|
||||||
|
ex = "exp_lg exp_md th_coversd"
|
||||||
|
ka.update(**{k: {} for k in ex.split()})
|
||||||
|
|
||||||
|
ka.update(ka0)
|
||||||
|
|
||||||
super(Cfg, self).__init__(
|
super(Cfg, self).__init__(
|
||||||
a=a or [],
|
a=a or [],
|
||||||
v=v or [],
|
v=v or [],
|
||||||
c=c,
|
c=c,
|
||||||
E=E,
|
E=E,
|
||||||
dbd="wal",
|
dbd="wal",
|
||||||
s_wr_sz=512 * 1024,
|
|
||||||
th_size="320x256",
|
|
||||||
fk_salt="a" * 16,
|
fk_salt="a" * 16,
|
||||||
unpost=600,
|
|
||||||
u2sort="s",
|
|
||||||
mtp=[],
|
|
||||||
mte="a",
|
|
||||||
lang="eng",
|
lang="eng",
|
||||||
|
log_badpwd=1,
|
||||||
logout=573,
|
logout=573,
|
||||||
|
mte={"a": True},
|
||||||
|
mth={},
|
||||||
|
mtp=[],
|
||||||
|
s_wr_sz=512 * 1024,
|
||||||
|
sort="href",
|
||||||
|
srch_hits=99999,
|
||||||
|
th_size="320x256",
|
||||||
|
u2sort="s",
|
||||||
|
u2ts="c",
|
||||||
|
unpost=600,
|
||||||
|
warksalt="hunter2",
|
||||||
**ka
|
**ka
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class NullBroker(object):
|
class NullBroker(object):
|
||||||
def say(*args):
|
def say(self, *args):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def ask(*args):
|
def ask(self, *args):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@@ -180,10 +195,16 @@ class VSock(object):
|
|||||||
|
|
||||||
|
|
||||||
class VHttpSrv(object):
|
class VHttpSrv(object):
|
||||||
def __init__(self):
|
def __init__(self, args, asrv, log):
|
||||||
|
self.args = args
|
||||||
|
self.asrv = asrv
|
||||||
|
self.log = log
|
||||||
|
|
||||||
self.broker = NullBroker()
|
self.broker = NullBroker()
|
||||||
self.prism = None
|
self.prism = None
|
||||||
self.bans = {}
|
self.bans = {}
|
||||||
|
self.nreq = 0
|
||||||
|
self.nsus = 0
|
||||||
|
|
||||||
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
|
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
|
||||||
self.j2 = {x: J2_FILES for x in aliases}
|
self.j2 = {x: J2_FILES for x in aliases}
|
||||||
@@ -193,31 +214,38 @@ class VHttpSrv(object):
|
|||||||
self.g403 = Garda("")
|
self.g403 = Garda("")
|
||||||
self.gurl = Garda("")
|
self.gurl = Garda("")
|
||||||
|
|
||||||
|
self.u2idx = None
|
||||||
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
self.ptn_cc = re.compile(r"[\x00-\x1f]")
|
||||||
|
|
||||||
def cachebuster(self):
|
def cachebuster(self):
|
||||||
return "a"
|
return "a"
|
||||||
|
|
||||||
|
def get_u2idx(self):
|
||||||
|
self.u2idx = self.u2idx or U2idx(self)
|
||||||
|
return self.u2idx
|
||||||
|
|
||||||
|
|
||||||
class VHttpConn(object):
|
class VHttpConn(object):
|
||||||
def __init__(self, args, asrv, log, buf):
|
def __init__(self, args, asrv, log, buf):
|
||||||
|
self.t0 = time.time()
|
||||||
self.s = VSock(buf)
|
self.s = VSock(buf)
|
||||||
self.sr = Unrecv(self.s, None)
|
self.sr = Unrecv(self.s, None) # type: ignore
|
||||||
|
self.aclose = {}
|
||||||
self.addr = ("127.0.0.1", "42069")
|
self.addr = ("127.0.0.1", "42069")
|
||||||
self.args = args
|
self.args = args
|
||||||
self.asrv = asrv
|
self.asrv = asrv
|
||||||
self.nid = None
|
self.bans = {}
|
||||||
|
self.freshen_pwd = 0.0
|
||||||
|
self.hsrv = VHttpSrv(args, asrv, log)
|
||||||
|
self.ico = None
|
||||||
|
self.lf_url = None
|
||||||
self.log_func = log
|
self.log_func = log
|
||||||
self.log_src = "a"
|
self.log_src = "a"
|
||||||
self.lf_url = None
|
|
||||||
self.hsrv = VHttpSrv()
|
|
||||||
self.bans = {}
|
|
||||||
self.aclose = {}
|
|
||||||
self.u2fh = FHC()
|
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.nreq = -1
|
|
||||||
self.nbyte = 0
|
self.nbyte = 0
|
||||||
self.ico = None
|
self.nid = None
|
||||||
|
self.nreq = -1
|
||||||
self.thumbcli = None
|
self.thumbcli = None
|
||||||
self.freshen_pwd = 0.0
|
self.u2fh = FHC()
|
||||||
self.t0 = time.time()
|
|
||||||
|
self.get_u2idx = self.hsrv.get_u2idx
|
||||||
Reference in New Issue
Block a user