Compare commits
235 Commits
| SHA1 |
|---|
| b0cc396bca |
| ae463518f6 |
| 2be2e9a0d8 |
| e405fddf74 |
| c269b0dd91 |
| 8c3211263a |
| bf04e7c089 |
| c7c6e48b1a |
| 974ca773be |
| 9270c2df19 |
| b39ff92f34 |
| 7454167f78 |
| 5ceb3a962f |
| 52bd5642da |
| c39c93725f |
| d00f0b9fa7 |
| 01cfc70982 |
| e6aec189bd |
| c98fff1647 |
| 0009e31bd3 |
| db95e880b2 |
| e69fea4a59 |
| 4360800a6e |
| b179e2b031 |
| ecdec75b4e |
| 5cb2e33353 |
| 43ff2e531a |
| 1c2c9db8f0 |
| 7ea183baef |
| ab87fac6d8 |
| 1e3b7eee3b |
| 4de028fc3b |
| 604e5dfaaf |
| 05e0c2ec9e |
| 76bd005bdc |
| 5effaed352 |
| cedaf4809f |
| 6deaf5c268 |
| 9dc6a26472 |
| 14ad5916fc |
| 1a46738649 |
| 9e5e3b099a |
| 292ce75cc2 |
| ce7df7afd4 |
| e28e793f81 |
| 3e561976db |
| 273a4eb7d0 |
| 6175f85bb6 |
| a80579f63a |
| 96d6bcf26e |
| 49e8df25ac |
| 6a05850f21 |
| 5e7c3defe3 |
| 6c0987d4d0 |
| 6eba9feffe |
| 8adfcf5950 |
| 36d6fa512a |
| 79b6e9b393 |
| dc2e2cbd4b |
| 5c12dac30f |
| 641929191e |
| 617321631a |
| ddc0c899f8 |
| cdec42c1ae |
| c48f469e39 |
| 44909cc7b8 |
| 8f61e1568c |
| b7be7a0fd8 |
| 1526a4e084 |
| dbdb9574b1 |
| 853ae6386c |
| a4b56c74c7 |
| d7f1951e44 |
| 7e2ff9825e |
| 9b423396ec |
| 781146b2fb |
| 84937d1ce0 |
| 98cce66aa4 |
| 043c2d4858 |
| 99cc434779 |
| 5095d17e81 |
| 87d835ae37 |
| 6939ca768b |
| e3957e8239 |
| 4ad6e45216 |
| 76e5eeea3f |
| eb17f57761 |
| b0db14d8b0 |
| 2b644fa81b |
| 190ccee820 |
| 4e7dd32e78 |
| 5817fb66ae |
| 9cb04eef93 |
| 0019fe7f04 |
| 852c6f2de1 |
| c4191de2e7 |
| 4de61defc9 |
| 0aa88590d0 |
| 405f3ee5fe |
| bc339f774a |
| e67b695b23 |
| 4a7633ab99 |
| c58f2ef61f |
| 3866e6a3f2 |
| 381686fc66 |
| a918c285bf |
| 1e20eafbe0 |
| 39399934ee |
| b47635150a |
| 78d2f69ed5 |
| 7a98dc669e |
| 2f15bb5085 |
| 712a578e6c |
| d8dfc4ccb2 |
| e413007eb0 |
| 6d1d3e48d8 |
| 04966164ce |
| 8b62aa7cc7 |
| 1088e8c6a5 |
| 8c54c2226f |
| f74ac1f18b |
| 25931e62fd |
| 707a940399 |
| 87ef50d384 |
| dcadf2b11c |
| 37a690a4c3 |
| 87ad23fb93 |
| 5f54d534e3 |
| aecae552a4 |
| eaa6b3d0be |
| c2ace91e52 |
| 0bac87c36f |
| e650d05939 |
| 85a96e4446 |
| 2569005139 |
| c50cb66aef |
| d4c5fca15b |
| 75cea4f684 |
| 68c6794d33 |
| 82f98dd54d |
| 741d781c18 |
| 0be1e43451 |
| 5366bf22bb |
| bcd91b1809 |
| 9bd5738e6f |
| bab4aa4c0a |
| e965b9b9e2 |
| 31101427d3 |
| a083dc36ba |
| 9b7b9262aa |
| 660011fa6e |
| ead31b6823 |
| 4310580cd4 |
| b005acbfda |
| 460709e6f3 |
| a8768d05a9 |
| f8e3e87a52 |
| 70f1642d0d |
| 3fc7561da4 |
| 9065226c3d |
| b7e321fa47 |
| 664665b86b |
| f4f362b7a4 |
| 577d23f460 |
| 504e168486 |
| f2f9640371 |
| ee46f832b1 |
| b0e755d410 |
| cfd24604d5 |
| 264894e595 |
| 5bb9f56247 |
| 18942ed066 |
| 85321a6f31 |
| baf641396d |
| 17c91e7014 |
| 010770684d |
| b4c503657b |
| 71bd306268 |
| dd7fab1352 |
| dacca18863 |
| 53d92cc0a6 |
| 434823f6f0 |
| 2cb1f50370 |
| 03f53f6392 |
| a70ecd7af0 |
| 8b81e58205 |
| 4500c04edf |
| 6222ddd720 |
| 8a7135cf41 |
| b4c7282956 |
| 8491a40a04 |
| 343d38b693 |
| 6cf53d7364 |
| b070d44de7 |
| 79aa40fdea |
| dcaff2785f |
| 497f5b4307 |
| be32ad0da6 |
| 8ee2bf810b |
| 28232656a9 |
| fbc2424e8f |
| 94cd13e8b8 |
| 447ed5ab37 |
| af59808611 |
| e3406a9f86 |
| 7fd1d6a4e8 |
| 0ab2a665de |
| 3895575bc2 |
| 138c2bbcbb |
| bc7af1d1c8 |
| 19cd96e392 |
| db194ab519 |
| 02ad4bfab2 |
| 56b73dcc8a |
| 7704b9c8a2 |
| 999b7ae919 |
| 252b5a88b1 |
| 01e2681a07 |
| aa32f30202 |
| 195eb53995 |
| 06fa78f54a |
| 7a57c9dbf1 |
| bb657bfa85 |
| 87181726b0 |
| f1477a1c14 |
| 4f94a9e38b |
| fbed322d3b |
| 9b0f519e4e |
| 6cd6dadd06 |
| 9a28afcb48 |
| 45b701801d |
| 062246fb12 |
| 416ebfdd68 |
| 731eb92f33 |
| dbe2aec79c |
2  .github/pull_request_template.md  (vendored, new file)
@@ -0,0 +1,2 @@
+Please include the following text somewhere in this PR description:
+This PR complies with the DCO; https://developercertificate.org/
7  .gitignore  (vendored)
@@ -25,7 +25,12 @@ copyparty.egg-info/
 copyparty/res/COPYING.txt
 copyparty/web/deps/
 srv/
+scripts/docker/i/
+contrib/package/arch/pkg/
+contrib/package/arch/src/

 # state/logs
 up.*.txt
 .hist/
+scripts/docker/*.out
+scripts/docker/*.err
1  .vscode/launch.json  (vendored)
@@ -8,6 +8,7 @@
 "module": "copyparty",
 "console": "integratedTerminal",
 "cwd": "${workspaceFolder}",
+"justMyCode": false,
 "args": [
 //"-nw",
 "-ed",
3  .vscode/settings.json  (vendored)
@@ -52,9 +52,11 @@
 "--disable=missing-module-docstring",
 "--disable=missing-class-docstring",
 "--disable=missing-function-docstring",
+"--disable=import-outside-toplevel",
 "--disable=wrong-import-position",
 "--disable=raise-missing-from",
 "--disable=bare-except",
+"--disable=broad-except",
 "--disable=invalid-name",
 "--disable=line-too-long",
 "--disable=consider-using-f-string"
@@ -64,6 +66,7 @@
 "editor.formatOnSave": true,
 "[html]": {
 "editor.formatOnSave": false,
+"editor.autoIndent": "keep",
 },
 "[css]": {
 "editor.formatOnSave": false,
209  README.md
@@ -1,6 +1,6 @@
-# ⇆🎉 copyparty
+# 💾🎉 copyparty

-* http file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
+* portable file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
 * MIT-Licensed, 2019-05-26, ed @ irc.rizon.net


@@ -27,9 +27,8 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
 ## readme toc

 * top
-* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
+* [quickstart](#quickstart) - just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
 * [on servers](#on-servers) - you may also want these, especially on servers
-* [on debian](#on-debian) - recommended additional steps on debian
 * [features](#features)
 * [testimonials](#testimonials) - small collection of user feedback
 * [motivations](#motivations) - project goals / philosophy
@@ -57,7 +56,7 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
 * [other tricks](#other-tricks)
 * [searching](#searching) - search by size, date, path/name, mp3-tags, ...
 * [server config](#server-config) - using arguments or config files, or a mix of both
-* [zeroconf](#zeroconf) - announce enabled services on the LAN
+* [zeroconf](#zeroconf) - announce enabled services on the LAN ([pic](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png))
 * [mdns](#mdns) - LAN domain-name and feature announcer
 * [ssdp](#ssdp) - windows-explorer announcer
 * [qr-code](#qr-code) - print a qr-code [(screenshot)](https://user-images.githubusercontent.com/241032/194728533-6f00849b-c6ac-43c6-9359-83e454d11e00.png) for quick access
@@ -75,32 +74,29 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
 * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
 * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
 * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
-* [upload events](#upload-events) - trigger a script/program on each upload
+* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
+* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
 * [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
 * [themes](#themes)
 * [complete examples](#complete-examples)
+* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
 * [browser support](#browser-support) - TLDR: yes
 * [client examples](#client-examples) - interact with copyparty using non-browser clients
 * [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem
-* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
 * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
 * [client-side](#client-side) - when uploading files
 * [security](#security) - some notes on hardening
 * [gotchas](#gotchas) - behavior that might be unexpected
+* [cors](#cors) - cross-site request config
 * [recovering from crashes](#recovering-from-crashes)
 * [client crashes](#client-crashes)
 * [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
-* [HTTP API](#HTTP-API)
-* [read](#read)
-* [write](#write)
-* [admin](#admin)
-* [general](#general)
+* [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api)
 * [dependencies](#dependencies) - mandatory deps
 * [optional dependencies](#optional-dependencies) - install these to enable bonus features
-* [install recommended deps](#install-recommended-deps)
 * [optional gpl stuff](#optional-gpl-stuff)
 * [sfx](#sfx) - the self-contained "binary"
-* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe)
+* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
 * [install on android](#install-on-android)
 * [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
 * [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
@@ -108,18 +104,34 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro

 ## quickstart

-download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
+just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉

-if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
+* or install through pypi (python3 only): `python3 -m pip install --user -U copyparty`
+* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
+* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
+* docker has all deps built-in, so skip this step:

-running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
+enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:

+* **Alpine:** `apk add py3-pillow ffmpeg`
+* **Debian:** `apt install python3-pil ffmpeg`
+* **Fedora:** `dnf install python3-pillow ffmpeg`
+* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
+* **MacOS:** `port install py-Pillow ffmpeg`
+* **MacOS** (alternative): `brew install pillow ffmpeg`
+* **Windows:** `python -m pip install --user -U Pillow`
+* install python and ffmpeg manually; do not use `winget` or `Microsoft Store` (it breaks $PATH)
+* copyparty.exe comes with `Pillow` and only needs `ffmpeg`
+* see [optional dependencies](#optional-dependencies) to enable even more features

+running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)

 some recommended options:
 * `-e2dsa` enables general [file indexing](#file-indexing)
-* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies) to enable thumbnails and more
+* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen)
 * `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
 * replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
-* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et, up`G`et)
+* see [accounts and volumes](#accounts-and-volumes) (or `--help-accounts`) for the syntax and other permissions


 ### on servers
@@ -128,7 +140,8 @@ you may also want these, especially on servers:

 * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
 * [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
-* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
+* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
+* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)

 and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
 ```
@@ -139,18 +152,6 @@ firewall-cmd --reload
 ```
 (1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp)

-### on debian
-
-recommended additional steps on debian which enable audio metadata and thumbnails (from images and videos):
-
-* as root, run the following:
-`apt install python3 python3-pip python3-dev ffmpeg`
-
-* then, as the user which will be running copyparty (so hopefully not root), run this:
-`python3 -m pip install --user -U Pillow pillow-avif-plugin`
-
-(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
-

 ## features

@@ -167,6 +168,7 @@ recommended additional steps on debian which enable audio metadata and thumbnai
 * upload
 * ☑ basic: plain multipart, ie6 support
 * ☑ [up2k](#uploading): js, resumable, multithreaded
+* unaffected by cloudflare's max-upload-size (100 MiB)
 * ☑ stash: simple PUT filedropper
 * ☑ [unpost](#unpost): undo/delete accidental uploads
 * ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
@@ -178,7 +180,7 @@ recommended additional steps on debian which enable audio metadata and thumbnai
 * browser
 * ☑ [navpane](#navpane) (directory tree sidebar)
 * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
-* ☑ audio player (with OS media controls and opus transcoding)
+* ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus transcoding)
 * ☑ image gallery with webm player
 * ☑ textfile browser with syntax hilighting
 * ☑ [thumbnails](#thumbnails)
@@ -200,7 +202,7 @@ recommended additional steps on debian which enable audio metadata and thumbnai

 small collection of user feedback

-`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
+`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`, `wow this is better than nextcloud`


 # motivations
@@ -209,8 +211,7 @@ project goals / philosophy

 * inverse linux philosophy -- do all the things, and do an *okay* job
 * quick drop-in service to get a lot of features in a pinch
-* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
+* some of [the alternatives](./docs/versus.md) might be a better fit for you
-* but the resumable multithreaded uploads are p slick ngl
 * run anywhere, support everything
 * as many web-browsers and python versions as possible
 * every browser should at least be able to browse, download, upload files
@@ -234,11 +235,14 @@ browser-specific:
 * Android-Chrome: increase "parallel uploads" for higher speed (android bug)
 * Android-Firefox: takes a while to select files (their fix for ☝️)
 * Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
-* Desktop-Firefox: may stop you from deleting files you've uploaded until you visit `about:memory` and click `Minimize memory usage`
+* Desktop-Firefox: [may stop you from unplugging USB flashdrives](https://bugzilla.mozilla.org/show_bug.cgi?id=1792598) until you visit `about:memory` and click `Minimize memory usage`

 server-os-specific:
 * RHEL8 / Rocky8: you can run copyparty using `/usr/libexec/platform-python`

+server notes:
+* pypy is supported but regular cpython is faster if you enable the database
+

 # bugs

@@ -252,23 +256,15 @@
 * Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
 * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
 * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
-* [the database can get stuck](https://github.com/9001/copyparty/issues/10)
-* has only happened once but that is once too many
-* luckily not dangerous for file integrity and doesn't really stop uploads or anything like that
-* but would really appreciate some logs if anyone ever runs into it again
 * probably more, pls let me know

 ## not my bugs

 * [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded

-* [Chrome issue 1354816](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816) -- chrome may eat all RAM uploading over plaintext http with `mt` enabled
+* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)

-* more amusingly, [Chrome issue 1354800](https://bugs.chromium.org/p/chromium/issues/detail?id=1354800) -- chrome may eat all RAM uploading in general (altho you probably won't run into this one)
+* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files

-* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive and likely to run into the above gc bugs)
-
-* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- sometimes forgets to close filedescriptors during upload so the browser can crash after ~4000 files
-
 * iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
 * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
@@ -291,6 +287,9 @@

 upgrade notes

+* `1.6.0` (2023-01-29):
+* http-api: delete/move is now `POST` instead of `GET`
+* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
 * `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB
 * **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) if you use that
 * **devs:** update third-party up2k clients (if those even exist)
@@ -305,13 +304,14 @@ upgrade notes
 * you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty

 * can I make copyparty download a file to my server if I give it a URL?
-* not really, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
+* yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)


 # accounts and volumes

 per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
 * much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
+* changes to the `[global]` config section requires a restart to take effect

 a quick summary can be seen using `--help-accounts`

@@ -516,11 +516,14 @@ up2k has several advantages:
 * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
 * the last-modified timestamp of the file is preserved

+> it is perfectly safe to restart / upgrade copyparty while someone is uploading to it!
+> all known up2k clients will resume just fine 💪
+
 see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)

 

-**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
+**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.js](contrib/plugins/minimal-up2k.js) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)

 **protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion

@@ -696,11 +699,12 @@ using arguments or config files, or a mix of both:
 * config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf) and [./docs/example2.conf](docs/example2.conf)
 * `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
 * or click the `[reload cfg]` button in the control-panel when logged in as admin
+* changes to the `[global]` config section requires a restart to take effect


 ## zeroconf

-announce enabled services on the LAN if you specify the `-z` option, which enables [mdns](#mdns) and [ssdp](#ssdp)
+announce enabled services on the LAN ([pic](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png)) -- `-z` enables both [mdns](#mdns) and [ssdp](#ssdp)

 * `--z-on` / `--z-off`' limits the feature to certain networks

@@ -724,6 +728,10 @@ uses [ssdp](https://en.wikipedia.org/wiki/Simple_Service_Discovery_Protocol) to

 doubleclicking the icon opens the "connect" page which explains how to mount copyparty as a local filesystem

+if copyparty does not appear in windows explorer, use `--zsv` to see why:
+
+* maybe the discovery multicast was sent from an IP which does not intersect with the server subnets
+

 ## qr-code

@@ -763,6 +771,8 @@ general usage:
 on macos, connect from finder:
 * [Go] -> [Connect to Server...] -> http://192.168.123.1:3923/

+in order to grant full write-access to webdav clients, the volflag `daw` must be set and the account must also have delete-access (otherwise the client won't be allowed to replace the contents of existing files, which is how webdav works)
+

 ### connecting to webdav from windows

@@ -802,7 +812,7 @@ some **BIG WARNINGS** specific to SMB/CIFS, in decreasing importance:

 and some minor issues,
 * clients only see the first ~400 files in big folders; [impacket#1433](https://github.com/SecureAuthCorp/impacket/issues/1433)
-* hot-reload of server config (`/?reload=cfg`) only works for volumes, not account passwords
+* hot-reload of server config (`/?reload=cfg`) does not include the `[global]` section (commandline args)
 * listens on the first IPv4 `-i` interface only (default = :: = 0.0.0.0 = all)
 * login doesn't work on winxp, but anonymous access is ok -- remove all accounts from copyparty config for that to work
 * win10 onwards does not allow connecting anonymously / without accounts
@@ -932,6 +942,8 @@ some examples,
 ## other flags

 * `:c,magic` enables filetype detection for nameless uploads, same as `--magic`
+* needs https://pypi.org/project/python-magic/ `python3 -m pip install --user -U python-magic`
+* on windows grab this instead `python3 -m pip install --user -U python-magic-bin`


 ## database location
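Regarding the `--magic` / python-magic note in the hunk above: a quick way to check locally what that library would report for a given file can be handy when debugging filetype detection. This is a hedged sketch and not part of copyparty itself; it only assumes python-magic is installed as shown above.

```python
# sketch: inspect what python-magic (the library behind --magic) reports for a file
import sys

import magic  # from the python-magic package

path = sys.argv[1]
print(magic.from_file(path, mime=True))  # e.g. "audio/flac"
print(magic.from_file(path))             # longer human-readable description
```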
@@ -1000,9 +1012,18 @@ copyparty can invoke external programs to collect additional metadata for files
 if something doesn't work, try `--mtag-v` for verbose error messages


-## upload events
+## event hooks

-trigger a script/program on each upload like so:
+trigger a program on uploads, renames etc ([examples](./bin/hooks/))
+
+you can set hooks before and/or after an event happens, and currently you can hook uploads, moves/renames, and deletes
+
+there's a bunch of flags and stuff, see `--help-hooks`
+
+
+### upload events
+
+the older, more powerful approach ([examples](./bin/mtag/)):

 ```
 -v /mnt/inc:inc:w:c,mte=+x1:c,mtp=x1=ad,kn,/usr/bin/notify-send
@@ -1012,11 +1033,12 @@ so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, app

 that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)

-note that it will only trigger on new unique files, not dupes
+note that this is way more complicated than the new [event hooks](#event-hooks) but this approach has the following advantages:
+* non-blocking and multithreaded; doesn't hold other uploads back
+* you get access to tags from FFmpeg and other mtp parsers
+* only trigger on new unique files, not dupes

-and it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
+note that it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`

-if this becomes popular maybe there should be a less janky way to do it actually
-

 ## hiding from google
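To make the event-hooks hunks above more concrete: per [./bin/hooks/](./bin/hooks/), a hook is a standalone program that copyparty runs with the path of the affected file as its only argument (or a json message, depending on configuration). The following is a hedged sketch rather than one of the bundled hooks; the logfile location is arbitrary, and the exact hook types and flag syntax (xbu/xau/xiu/xbr/xar/xbd/xad) are documented by `--help-hooks`.

```python
#!/usr/bin/env python3
# minimal after-upload hook sketch: copyparty invokes this with the
# uploaded file's path as the only commandline argument
import os
import sys
import time


def main():
    path = sys.argv[1]  # the file that was just uploaded
    size = os.path.getsize(path)
    stamp = time.strftime("%Y-%m-%d %H:%M:%S")
    # append one line per upload to a logfile in the user's home folder
    with open(os.path.expanduser("~/uploads.log"), "a", encoding="utf-8") as f:
        f.write("%s  %d bytes  %s\n" % (stamp, size, path))


if __name__ == "__main__":
    main()
```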
@@ -1068,6 +1090,21 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
 `-lo log/cpp-%Y-%m%d-%H%M%S.txt.xz`


+## reverse-proxy
+
+running copyparty next to other websites hosted on an existing webserver such as nginx or apache
+
+you can either:
+* give copyparty its own domain or subdomain (recommended)
+* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
+* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
+
+example webserver configs:
+
+* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
+* [apache2 config](contrib/apache/copyparty.conf) -- location-based
+
+
 # browser support

 TLDR: yes
@@ -1127,11 +1164,11 @@ interact with copyparty using non-browser clients
 * curl/wget: upload some files (post=file, chunk=stdin)
 * `post(){ curl -F act=bput -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
 `post movie.mkv`
-* `post(){ curl -b cppwd=wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
+* `post(){ curl -H pw:wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
 `post movie.mkv`
-* `post(){ wget --header='Cookie: cppwd=wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
+* `post(){ wget --header='pw: wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
 `post movie.mkv`
-* `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
+* `chunk(){ curl -H pw:wark -T- http://127.0.0.1:3923/;}`
 `chunk <movie.mkv`

 * bash: when curl and wget is not available or too boring
@@ -1139,7 +1176,7 @@ interact with copyparty using non-browser clients
 * `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`

 * python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
-* file uploads, file-search, autoresume of aborted/broken uploads
+* file uploads, file-search, folder sync, autoresume of aborted/broken uploads
 * can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py)
 * see [./bin/README.md#up2kpy](bin/README.md#up2kpy)

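The curl one-liners in the hunks above translate directly to Python for anyone scripting against the plain HTTP upload endpoints (as opposed to the up2k client). This is a hedged sketch using the third-party `requests` library; the server URL, filename and password are placeholders mirroring those examples.

```python
# sketch: plain-HTTP uploads mirroring the curl examples above (not up2k)
import requests

URL = "http://127.0.0.1:3923/"
PW = "wark"  # placeholder password, same as the curl examples

# equivalent of: curl -H pw:wark -T movie.mkv http://127.0.0.1:3923/
with open("movie.mkv", "rb") as f:
    r = requests.put(URL + "movie.mkv", data=f, headers={"pw": PW})
    print(r.status_code, r.text)

# equivalent of: curl -F act=bput -F f=@movie.mkv http://127.0.0.1:3923/?pw=wark
with open("movie.mkv", "rb") as f:
    r = requests.post(URL, params={"pw": PW},
                      data={"act": "bput"}, files={"f": f})
    print(r.status_code, r.text)
```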
@@ -1155,7 +1192,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
 b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
 b512 <movie.mkv

-you can provide passwords using cookie `cppwd=hunter2`, as a url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)
+you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)

 NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename

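The `b512` shell function above can also be written in Python, which may be easier to read when verifying the checksum copyparty returns for a PUT/POST; a small sketch equivalent to the one-liner (sha512 the file, base64url-encode the raw digest, keep the first 44 characters):

```python
# python equivalent of the b512() shell function above
import base64
import hashlib
import sys


def b512(path):
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(1024 * 1024), b""):
            h.update(buf)
    # urlsafe_b64encode already maps '+/' to '-_' like the tr in the shell version
    return base64.urlsafe_b64encode(h.digest())[:44].decode("ascii")


if __name__ == "__main__":
    print(b512(sys.argv[1]))
```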
@@ -1191,7 +1228,7 @@ below are some tweaks roughly ordered by usefulness:
 * `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
 * `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
 * huge amount of short-lived connections
-* really heavy traffic (downloads/uploads)
+* simultaneous downloads and uploads saturating a 20gbps connection

 ...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u

@@ -1216,6 +1253,11 @@ when uploading files,

 some notes on hardening

+* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
+* cors doesn't work right otherwise
+
+safety profiles:
+
 * option `-s` is a shortcut to set the following options:
 * `--no-thumb` disables thumbnails and audio transcoding to stop copyparty from running `FFmpeg`/`Pillow`/`VIPS` on uploaded files, which is a [good idea](https://www.cvedetails.com/vulnerability-list.php?vendor_id=3611) if anonymous upload is enabled
 * `--no-mtag-ff` uses `mutagen` to grab music tags instead of `FFmpeg`, which is safer and faster but less accurate
@@ -1223,7 +1265,6 @@ some notes on hardening
 * `--no-robots` and `--force-js` makes life harder for crawlers, see [hiding from google](#hiding-from-google)

 * option `-ss` is a shortcut for the above plus:
-* `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary `<script>` tags
 * `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
 * `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
 * however note if you edit one file it will also affect the other copies
@@ -1234,6 +1275,7 @@

 * option `-sss` is a shortcut for the above plus:
 * `--no-dav` disables webdav support
+* `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary (but sandboxed) `<script>` tags
 * `-lo cpp-%Y-%m%d-%H%M%S.txt.xz` enables logging to disk
 * `-ls **,*,ln,p,r` does a scan on startup for any dangerous symlinks

@@ -1241,6 +1283,7 @@ other misc notes:

 * you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
 * combine this with volflag `c,fk` to generate filekeys (per-file accesskeys); users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
+* the default filekey entropy is fairly small so give `--fk-salt` around 30 characters if you want filekeys longer than 16 chars
 * permissions `wG` lets users upload files and receive their own filekeys, still without being able to see other uploads


@@ -1249,6 +1292,22 @@
 behavior that might be unexpected

 * users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
+* users can submit `<script>`s which autorun for other visitors in a few ways;
+* uploading a `README.md` -- avoid with `--no-readme`
+* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
+* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
+
+
+## cors
+
+cross-site request config
+
+by default, except for `GET` and `HEAD` operations, all requests must either:
+* not contain an `Origin` header at all
+* or have an `Origin` matching the server domain
+* or the header `PW` with your password as value
+
+cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
+
+
 # recovering from crashes
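To illustrate the cors section added above: anything other than `GET`/`HEAD` arriving with a foreign `Origin` has to carry the `PW` header (unless cors is relaxed with `--acao`/`--acam`/`--allow-csrf`). A hedged sketch using `requests`; the URL, origin and password are placeholders, and the form fields simply reuse the documented `act=bput` upload from the client-examples section.

```python
# sketch: a cross-origin POST that passes cors validation via the PW header
import requests

URL = "http://127.0.0.1:3923/inc/"  # placeholder volume
HEADERS = {
    "Origin": "https://other-site.example",  # a non-matching origin...
    "PW": "hunter2",                         # ...so the password header is required
}

r = requests.post(URL, headers=HEADERS,
                  data={"act": "bput"},
                  files={"f": ("hello.txt", b"hello world\n")})
print(r.status_code)
```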
@@ -1301,18 +1360,12 @@ enable [thumbnails](#thumbnails) of...
 * **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
 * **JPEG XL pictures:** `pyvips` or `ffmpeg`

-enable [smb](#smb-server) support:
+enable [smb](#smb-server) support (**not** recommended):
 * `impacket==0.10.0`

 `pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`


-## install recommended deps
-```
-python -m pip install --user -U jinja2 mutagen Pillow
-```
-

 ## optional gpl stuff

 some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
@@ -1329,15 +1382,19 @@ you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack]

 ## copyparty.exe

-download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe)
+download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)

 [](https://user-images.githubusercontent.com/241032/194707422-cb7f66c9-41a2-4cb9-8dbc-2ab866cd4338.png)

-can be convenient on old machines where installing python is problematic, however is **not recommended** and should be considered a last resort -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead
+can be convenient on machines where installing python is problematic, however is **not recommended** -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead

-* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is compatible with 32bit windows7, which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and will definitely become a security hazard at some point
+* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe

-* [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe) is identical except 64bit so it [works in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png)
+* on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works

+* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
+
+* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
+
 meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date

9  SECURITY.md  (new file)
@@ -0,0 +1,9 @@
+# Security Policy
+
+if you hit something extra juicy pls let me know on either of the following
+* email -- `copyparty@ocv.ze` except `ze` should be `me`
+* [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space`
+* [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated
+* [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet)
+
+no bug bounties sorry! all i can offer is greetz in the release notes
@@ -1,7 +1,8 @@
 # [`up2k.py`](up2k.py)
 * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
 * file uploads, file-search, autoresume of aborted/broken uploads
-* faster than browsers
+* sync local folder to server
+* generally faster than browsers
 * if something breaks just restart it
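for example, to upload a folder and then keep the server in sync with it (the URL, password and paths below are placeholders; `--help` lists everything):

```bash
# upload ./music into the inc folder, resuming any broken transfers
python3 up2k.py -a hunter2 https://example.com/inc/ ./music

# same, but also delete remote files that no longer exist locally
python3 up2k.py -a hunter2 --dr https://example.com/inc/ ./music
```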
28  bin/hooks/README.md  Normal file
@@ -0,0 +1,28 @@
standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...)

these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info

run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)

> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead


# after upload
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
* [notify2.py](notify2.py) uses the json API to show more context
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable


# upload batches
these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root


# before upload
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions


# on message
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
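as a quick sketch of the interface described above (script name and log path are made up), a filepath-style hook can be a two-line shellscript registered with something like `--xau bin/hooks/log-upload.sh`:

```bash
#!/bin/bash
# minimal xau (execute-after-upload) hook; copyparty hands us the
# absolute path of the uploaded file as the first argument
printf '%s uploaded %s\n' "$(date -u +%FT%TZ)" "$1" >> /tmp/uploads.log
```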
61  bin/hooks/discord-announce.py  Executable file
@@ -0,0 +1,61 @@
#!/usr/bin/env python3

import sys
import json
import requests
from copyparty.util import humansize, quotep


_ = r"""
announces a new upload on discord

example usage as global config:
    --xau f,t5,j,bin/hooks/discord-announce.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xau=f,t5,j,bin/hooks/discord-announce.py

parameters explained,
    f = fork; don't wait for it to finish
    t5 = timeout if it's still running after 5 sec
    j = provide upload information as json; not just the filename

replace "xau" with "xbu" to announce Before upload starts instead of After completion

# how to discord:
first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks
then use this to design your message: https://discohook.org/
"""


def main():
    WEBHOOK = "https://discord.com/api/webhooks/1234/base64"

    # read info from copyparty
    inf = json.loads(sys.argv[1])
    vpath = inf["vp"]
    filename = vpath.split("/")[-1]
    url = f"https://{inf['host']}/{quotep(vpath)}"

    # compose the message to discord
    j = {
        "title": filename,
        "url": url,
        "description": url.rsplit("/", 1)[0],
        "color": 0x449900,
        "fields": [
            {"name": "Size", "value": humansize(inf["sz"])},
            {"name": "User", "value": inf["user"]},
            {"name": "IP", "value": inf["ip"]},
        ],
    }

    for v in j["fields"]:
        v["inline"] = True

    r = requests.post(WEBHOOK, json={"embeds": [j]})
    print(f"discord: {r}\n", end="")


if __name__ == "__main__":
    main()
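since the hook just reads a json blob from argv, it can also be tried outside of copyparty; the values below are made up, and the request will go to whatever WEBHOOK is set to:

```bash
python3 bin/hooks/discord-announce.py \
  '{"vp":"inc/cat.jpg","host":"example.com","sz":123456,"user":"ed","ip":"127.0.0.1"}'
```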
62  bin/hooks/notify.py  Executable file
@@ -0,0 +1,62 @@
#!/usr/bin/env python3

import os
import sys
import subprocess as sp
from plyer import notification


_ = r"""
show os notification on upload; works on windows, linux, macos, android

dependencies:
    windows: python3 -m pip install --user -U plyer
    linux: python3 -m pip install --user -U plyer
    macos: python3 -m pip install --user -U plyer pyobjus
    android: just termux and termux-api

example usages; either as global config (all volumes) or as volflag:
    --xau f,bin/hooks/notify.py
    -v srv/inc:inc:c,xau=f,bin/hooks/notify.py
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^

parameters explained,
    xau = execute after upload
    f = fork so it doesn't block uploads
"""


try:
    from copyparty.util import humansize
except:

    def humansize(n):
        return n


def main():
    fp = sys.argv[1]
    dp, fn = os.path.split(fp)
    try:
        sz = humansize(os.path.getsize(fp))
    except:
        sz = "?"

    msg = "{} ({})\n📁 {}".format(fn, sz, dp)
    title = "File received"

    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
68  bin/hooks/notify2.py  Executable file
@@ -0,0 +1,68 @@
#!/usr/bin/env python3

import json
import os
import sys
import subprocess as sp
from datetime import datetime
from plyer import notification


_ = r"""
same as notify.py but with additional info (uploader, ...)
and also supports --xm (notify on 📟 message)

example usages; either as global config (all volumes) or as volflag:
    --xm f,j,bin/hooks/notify2.py
    --xau f,j,bin/hooks/notify2.py
    -v srv/inc:inc:c,xm=f,j,bin/hooks/notify2.py
    -v srv/inc:inc:c,xau=f,j,bin/hooks/notify2.py
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

parameters explained,
    xau = execute after upload
    f = fork so it doesn't block uploads
    j = provide json instead of filepath list
"""


try:
    from copyparty.util import humansize
except:

    def humansize(n):
        return n


def main():
    inf = json.loads(sys.argv[1])
    fp = inf["ap"]
    sz = humansize(inf["sz"])
    dp, fn = os.path.split(fp)
    mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")

    msg = f"{fn} ({sz})\n📁 {dp}"
    title = "File received"
    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""

    if inf.get("txt"):
        msg = inf["txt"]
        title = "Message received"
        icon = "mail-unread-symbolic" if sys.platform == "linux" else ""

    msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"

    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
30  bin/hooks/reject-extension.py  Executable file
@@ -0,0 +1,30 @@
#!/usr/bin/env python3

import sys


_ = r"""
reject file uploads by file extension

example usage as global config:
    --xbu c,bin/hooks/reject-extension.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xbu=c,bin/hooks/reject-extension.py

parameters explained,
    xbu = execute before upload
    c = check result, reject upload if error
"""


def main():
    bad = "exe scr com pif bat ps1 jar msi"

    ext = sys.argv[1].split(".")[-1]

    sys.exit(1 if ext in bad.split() else 0)


if __name__ == "__main__":
    main()
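because the `c` flag rejects the upload whenever the hook exits nonzero, the behavior is easy to check from a shell (filenames are arbitrary):

```bash
python3 bin/hooks/reject-extension.py totally-legit.exe; echo $?  # 1 = rejected
python3 bin/hooks/reject-extension.py kitten.jpg; echo $?         # 0 = accepted
```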
39  bin/hooks/reject-mimetype.py  Executable file
@@ -0,0 +1,39 @@
#!/usr/bin/env python3

import sys
import magic


_ = r"""
reject file uploads by mimetype

dependencies (linux, macos):
    python3 -m pip install --user -U python-magic

dependencies (windows):
    python3 -m pip install --user -U python-magic-bin

example usage as global config:
    --xau c,bin/hooks/reject-mimetype.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xau=c,bin/hooks/reject-mimetype.py

parameters explained,
    xau = execute after upload
    c = check result, reject upload if error
"""


def main():
    ok = ["image/jpeg", "image/png"]

    mt = magic.from_file(sys.argv[1], mime=True)

    print(mt)

    sys.exit(1 if mt not in ok else 0)


if __name__ == "__main__":
    main()
54  bin/hooks/wget.py  Executable file
@@ -0,0 +1,54 @@
#!/usr/bin/env python3

import os
import sys
import json
import subprocess as sp


_ = r"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

example usage as global config:
    --xm f,j,t3600,bin/hooks/wget.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xm=f,j,t3600,bin/hooks/wget.py

parameters explained,
    f = fork so it doesn't block uploads
    j = provide message information as json; not just the text
    c3 = mute all output
    t3600 = timeout and kill download after 1 hour
"""


def main():
    inf = json.loads(sys.argv[1])
    url = inf["txt"]
    if "://" not in url:
        url = "https://" + url

    os.chdir(inf["ap"])

    name = url.split("?")[0].split("/")[-1]
    tfn = "-- DOWNLOADING " + name
    print(f"{tfn}\n", end="")
    open(tfn, "wb").close()

    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        sp.check_call(cmd)
    except:
        t = "-- FAILED TO DOWNLOAD " + name
        print(f"{t}\n", end="")
        open(t, "wb").close()

    os.unlink(tfn)


if __name__ == "__main__":
    main()
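this one is also easy to poke at by hand; the json mirrors what copyparty would send for a 📟 message (values are made up, and the target directory must exist since the hook chdirs into it):

```bash
mkdir -p /tmp/inc
python3 bin/hooks/wget.py '{"txt":"https://example.com/some-file.iso","ap":"/tmp/inc"}'
```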
103  bin/hooks/xiu-sha.py  Executable file
@@ -0,0 +1,103 @@
#!/usr/bin/env python3

import hashlib
import json
import sys
from datetime import datetime


_ = r"""
this hook will produce a single sha512 file which
covers all recent uploads (plus metadata comments)

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i5,j,bin/hooks/xiu-sha.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xiu=i5,j,bin/hooks/xiu-sha.py

parameters explained,
    xiu = execute after uploads...
    i5 = ...after volume has been idle for 5sec
    j = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


def humantime(ts):
    return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")


def find_files_root(inf):
    di = 9000
    for f1, f2 in zip(inf, inf[1:]):
        p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
        p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
        di = min(len(p1), len(p2), di)
        di = next((i for i in range(di) if p1[i] != p2[i]), di)

    return di + 1


def find_vol_root(inf):
    return len(inf[0]["ap"][: -len(inf[0]["vp"])])


def main():
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    # root directory (where to put the sha512 file);
    # di = find_files_root(inf)  # next to the file closest to volume root
    di = find_vol_root(inf)  # top of the entire volume

    ret = []
    total_sz = 0
    for md in inf:
        ap = md["ap"]
        rp = ap[di:]
        total_sz += md["sz"]
        fsize = "{:,}".format(md["sz"])
        mtime = humantime(md["mt"])
        up_ts = humantime(md["at"])

        h = hashlib.sha512()
        with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
            while True:
                buf = f.read(512 * 1024)
                if not buf:
                    break

                h.update(buf)

        cksum = h.hexdigest()
        meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
        ret.append("# {}\n{} *{}".format(meta, cksum, rp))

    ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
    ret.append("")
    ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
    fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
    with open(fsenc(fp), "wb") as f:
        f.write("\n".join(ret).encode("utf-8", "replace"))

    print("wrote checksums to {}".format(fp))


if __name__ == "__main__":
    main()
45  bin/hooks/xiu.py  Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

import json
import sys


_ = r"""
this hook prints absolute filepaths + total size

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i1,j,bin/hooks/xiu.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:c,xiu=i1,j,bin/hooks/xiu.py

parameters explained,
    xiu = execute after uploads...
    i1 = ...after volume has been idle for 1sec
    j = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


def main():
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    total_sz = 0
    for upload in inf:
        sz = upload["sz"]
        total_sz += sz
        print("{:9} {}".format(sz, upload["ap"]))

    print("{} files, {} bytes total".format(len(inf), total_sz))


if __name__ == "__main__":
    main()
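the stdin contract can be exercised the same way; a single-element batch (made-up values) looks like this:

```bash
printf '[{"ap":"/srv/inc/cat.jpg","vp":"inc/cat.jpg","sz":123456,"mt":1678600000,"at":1678600001,"wark":"x","ip":"127.0.0.1"}]' |
  python3 bin/hooks/xiu.py
```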
@@ -1,5 +1,9 @@
 standalone programs which take an audio file as argument
 
+you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)
+
+----
+
 **NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
 
 some of these rely on libraries which are not MIT-compatible
@@ -17,6 +21,7 @@ these do not have any problematic dependencies at all:
 * [cksum.py](./cksum.py) computes various checksums
 * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
 * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
+* also available as an [event hook](../hooks/wget.py)
 
 
 # dependencies
@@ -26,7 +31,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
 *alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
 
 * from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
-* from pypy: `keyfinder vamp`
+* from pip: `keyfinder vamp`
 
 
 # usage from copyparty
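roughly, a parser gets wired up with `-e2ts` plus an `-mtp` entry mapping a tag to the script; the tag name and script below are just an example, and `--help` has the authoritative flag syntax:

```bash
# assumption: have bin/mtag/audio-key.py fill the "key" tag for files in the music volume
python3 copyparty-sfx.py -e2ts -v srv/music:music:r -mtp key=bin/mtag/audio-key.py
```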
@@ -61,7 +61,7 @@ def main():
 
     os.chdir(cwd)
     f1 = fsenc(fn)
-    f2 = os.path.join(b"noexif", f1)
+    f2 = fsenc(os.path.join(b"noexif", fn))
     cmd = [
         b"exiftool",
         b"-exif:all=",
@@ -57,6 +57,7 @@ hash -r
 command -v python3 && pybin=python3 || pybin=python
 }
 
+$pybin -c 'import numpy' ||
 $pybin -m pip install --user numpy
 
 
@@ -224,7 +225,7 @@ install_vamp() {
 $pybin -m pip install --user vamp
 
 cd "$td"
-echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
+echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
 printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
 (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
 sha512sum -c <(
@@ -1,6 +1,11 @@
 #!/usr/bin/env python3
 
 """
+DEPRECATED -- replaced by event hooks;
+https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py
+
+---
+
 use copyparty as a file downloader by POSTing URLs as
 application/x-www-form-urlencoded (for example using the
 message/pager function on the website)
@@ -997,7 +997,7 @@ def main():
     ap.add_argument(
         "-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
     )
-    ap.add_argument("-a", metavar="PASSWORD", help="password")
+    ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
     ap.add_argument("-d", action="store_true", help="enable debug")
     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
     ap.add_argument("-td", action="store_true", help="disable certificate check")
@@ -4,8 +4,9 @@ set -e
 # runs copyparty (or any other program really) in a chroot
 #
 # assumption: these directories, and everything within, are owned by root
-sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
+sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
+	[ -e $v ] && sysdirs+=($v)
+done
 
 # error-handler
 help() { cat <<'EOF'
@@ -38,7 +39,7 @@ while true; do
 	v="$1"; shift
 	[ "$v" = -- ] && break  # end of volumes
 	[ "$#" -eq 0 ] && break  # invalid usage
-	vols+=( "$(realpath "$v")" )
+	vols+=( "$(realpath "$v" || echo "$v")" )
 done
 pybin="$1"; shift
 pybin="$(command -v "$pybin")"
@@ -82,7 +83,7 @@ jail="${jail%/}"
 printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
 while IFS= read -r v; do
 	[ -e "$v" ] || {
-		# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
+		printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
 		continue
 	}
 	i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
@@ -97,9 +98,11 @@ done
 
 cln() {
 	rv=$?
-	# cleanup if not in use
-	lsof "$jail" | grep -qF "$jail" &&
-		echo "chroot is in use, will not cleanup" ||
+	wait -f -p rv $p || true
+	cd /
+	echo "stopping chroot..."
+	lsof "$jail" | grep -F "$jail" &&
+		echo "chroot is in use; will not unmount" ||
 	{
 		mount | grep -F " on $jail" |
 		awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
@@ -124,5 +127,6 @@ export LOGNAME="$USER"
 #echo "cpp [$cpp]"
 chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
 p=$!
+trap 'kill -USR1 $p' USR1
 trap 'kill $p' INT TERM
 wait
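invocation, roughly (jail path, uid/gid and the volume are examples; the help() text in the script itself is authoritative):

```bash
# chroot at /var/jail, run as uid/gid 1000, expose /srv/music inside the jail
./bin/prisonparty.sh /var/jail 1000 1000 /srv/music -- \
  python3 copyparty-sfx.py -v /srv/music::r
```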
332  bin/up2k.py
@@ -1,16 +1,17 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals
 
+S_VERSION = "1.5"
+S_BUILD_DT = "2023-03-12"
+
 """
 up2k.py: upload to copyparty
-2022-11-29, v0.22, ed <irc.rizon.net>, MIT-Licensed
+2021, ed <irc.rizon.net>, MIT-Licensed
 https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
 
 - dependencies: requests
-- supports python 2.6, 2.7, and 3.3 through 3.11
+- supports python 2.6, 2.7, and 3.3 through 3.12
+- if something breaks just try again and it'll autoresume
-- almost zero error-handling
-- but if something breaks just try again and it'll autoresume
 """
 
 import os
@@ -26,6 +27,8 @@ import platform
 import threading
 import datetime
 
+EXE = sys.executable.endswith("exe")
+
 try:
     import argparse
 except:
@@ -36,12 +39,15 @@
 try:
     import requests
 except ImportError:
-    if sys.version_info > (2, 7):
+    if EXE:
+        raise
+    elif sys.version_info > (2, 7):
         m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
     else:
         m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
         m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
         m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
+        m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
 
     print(m.format(sys.executable))
     sys.exit(1)
@@ -246,7 +252,13 @@ def eprint(*a, **ka):
 
 
 def flushing_print(*a, **ka):
-    _print(*a, **ka)
+    try:
+        _print(*a, **ka)
+    except:
+        v = " ".join(str(x) for x in a)
+        v = v.encode("ascii", "replace").decode("ascii")
+        _print(v, **ka)
+
     if "flush" not in ka:
         sys.stdout.flush()
 
@@ -262,10 +274,10 @@ def termsize():
         try:
             import fcntl, termios, struct
 
-            cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
+            r = struct.unpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
+            return r[::-1]
         except:
-            return
-        return cr
+            return None
 
     cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
     if not cr:
@@ -275,12 +287,11 @@ def termsize():
         os.close(fd)
     except:
         pass
-    if not cr:
-        try:
-            cr = (env["LINES"], env["COLUMNS"])
-        except:
-            cr = (25, 80)
-    return int(cr[1]), int(cr[0])
+
+    try:
+        return cr or (int(env["COLUMNS"]), int(env["LINES"]))
+    except:
+        return 80, 25
 
 
 class CTermsize(object):
@@ -362,26 +373,46 @@ def walkdir(err, top, seen):
|
|||||||
|
|
||||||
seen = seen[:] + [atop]
|
seen = seen[:] + [atop]
|
||||||
for ap, inf in sorted(statdir(err, top)):
|
for ap, inf in sorted(statdir(err, top)):
|
||||||
|
yield ap, inf
|
||||||
if stat.S_ISDIR(inf.st_mode):
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
try:
|
try:
|
||||||
for x in walkdir(err, ap, seen):
|
for x in walkdir(err, ap, seen):
|
||||||
yield x
|
yield x
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
err.append((ap, str(ex)))
|
err.append((ap, str(ex)))
|
||||||
else:
|
|
||||||
yield ap, inf
|
|
||||||
|
|
||||||
|
|
||||||
def walkdirs(err, tops):
|
def walkdirs(err, tops):
|
||||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||||
sep = "{0}".format(os.sep).encode("ascii")
|
sep = "{0}".format(os.sep).encode("ascii")
|
||||||
|
if not VT100:
|
||||||
|
za = []
|
||||||
|
for td in tops:
|
||||||
|
try:
|
||||||
|
ap = os.path.abspath(os.path.realpath(td))
|
||||||
|
if td[-1:] in (b"\\", b"/"):
|
||||||
|
ap += sep
|
||||||
|
except:
|
||||||
|
# maybe cpython #88013 (ok)
|
||||||
|
ap = td
|
||||||
|
|
||||||
|
za.append(ap)
|
||||||
|
|
||||||
|
za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
|
||||||
|
za = [x.replace(b"/", b"\\") for x in za]
|
||||||
|
tops = za
|
||||||
|
|
||||||
for top in tops:
|
for top in tops:
|
||||||
|
isdir = os.path.isdir(top)
|
||||||
if top[-1:] == sep:
|
if top[-1:] == sep:
|
||||||
stop = top.rstrip(sep)
|
stop = top.rstrip(sep)
|
||||||
|
yield stop, b"", os.stat(stop)
|
||||||
else:
|
else:
|
||||||
stop = os.path.dirname(top)
|
stop, dn = os.path.split(top)
|
||||||
|
if isdir:
|
||||||
|
yield stop, dn, os.stat(stop)
|
||||||
|
|
||||||
if os.path.isdir(top):
|
if isdir:
|
||||||
for ap, inf in walkdir(err, top, []):
|
for ap, inf in walkdir(err, top, []):
|
||||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||||
else:
|
else:
|
||||||
@@ -472,14 +503,17 @@ def get_hashlist(file, pcb, mth):
|
|||||||
file.kchunks[k] = [v1, v2]
|
file.kchunks[k] = [v1, v2]
|
||||||
|
|
||||||
|
|
||||||
def handshake(url, file, pw, search):
|
def handshake(ar, file, search):
|
||||||
# type: (str, File, Any, bool) -> tuple[list[str], bool]
|
# type: (argparse.Namespace, File, bool) -> tuple[list[str], bool]
|
||||||
"""
|
"""
|
||||||
performs a handshake with the server; reply is:
|
performs a handshake with the server; reply is:
|
||||||
if search, a list of search results
|
if search, a list of search results
|
||||||
otherwise, a list of chunks to upload
|
otherwise, a list of chunks to upload
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
url = ar.url
|
||||||
|
pw = ar.a
|
||||||
|
|
||||||
req = {
|
req = {
|
||||||
"hash": [x[0] for x in file.cids],
|
"hash": [x[0] for x in file.cids],
|
||||||
"name": file.name,
|
"name": file.name,
|
||||||
@@ -488,36 +522,49 @@ def handshake(url, file, pw, search):
|
|||||||
}
|
}
|
||||||
if search:
|
if search:
|
||||||
req["srch"] = 1
|
req["srch"] = 1
|
||||||
|
elif ar.dr:
|
||||||
|
req["replace"] = True
|
||||||
|
|
||||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
||||||
if pw:
|
if pw:
|
||||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
|
file.recheck = False
|
||||||
if file.url:
|
if file.url:
|
||||||
url = file.url
|
url = file.url
|
||||||
elif b"/" in file.rel:
|
elif b"/" in file.rel:
|
||||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
|
sc = 600
|
||||||
|
txt = ""
|
||||||
try:
|
try:
|
||||||
r = req_ses.post(url, headers=headers, json=req)
|
r = req_ses.post(url, headers=headers, json=req)
|
||||||
break
|
sc = r.status_code
|
||||||
|
txt = r.text
|
||||||
|
if sc < 400:
|
||||||
|
break
|
||||||
|
|
||||||
|
raise Exception("http {0}: {1}".format(sc, txt))
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
em = str(ex).split("SSLError(")[-1]
|
em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
|
||||||
|
|
||||||
|
if (
|
||||||
|
sc == 422
|
||||||
|
or "<pre>partial upload exists at a different" in txt
|
||||||
|
or "<pre>source file busy; please try again" in txt
|
||||||
|
):
|
||||||
|
file.recheck = True
|
||||||
|
return [], False
|
||||||
|
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
|
||||||
|
return [], False
|
||||||
|
elif "<pre>you don't have " in txt:
|
||||||
|
raise
|
||||||
|
|
||||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
|
|
||||||
sc = r.status_code
|
|
||||||
if sc >= 400:
|
|
||||||
txt = r.text
|
|
||||||
if sc == 422 or "<pre>partial upload exists at a different" in txt:
|
|
||||||
file.recheck = True
|
|
||||||
return [], False
|
|
||||||
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
|
|
||||||
return [], False
|
|
||||||
|
|
||||||
raise Exception("http {0}: {1}".format(sc, txt))
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = r.json()
|
r = r.json()
|
||||||
except:
|
except:
|
||||||
@@ -539,8 +586,8 @@ def handshake(url, file, pw, search):
|
|||||||
return r["hash"], r["sprs"]
|
return r["hash"], r["sprs"]
|
||||||
|
|
||||||
|
|
||||||
def upload(file, cid, pw):
|
def upload(file, cid, pw, stats):
|
||||||
# type: (File, str, Any) -> None
|
# type: (File, str, str, str) -> None
|
||||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||||
|
|
||||||
headers = {
|
headers = {
|
||||||
@@ -548,6 +595,10 @@ def upload(file, cid, pw):
|
|||||||
"X-Up2k-Wark": file.wark,
|
"X-Up2k-Wark": file.wark,
|
||||||
"Content-Type": "application/octet-stream",
|
"Content-Type": "application/octet-stream",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if stats:
|
||||||
|
headers["X-Up2k-Stat"] = stats
|
||||||
|
|
||||||
if pw:
|
if pw:
|
||||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
@@ -564,27 +615,20 @@ def upload(file, cid, pw):
|
|||||||
|
|
||||||
class Ctl(object):
|
class Ctl(object):
|
||||||
"""
|
"""
|
||||||
this will be the coordinator which runs everything in parallel
|
the coordinator which runs everything in parallel
|
||||||
(hashing, handshakes, uploads) but right now it's p dumb
|
(hashing, handshakes, uploads)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, ar):
|
def _scan(self):
|
||||||
self.ar = ar
|
ar = self.ar
|
||||||
ar.files = [
|
|
||||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
|
||||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
|
||||||
for x in ar.files
|
|
||||||
]
|
|
||||||
ar.url = ar.url.rstrip("/") + "/"
|
|
||||||
if "://" not in ar.url:
|
|
||||||
ar.url = "http://" + ar.url
|
|
||||||
|
|
||||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||||
|
|
||||||
nfiles = 0
|
nfiles = 0
|
||||||
nbytes = 0
|
nbytes = 0
|
||||||
err = []
|
err = []
|
||||||
for _, _, inf in walkdirs(err, ar.files):
|
for _, _, inf in walkdirs(err, ar.files):
|
||||||
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
|
continue
|
||||||
|
|
||||||
nfiles += 1
|
nfiles += 1
|
||||||
nbytes += inf.st_size
|
nbytes += inf.st_size
|
||||||
|
|
||||||
@@ -606,8 +650,15 @@ class Ctl(object):
|
|||||||
return
|
return
|
||||||
|
|
||||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||||
self.nfiles = nfiles
|
return nfiles, nbytes
|
||||||
self.nbytes = nbytes
|
|
||||||
|
def __init__(self, ar, stats=None):
|
||||||
|
self.ar = ar
|
||||||
|
self.stats = stats or self._scan()
|
||||||
|
if not self.stats:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.nfiles, self.nbytes = self.stats
|
||||||
|
|
||||||
if ar.td:
|
if ar.td:
|
||||||
requests.packages.urllib3.disable_warnings()
|
requests.packages.urllib3.disable_warnings()
|
||||||
@@ -616,6 +667,8 @@ class Ctl(object):
|
|||||||
req_ses.verify = ar.te
|
req_ses.verify = ar.te
|
||||||
|
|
||||||
self.filegen = walkdirs([], ar.files)
|
self.filegen = walkdirs([], ar.files)
|
||||||
|
self.recheck = [] # type: list[File]
|
||||||
|
|
||||||
if ar.safe:
|
if ar.safe:
|
||||||
self._safe()
|
self._safe()
|
||||||
else:
|
else:
|
||||||
@@ -634,11 +687,11 @@ class Ctl(object):
|
|||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
self.t0_up = None
|
self.t0_up = None
|
||||||
self.spd = None
|
self.spd = None
|
||||||
|
self.eta = "99:99:99"
|
||||||
|
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.q_handshake = Queue() # type: Queue[File]
|
self.q_handshake = Queue() # type: Queue[File]
|
||||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||||
self.recheck = [] # type: list[File]
|
|
||||||
|
|
||||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||||
@@ -651,6 +704,9 @@ class Ctl(object):
|
|||||||
"""minimal basic slow boring fallback codepath"""
|
"""minimal basic slow boring fallback codepath"""
|
||||||
search = self.ar.s
|
search = self.ar.s
|
||||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||||
|
if stat.S_ISDIR(inf.st_mode) or not rel:
|
||||||
|
continue
|
||||||
|
|
||||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||||
upath = file.abs.decode("utf-8", "replace")
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
|
|
||||||
@@ -660,7 +716,7 @@ class Ctl(object):
|
|||||||
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
||||||
while True:
|
while True:
|
||||||
print(" hs...")
|
print(" hs...")
|
||||||
hs, _ = handshake(self.ar.url, file, self.ar.a, search)
|
hs, _ = handshake(self.ar, file, search)
|
||||||
if search:
|
if search:
|
||||||
if hs:
|
if hs:
|
||||||
for hit in hs:
|
for hit in hs:
|
||||||
@@ -677,7 +733,8 @@ class Ctl(object):
|
|||||||
ncs = len(hs)
|
ncs = len(hs)
|
||||||
for nc, cid in enumerate(hs):
|
for nc, cid in enumerate(hs):
|
||||||
print(" {0} up {1}".format(ncs - nc, cid))
|
print(" {0} up {1}".format(ncs - nc, cid))
|
||||||
upload(file, cid, self.ar.a)
|
stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
|
||||||
|
upload(file, cid, self.ar.a, stats)
|
||||||
|
|
||||||
print(" ok!")
|
print(" ok!")
|
||||||
if file.recheck:
|
if file.recheck:
|
||||||
@@ -688,10 +745,10 @@ class Ctl(object):
|
|||||||
|
|
||||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||||
for file in self.recheck:
|
for file in self.recheck:
|
||||||
handshake(self.ar.url, file, self.ar.a, search)
|
handshake(self.ar, file, search)
|
||||||
|
|
||||||
def _fancy(self):
|
def _fancy(self):
|
||||||
if VT100:
|
if VT100 and not self.ar.ns:
|
||||||
atexit.register(self.cleanup_vt100)
|
atexit.register(self.cleanup_vt100)
|
||||||
ss.scroll_region(3)
|
ss.scroll_region(3)
|
||||||
|
|
||||||
@@ -715,7 +772,7 @@ class Ctl(object):
|
|||||||
else:
|
else:
|
||||||
idles = 0
|
idles = 0
|
||||||
|
|
||||||
if VT100:
|
if VT100 and not self.ar.ns:
|
||||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||||
txt = "\033[s\033[{0}H".format(ss.g)
|
txt = "\033[s\033[{0}H".format(ss.g)
|
||||||
for y, k, st, f in [
|
for y, k, st, f in [
|
||||||
@@ -752,12 +809,12 @@ class Ctl(object):
|
|||||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||||
|
|
||||||
spd = humansize(spd)
|
spd = humansize(spd)
|
||||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
self.eta = str(datetime.timedelta(seconds=int(eta)))
|
||||||
sleft = humansize(self.nbytes - self.up_b)
|
sleft = humansize(self.nbytes - self.up_b)
|
||||||
nleft = self.nfiles - self.up_f
|
nleft = self.nfiles - self.up_f
|
||||||
tail = "\033[K\033[u" if VT100 else "\r"
|
tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
|
||||||
|
|
||||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
|
t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
|
||||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||||
|
|
||||||
if not self.recheck:
|
if not self.recheck:
|
||||||
@@ -765,7 +822,7 @@ class Ctl(object):
|
|||||||
|
|
||||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||||
for file in self.recheck:
|
for file in self.recheck:
|
||||||
handshake(self.ar.url, file, self.ar.a, False)
|
handshake(self.ar, file, False)
|
||||||
|
|
||||||
def cleanup_vt100(self):
|
def cleanup_vt100(self):
|
||||||
ss.scroll_region(None)
|
ss.scroll_region(None)
|
||||||
@@ -778,8 +835,10 @@ class Ctl(object):
|
|||||||
prd = None
|
prd = None
|
||||||
ls = {}
|
ls = {}
|
||||||
for top, rel, inf in self.filegen:
|
for top, rel, inf in self.filegen:
|
||||||
if self.ar.z:
|
isdir = stat.S_ISDIR(inf.st_mode)
|
||||||
rd = os.path.dirname(rel)
|
if self.ar.z or self.ar.drd:
|
||||||
|
rd = rel if isdir else os.path.dirname(rel)
|
||||||
|
srd = rd.decode("utf-8", "replace").replace("\\", "/")
|
||||||
if prd != rd:
|
if prd != rd:
|
||||||
prd = rd
|
prd = rd
|
||||||
headers = {}
|
headers = {}
|
||||||
@@ -788,19 +847,37 @@ class Ctl(object):
|
|||||||
|
|
||||||
ls = {}
|
ls = {}
|
||||||
try:
|
try:
|
||||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
print(" ls ~{0}".format(srd))
|
||||||
r = req_ses.get(
|
zb = self.ar.url.encode("utf-8")
|
||||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
zb += quotep(rd.replace(b"\\", b"/"))
|
||||||
headers=headers,
|
r = req_ses.get(zb + b"?ls&dots", headers=headers)
|
||||||
)
|
if not r:
|
||||||
for f in r.json()["files"]:
|
raise Exception("HTTP {0}".format(r.status_code))
|
||||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
|
||||||
ls[unquote(rfn)] = f
|
|
||||||
except:
|
|
||||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
|
||||||
|
|
||||||
|
j = r.json()
|
||||||
|
for f in j["dirs"] + j["files"]:
|
||||||
|
rfn = f["href"].split("?")[0].rstrip("/")
|
||||||
|
ls[unquote(rfn.encode("utf-8", "replace"))] = f
|
||||||
|
except Exception as ex:
|
||||||
|
print(" mkdir ~{0} ({1})".format(srd, ex))
|
||||||
|
|
||||||
|
if self.ar.drd:
|
||||||
|
dp = os.path.join(top, rd)
|
||||||
|
lnodes = set(os.listdir(dp))
|
||||||
|
bnames = [x for x in ls if x not in lnodes]
|
||||||
|
if bnames:
|
||||||
|
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||||
|
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||||
|
locs = [vpath + srd + "/" + x for x in names]
|
||||||
|
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
||||||
|
req_ses.post(self.ar.url + "?delete", json=locs)
|
||||||
|
|
||||||
|
if isdir:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if self.ar.z:
|
||||||
rf = ls.get(os.path.basename(rel), None)
|
rf = ls.get(os.path.basename(rel), None)
|
||||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 2:
|
||||||
self.nfiles -= 1
|
self.nfiles -= 1
|
||||||
self.nbytes -= inf.st_size
|
self.nbytes -= inf.st_size
|
||||||
continue
|
continue
|
||||||
@@ -850,7 +927,10 @@ class Ctl(object):
|
|||||||
self.handshaker_busy += 1
|
self.handshaker_busy += 1
|
||||||
|
|
||||||
upath = file.abs.decode("utf-8", "replace")
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
hs, sprs = handshake(self.ar.url, file, self.ar.a, search)
|
if not VT100:
|
||||||
|
upath = upath[4:]
|
||||||
|
|
||||||
|
hs, sprs = handshake(self.ar, file, search)
|
||||||
if search:
|
if search:
|
||||||
if hs:
|
if hs:
|
||||||
for hit in hs:
|
for hit in hs:
|
||||||
@@ -883,6 +963,9 @@ class Ctl(object):
|
|||||||
self.up_c += len(file.cids) - file.up_c
|
self.up_c += len(file.cids) - file.up_c
|
||||||
self.up_b += file.size - file.up_b
|
self.up_b += file.size - file.up_b
|
||||||
|
|
||||||
|
if not file.recheck:
|
||||||
|
self.up_done(file)
|
||||||
|
|
||||||
if hs and file.up_c:
|
if hs and file.up_c:
|
||||||
# some chunks failed
|
# some chunks failed
|
||||||
self.up_c -= len(hs)
|
self.up_c -= len(hs)
|
||||||
@@ -912,12 +995,23 @@ class Ctl(object):
|
|||||||
self.uploader_busy += 1
|
self.uploader_busy += 1
|
||||||
self.t0_up = self.t0_up or time.time()
|
self.t0_up = self.t0_up or time.time()
|
||||||
|
|
||||||
|
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
|
||||||
|
stats = zs.format(
|
||||||
|
self.up_f,
|
||||||
|
len(self.recheck),
|
||||||
|
self.uploader_busy,
|
||||||
|
self.nfiles - self.up_f,
|
||||||
|
int(self.nbytes / (1024 * 1024)),
|
||||||
|
int((self.nbytes - self.up_b) / (1024 * 1024)),
|
||||||
|
self.eta,
|
||||||
|
)
|
||||||
|
|
||||||
file, cid = task
|
file, cid = task
|
||||||
try:
|
try:
|
||||||
upload(file, cid, self.ar.a)
|
upload(file, cid, self.ar.a, stats)
|
||||||
except:
|
except:
|
||||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||||
pass # handshake will fix it
|
# handshake will fix it
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
sz = file.kchunks[cid][1]
|
sz = file.kchunks[cid][1]
|
||||||
@@ -933,6 +1027,10 @@ class Ctl(object):
|
|||||||
self.up_c += 1
|
self.up_c += 1
|
||||||
self.uploader_busy -= 1
|
self.uploader_busy -= 1
|
||||||
|
|
||||||
|
def up_done(self, file):
|
||||||
|
if self.ar.dl:
|
||||||
|
os.unlink(file.abs)
|
||||||
|
|
||||||
|
|
||||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||||
pass
|
pass
|
||||||
@@ -946,8 +1044,13 @@ def main():
|
|||||||
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
|
||||||
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||||
|
|
||||||
|
ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
|
||||||
|
if "--version" in sys.argv:
|
||||||
|
print(ver)
|
||||||
|
return
|
||||||
|
|
||||||
# fmt: off
|
# fmt: off
|
||||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
|
||||||
NOTE:
|
NOTE:
|
||||||
source file/folder selection uses rsync syntax, meaning that:
|
source file/folder selection uses rsync syntax, meaning that:
|
||||||
"foo" uploads the entire folder to URL/foo/
|
"foo" uploads the entire folder to URL/foo/
|
||||||
@@ -957,21 +1060,84 @@ source file/folder selection uses rsync syntax, meaning that:
|
|||||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||||
ap.add_argument("-v", action="store_true", help="verbose")
|
ap.add_argument("-v", action="store_true", help="verbose")
|
||||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||||
|
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||||
|
|
||||||
|
ap = app.add_argument_group("compatibility")
|
||||||
|
ap.add_argument("--cls", action="store_true", help="clear screen before start")
|
||||||
|
ap.add_argument("--ws", action="store_true", help="copyparty is running on windows; wait before deleting files after uploading")
|
||||||
|
|
||||||
|
ap = app.add_argument_group("folder sync")
|
||||||
|
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
|
||||||
|
ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally")
|
||||||
|
ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")
|
||||||
|
|
||||||
ap = app.add_argument_group("performance tweaks")
|
ap = app.add_argument_group("performance tweaks")
|
||||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||||
|
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
|
||||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||||
|
|
||||||
ap = app.add_argument_group("tls")
|
ap = app.add_argument_group("tls")
|
||||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||||
# fmt: on
|
# fmt: on
|
||||||
|
|
||||||
Ctl(app.parse_args())
|
try:
|
||||||
|
ar = app.parse_args()
|
||||||
|
finally:
|
||||||
|
if EXE and not sys.argv[1:]:
|
||||||
|
print("*** hit enter to exit ***")
|
||||||
|
try:
|
||||||
|
input()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if ar.drd:
|
||||||
|
ar.dr = True
|
||||||
|
|
||||||
|
for k in "dl dr drd".split():
|
||||||
|
errs = []
|
||||||
|
if ar.safe and getattr(ar, k):
|
||||||
|
errs.append(k)
|
||||||
|
|
||||||
|
if errs:
|
||||||
|
raise Exception("--safe is incompatible with " + str(errs))
|
||||||
|
|
||||||
|
ar.files = [
|
||||||
|
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||||
|
+ (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
|
||||||
|
for x in ar.files
|
||||||
|
]
|
||||||
|
|
||||||
|
ar.url = ar.url.rstrip("/") + "/"
|
||||||
|
if "://" not in ar.url:
|
||||||
|
ar.url = "http://" + ar.url
|
||||||
|
|
||||||
|
if ar.a and ar.a.startswith("$"):
|
||||||
|
fn = ar.a[1:]
|
||||||
|
print("reading password from file [{0}]".format(fn))
|
||||||
|
with open(fn, "rb") as f:
|
||||||
|
ar.a = f.read().decode("utf-8").strip()
|
||||||
|
|
||||||
|
if ar.cls:
|
||||||
|
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||||
|
|
||||||
|
ctl = Ctl(ar)
|
||||||
|
|
||||||
|
if ar.dr and not ar.drd:
|
||||||
|
print("\npass 2/2: delete")
|
||||||
|
if getattr(ctl, "up_br") and ar.ws:
|
||||||
|
# wait for up2k to mtime if there was uploads
|
||||||
|
time.sleep(4)
|
||||||
|
|
||||||
|
ar.drd = True
|
||||||
|
ar.z = True
|
||||||
|
Ctl(ar, ctl.stats)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -29,11 +29,11 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
 * disables thumbnails and folder-type detection in windows explorer
 * makes it way faster (especially for slow/networked locations (such as partyfuse))
 
-### [`webdav-basicauth.reg`](webdav-basicauth.reg)
-* enables webdav basic-auth over plaintext http; takes effect after a reboot OR after running `webdav-unlimit.bat`
-
-### [`webdav-unlimit.bat`](webdav-unlimit.bat)
-* removes the 47.6 MiB filesize limit when downloading from webdav
+### [`webdav-cfg.reg`](webdav-cfg.bat)
+* improves the native webdav support in windows;
+* removes the 47.6 MiB filesize limit when downloading from webdav
+* optionally enables webdav basic-auth over plaintext http
+* optionally helps disable wpad, removing the 10sec latency
 
 ### [`cfssl.sh`](cfssl.sh)
 * creates CA and server certificates using cfssl
|||||||
15
contrib/apache/copyparty.conf
Normal file
15
contrib/apache/copyparty.conf
Normal file
contrib/apache/copyparty.conf (new file, 15 lines)
@@ -0,0 +1,15 @@
+# when running copyparty behind a reverse proxy,
+# the following arguments are recommended:
+#
+# --http-only lower latency on initial connection
+# -i 127.0.0.1 only accept connections from nginx
+#
+# if you are doing location-based proxying (such as `/stuff` below)
+# you must run copyparty with --rp-loc=stuff
+#
+# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
+
+LoadModule proxy_module modules/mod_proxy.so
+ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
+# do not specify ProxyPassReverse
+RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
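To sanity-check this proxy config, probing the direct and the proxied URL is usually enough. A minimal sketch, assuming copyparty was started with --rp-loc=stuff -i 127.0.0.1 and apache listens locally on port 80 (both URLs are examples):

# quick probe of the location-based reverse proxy described above
import urllib.request

for url in ("http://127.0.0.1:3923/stuff/", "http://127.0.0.1/stuff/"):
    try:
        with urllib.request.urlopen(url, timeout=5) as r:
            print(url, "->", r.getcode())  # expect 200 from both if the proxy is wired up
    except Exception as ex:
        print(url, "->", ex)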
@@ -3,7 +3,7 @@
 <head>
 <meta charset="utf-8">
-<title>⇆🎉 redirect</title>
+<title>💾🎉 redirect</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <style>
 
@@ -10,6 +10,8 @@
 #
 # you may also consider adding -j0 for CPU-intensive configurations
 # (not that i can really think of any good examples)
+#
+# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
 
 upstream cpp {
 server 127.0.0.1:3923;
@@ -14,5 +14,5 @@ name="$SVCNAME"
 command_background=true
 pidfile="/var/run/$SVCNAME.pid"
 
-command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
+command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py"
 command_args="-q -v /mnt::rw"
contrib/package/arch/PKGBUILD (new file, 57 lines)
@@ -0,0 +1,57 @@
+# Maintainer: icxes <dev.null@need.moe>
+pkgname=copyparty
+pkgver="1.6.7"
+pkgrel=1
+pkgdesc="Portable file sharing hub"
+arch=("any")
+url="https://github.com/9001/${pkgname}"
+license=('MIT')
+depends=("python" "lsof")
+optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
+            "python-jinja: faster html generator"
+            "python-mutagen: music tags (alternative)"
+            "python-pillow: thumbnails for images"
+            "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
+            "libkeyfinder-git: detection of musical keys"
+            "qm-vamp-plugins: BPM detection"
+            "python-pyopenssl: ftps functionality"
+            "python-impacket-git: smb support (bad idea)"
+)
+source=("${url}/releases/download/v${pkgver}/${pkgname}-sfx.py"
+        "${pkgname}.conf"
+        "${pkgname}.service"
+        "prisonparty.service"
+        "index.md"
+        "https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/bin/prisonparty.sh"
+        "https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE"
+)
+backup=("etc/${pkgname}.d/init" )
+sha256sums=("3fb40a631e9decf0073db06aab6fd8d743de91f4ddb82a65164d39d53e0b413f"
+            "b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2"
+            "f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc"
+            "c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff"
+            "dba701de9fd584405917e923ea1e59dbb249b96ef23bad479cf4e42740b774c8"
+            "23054bb206153a1ed34038accaf490b8068f9c856e423c2f2595b148b40c0a0c"
+            "cb2ce3d6277bf2f5a82ecf336cc44963bc6490bcf496ffbd75fc9e21abaa75f3"
+)
+
+package() {
+    cd "${srcdir}/"
+
+    install -dm755 "${pkgdir}/etc/${pkgname}.d"
+    install -Dm755 "${pkgname}-sfx.py" "${pkgdir}/usr/bin/${pkgname}"
+    install -Dm755 "prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
+    install -Dm644 "${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
+    install -Dm644 "${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
+    install -Dm644 "prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
+    install -Dm644 "index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
+    install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
+
+    find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
+    echo "┏━━━━━━━━━━━━━━━──-"
+    echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/"
+    echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:"
+    echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service (standard)"
+    echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)"
+    echo "┗━━━━━━━━━━━━━━━──-"
+}
contrib/package/arch/copyparty.conf (new file, 7 lines)
@@ -0,0 +1,7 @@
+## import all *.conf files from the current folder (/etc/copyparty.d)
+% ./
+
+# add additional .conf files to this folder;
+# see example config files for reference:
+# https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf
+# https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d
contrib/package/arch/copyparty.service (new file, 32 lines)
@@ -0,0 +1,32 @@
+# this will start `/usr/bin/copyparty-sfx.py`
+# and read config from `/etc/copyparty.d/*.conf`
+#
+# you probably want to:
+# change "User=cpp" and "/home/cpp/" to another user
+#
+# unless you add -q to disable logging, you may want to remove the
+# following line to allow buffering (slightly better performance):
+# Environment=PYTHONUNBUFFERED=x
+
+[Unit]
+Description=copyparty file server
+
+[Service]
+Type=notify
+SyslogIdentifier=copyparty
+Environment=PYTHONUNBUFFERED=x
+WorkingDirectory=/var/lib/copyparty-jail
+ExecReload=/bin/kill -s USR1 $MAINPID
+
+# user to run as + where the TLS certificate is (if any)
+User=cpp
+Environment=XDG_CONFIG_HOME=/home/cpp/.config
+
+# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
+ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
+
+# run copyparty
+ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
+
+[Install]
+WantedBy=multi-user.target
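The ExecReload line in this and the following unit files simply delivers SIGUSR1 to the main process. As a generic illustration of that mechanism (not copyparty's actual signal handler), a daemon reacts to it roughly like this:

# generic sketch of handling systemd's `ExecReload=/bin/kill -s USR1 $MAINPID`
import signal
import time

def on_usr1(signum, frame):
    print("USR1 received; re-reading config files")

signal.signal(signal.SIGUSR1, on_usr1)

while True:
    time.sleep(60)  # a real daemon would be serving requests here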
contrib/package/arch/index.md (new file, 3 lines)
@@ -0,0 +1,3 @@
+this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured
+
+please add some `*.conf` files to `/etc/copyparty.d/`
contrib/package/arch/prisonparty.service (new file, 31 lines)
@@ -0,0 +1,31 @@
+# this will start `/usr/bin/copyparty-sfx.py`
+# in a chroot, preventing accidental access elsewhere
+# and read config from `/etc/copyparty.d/*.conf`
+#
+# expose additional filesystem locations to copyparty
+# by listing them between the last `1000` and `--`
+#
+# `1000 1000` = what user to run copyparty as
+#
+# unless you add -q to disable logging, you may want to remove the
+# following line to allow buffering (slightly better performance):
+# Environment=PYTHONUNBUFFERED=x
+
+[Unit]
+Description=copyparty file server
+
+[Service]
+SyslogIdentifier=prisonparty
+Environment=PYTHONUNBUFFERED=x
+WorkingDirectory=/var/lib/copyparty-jail
+ExecReload=/bin/kill -s USR1 $MAINPID
+
+# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
+ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
+
+# run copyparty
+ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \
+    /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
+
+[Install]
+WantedBy=multi-user.target
@@ -1,13 +1,22 @@
 <!--
+NOTE: DEPRECATED; please use the javascript version instead:
+https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js
+
+----
+
 save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
 https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
 
+only works if you disable the prologue/epilogue sandbox with --no-sb-lg
+which should probably be combined with --no-dot-ren to prevent damage
+(`no_sb_lg` can also be set per-volume with volflags)
 -->
 
 <style>
 
 /* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
 
-#ops, #tree, #path, #epi+h2, /* main tabs and navigators (tree/breadcrumbs) */
+#ops, #tree, #path, #wfp, /* main tabs and navigators (tree/breadcrumbs) */
 
 #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
@@ -17,7 +17,7 @@ almost the same as minimal-up2k.html except this one...:
 var u2min = `
 <style>
 
-#ops, #path, #tree, #files, #epi+div+h2,
+#ops, #path, #tree, #files, #wfp,
 #u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
 display: none !important;
 }
@@ -55,5 +55,5 @@ var u2min = `
 if (!has(perms, 'read')) {
 var e2 = mknod('div');
 e2.innerHTML = u2min;
-ebi('wrap').insertBefore(e2, QS('#epi+h2'));
+ebi('wrap').insertBefore(e2, QS('#wfp'));
 }
@@ -6,12 +6,17 @@
 # 1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
 # 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
 #
+# expose additional filesystem locations to copyparty
+# by listing them between the last `1000` and `--`
+#
+# `1000 1000` = what user to run copyparty as
+#
 # you may want to:
 # change '/mnt::rw' to another location or permission-set
 # (remember to change the '/mnt' chroot arg too)
 #
-# enable line-buffering for realtime logging (slight performance cost):
-# inside the [Service] block, add the following line:
+# unless you add -q to disable logging, you may want to remove the
+# following line to allow buffering (slightly better performance):
 # Environment=PYTHONUNBUFFERED=x
 
 [Unit]
@@ -19,7 +24,14 @@ Description=copyparty file server
 
 [Service]
 SyslogIdentifier=prisonparty
-WorkingDirectory=/usr/local/bin
+Environment=PYTHONUNBUFFERED=x
+WorkingDirectory=/var/lib/copyparty-jail
+ExecReload=/bin/kill -s USR1 $MAINPID
+
+# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
+ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
+
+# run copyparty
 ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
     /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
 
@@ -1,9 +1,7 @@
 @echo off
 rem removes the 47.6 MiB filesize limit when downloading from webdav
 rem + optionally allows/enables password-auth over plaintext http
-rem + optionally helps disable wpad
+rem + optionally helps disable wpad, removing the 10sec latency
 
-setlocal enabledelayedexpansion
-
 net session >nul 2>&1
 if %errorlevel% neq 0 (
@@ -20,30 +18,26 @@ echo OK;
 echo allow webdav basic-auth over plaintext http?
 echo Y: login works, but the password will be visible in wireshark etc
 echo N: login will NOT work unless you use https and valid certificates
-set c=.
-set /p "c=(Y/N): "
-echo(
-if /i not "!c!"=="y" goto :g1
-reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
-rem default is 1 (require tls)
+choice
+if %errorlevel% equ 1 (
+reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
+rem default is 1 (require tls)
+)
 
-:g1
 echo(
 echo OK;
 echo do you want to disable wpad?
 echo can give a HUGE speed boost depending on network settings
-set c=.
-set /p "c=(Y/N): "
-echo(
-if /i not "!c!"=="y" goto :g2
-echo(
-echo i'm about to open the [Connections] tab in [Internet Properties] for you;
-echo please click [LAN settings] and disable [Automatically detect settings]
-echo(
-pause
-control inetcpl.cpl,,4
+choice
+if %errorlevel% equ 1 (
+echo(
+echo i'm about to open the [Connections] tab in [Internet Properties] for you;
+echo please click [LAN settings] and disable [Automatically detect settings]
+echo(
+pause
+control inetcpl.cpl,,4
+)
 
-:g2
 net stop webclient
 net start webclient
 echo(
@@ -34,6 +34,8 @@ ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
 
 MACOS = platform.system() == "Darwin"
 
+EXE = bool(getattr(sys, "frozen", False))
+
 try:
     CORES = len(os.sched_getaffinity(0))
 except:
@@ -23,9 +23,10 @@ import traceback
 import uuid
 from textwrap import dedent
 
-from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
+from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
 from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
-from .authsrv import expand_config_file, re_vol
+from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
+from .cfg import flagcats, onedash
 from .svchub import SvcHub
 from .util import (
     IMPLICATIONS,
@@ -37,6 +38,7 @@ from .util import (
     ansi_re,
     min_ex,
     py_desc,
+    pybin,
     termsize,
     wrap,
 )
@@ -53,7 +55,9 @@ try:
 except:
     HAVE_SSL = False
 
+u = unicode
 printed: list[str] = []
+zsid = uuid.uuid4().urn[4:]
 
 
 class RiceFormatter(argparse.HelpFormatter):
@@ -228,9 +232,10 @@ def get_srvname() -> str:
             ret = f.read().decode("utf-8", "replace").strip()
     except:
         ret = ""
-    while len(ret) < 7:
+    namelen = 5
+    while len(ret) < namelen:
         ret += base64.b32encode(os.urandom(4))[:7].decode("utf-8").lower()
-        ret = re.sub("[234567=]", "", ret)[:7]
+        ret = re.sub("[234567=]", "", ret)[:namelen]
         with open(fp, "wb") as f:
             f.write(ret.encode("utf-8") + b"\n")
 
@@ -238,18 +243,23 @@ def get_srvname() -> str:
 
 
 def ensure_locale() -> None:
+    safe = "en_US.UTF-8"
     for x in [
-        "en_US.UTF-8",
+        safe,
         "English_United States.UTF8",
         "English_United States.1252",
     ]:
         try:
             locale.setlocale(locale.LC_ALL, x)
-            lprint("Locale: {}\n".format(x))
-            break
+            if x != safe:
+                lprint("Locale: {}\n".format(x))
+            return
         except:
             continue
+
+    t = "setlocale {} failed,\n sorting and dates might get funky\n"
+    warn(t.format(safe))
 
 
 def ensure_cert() -> None:
     """
@@ -267,8 +277,8 @@ def ensure_cert() -> None:
     try:
         if filecmp.cmp(cert_cfg, cert_insec):
             lprint(
-                "\033[33m using default TLS certificate; https will be insecure."
-                + "\033[36m\n certificate location: {}\033[0m\n".format(cert_cfg)
+                "\033[33musing default TLS certificate; https will be insecure."
+                + "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
             )
     except:
         pass
@@ -347,27 +357,28 @@ def configure_ssl_ciphers(al: argparse.Namespace) -> None:
 def args_from_cfg(cfg_path: str) -> list[str]:
     lines: list[str] = []
     expand_config_file(lines, cfg_path, "")
+    lines = upgrade_cfg_fmt(None, argparse.Namespace(vc=False), lines, "")
 
     ret: list[str] = []
-    skip = False
+    skip = True
     for ln in lines:
-        if not ln:
-            skip = False
-            continue
-        if ln.startswith("#"):
-            continue
-        if not ln.startswith("-"):
-            continue
-        if skip:
-            continue
-        try:
-            ret.extend(ln.split(" ", 1))
-        except:
-            ret.append(ln)
+        sn = ln.split(" #")[0].strip()
+        if sn.startswith("["):
+            skip = True
+        if sn.startswith("[global]"):
+            skip = False
+            continue
+        if skip or not sn.split("#")[0].strip():
+            continue
+        for k, v in split_cfg_ln(sn).items():
+            k = k.lstrip("-")
+            if not k:
+                continue
+            prefix = "-" if k in onedash else "--"
+            if v is True:
+                ret.append(prefix + k)
+            else:
+                ret.append(prefix + k + "=" + v)
 
     return ret
 
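In practice, the rewritten args_from_cfg means that only lines inside a [global] section are turned into command-line flags, with single-character keys getting a single dash. A rough, self-contained sketch of that mapping (the key/value splitting is a simplified stand-in for authsrv.split_cfg_ln, and the onedash set here is an assumption for the example, not the real list):

# simplified illustration of the new [global] -> argv mapping
onedash = {"i", "p", "c"}  # assumed set of single-dash flags, for the example

def cfg_to_argv(lines):
    ret, skip = [], True
    for ln in lines:
        sn = ln.split(" #")[0].strip()
        if sn.startswith("["):
            skip = not sn.startswith("[global]")
            continue
        if skip or not sn:
            continue
        k, _, v = sn.partition(":")
        k, v = k.strip().lstrip("-"), v.strip()
        prefix = "-" if k in onedash else "--"
        ret.append(prefix + k + ("=" + v if v else ""))
    return ret

print(cfg_to_argv(["[global]", "  i: 127.0.0.1", "  no-robots", "[accounts]", "  ed: hunter2"]))
# -> ['-i=127.0.0.1', '--no-robots']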
@@ -441,27 +452,8 @@ def showlic() -> None:
         print(f.read().decode("utf-8", "replace"))
 
 
-def run_argparse(
-    argv: list[str], formatter: Any, retry: bool, nc: int
-) -> argparse.Namespace:
-    ap = argparse.ArgumentParser(
-        formatter_class=formatter,
-        prog="copyparty",
-        description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
-    )
-
-    try:
-        fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
-    except:
-        fk_salt = "hunter2"
-
-    hcores = min(CORES, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
-
-    tty = os.environ.get("TERM", "").lower() == "linux"
-
-    srvname = get_srvname()
-
-    sects = [
+def get_sects():
+    return [
         [
             "accounts",
             "accounts and volumes",
@@ -480,7 +472,7 @@ def run_argparse(
 "g" (get): download files, but cannot see folder contents
 "G" (upget): "get", but can see filekeys of their own uploads
 
-too many volflags to list here, see the other sections
+too many volflags to list here, see --help-flags
 
 example:\033[35m
 -a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
@@ -507,64 +499,63 @@ def run_argparse(
 """
 volflags are appended to volume definitions, for example,
 to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
-\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
-
-\033[0muploads, general:
-\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
-\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
-\033[36mmagic$\033[35m enables filetype detection for nameless uploads
-\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
-\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
-
-\033[0mupload rules:
-\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
-\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
-\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
-\033[36mdf=1g\033[35m ensure 1 GiB free disk space
-
-\033[0mupload rotation:
-(moves all uploads into the specified folder structure)
-\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
-\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
-\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
-
-\033[0mdatabase, general:
-\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
-\033[36md2ts\033[35m disables metadata collection for existing files
-\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
-\033[36md2t\033[35m disables metadata collection, overrides -e2t*
-\033[36md2v\033[35m disables file verification, overrides -e2v*
-\033[36md2d\033[35m disables all database stuff, overrides -e2*
-\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
-\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
-\033[36mnohash=\.iso$\033[35m skips hashing file contents if path matches *.iso
-\033[36mnoidx=\.iso$\033[35m fully ignores the contents at paths matching *.iso
-\033[36mnoforget$\033[35m don't forget files when deleted from disk
-\033[36mxlink$\033[35m cross-volume dupe detection / linking
-\033[36mxdev\033[35m do not descend into other filesystems
-\033[36mxvol\033[35m skip symlinks leaving the volume root
-
-\033[0mdatabase, audio tags:
-"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
-\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
-generate ".bpm" tags from uploads (f = overwrite tags)
-\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
-
-\033[0mthumbnails:
-\033[36mdthumb\033[35m disables all thumbnails
-\033[36mdvthumb\033[35m disables video thumbnails
-\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
-\033[36mdithumb\033[35m disables image thumbnails
-
-\033[0mclient and ux:
-\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
-\033[36mrobots\033[35m allows indexing by search engines (default)
-\033[36mnorobots\033[35m kindly asks search engines to leave
-
-\033[0mothers:
-\033[36mfk=8\033[35m generates per-file accesskeys,
-which will then be required at the "g" permission
-\033[0m"""
+\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub"""
+)
++ build_flags_desc(),
+],
+[
+"hooks",
+"execute commands before/after various events",
+dedent(
+"""
+execute a command (a program or script) before or after various events;
+\033[36mxbu\033[35m executes CMD before a file upload starts
+\033[36mxau\033[35m executes CMD after a file upload finishes
+\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
+\033[36mxbr\033[35m executes CMD before a file rename/move
+\033[36mxar\033[35m executes CMD after a file rename/move
+\033[36mxbd\033[35m executes CMD before a file delete
+\033[36mxad\033[35m executes CMD after a file delete
+\033[36mxm\033[35m executes CMD on message
+\033[0m
+can be defined as --args or volflags; for example \033[36m
+--xau notify-send
+-v .::r:c,xau=notify-send
+\033[0m
+commands specified as --args are appended to volflags;
+each --arg and volflag can be specified multiple times,
+each command will execute in order unless one returns non-zero
+
+optionally prefix the command with comma-sep. flags similar to -mtp:
+
+\033[36mf\033[35m forks the process, doesn't wait for completion
+\033[36mc\033[35m checks return code, blocks the action if non-zero
+\033[36mj\033[35m provides json with info as 1st arg instead of filepath
+\033[36mwN\033[35m waits N sec after command has been started before continuing
+\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
+\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
+
+\033[36mkt\033[35m kills the entire process tree on timeout (default),
+\033[36mkm\033[35m kills just the main process
+\033[36mkn\033[35m lets it continue running until copyparty is terminated
+
+\033[36mc0\033[35m show all process output (default)
+\033[36mc1\033[35m show only stderr
+\033[36mc2\033[35m show only stdout
+\033[36mc3\033[35m mute all process otput
+\033[0m
+each hook is executed once for each event, except for \033[36mxiu\033[0m
+which builds up a backlog of uploads, running the hook just once
+as soon as the volume has been idle for iN seconds (5 by default)
+
+\033[36mxiu\033[0m is also unique in that it will pass the metadata to the
+executed program on STDIN instead of as argv arguments, and
+it also includes the wark (file-id/hash) as a json property
+
+except for \033[36mxm\033[0m, only one hook / one action can run at a time,
+so it's recommended to use the \033[36mf\033[0m flag unless you really need
+to wait for the hook to finish before continuing (without \033[36mf\033[0m
+the upload speed can easily drop to 10% for small files)"""
             ),
         ],
         [
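The hooks described in the help text above are plain executables. As a hypothetical illustration, an --xau hook registered with the j prefix-flag receives a JSON blob as its first argument instead of a filepath, so a minimal logging hook could be written like this (the log path is an example, and no particular JSON schema is assumed):

#!/usr/bin/env python3
# minimal upload-hook sketch; register it e.g. as --xau j,f,/path/to/this.py
import json
import sys

def main():
    try:
        info = json.loads(sys.argv[1])  # with the "j" flag: argv[1] is json
    except (IndexError, ValueError):
        info = {"path": sys.argv[1:]}   # without "j": argv[1] is the filepath
    with open("/tmp/copyparty-hook.log", "a") as f:
        f.write(json.dumps(info) + "\n")

if __name__ == "__main__":
    main()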
@@ -600,10 +591,43 @@ def run_argparse(
 """
             ),
         ],
+        [
+            "dbd",
+            "database durability profiles",
+            dedent(
+                """
+mainly affects uploads of many small files on slow HDDs; speeds measured uploading 520 files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
+
+\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what
+
+\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
+
+\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
+
+\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash
+
+profiles can be set globally (--dbd=yolo), or per-volume with volflags: -v ~/Music:music:r:c,dbd=acid
+                """
+            ),
+        ],
     ]
 
-# fmt: off
-u = unicode
+
+def build_flags_desc():
+    ret = ""
+    for grp, flags in flagcats.items():
+        ret += "\n\n\033[0m" + grp
+        for k, v in flags.items():
+            v = v.replace("\n", "\n ")
+            ret += "\n \033[36m{}\033[35m {}".format(k, v)
+
+    return ret + "\033[0m"
+
+
+# fmt: off
+
+
+def add_general(ap, nc, srvname):
     ap2 = ap.add_argument_group('general options')
     ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
     ap2.add_argument("-nc", metavar="NUM", type=int, default=nc, help="max num clients")
@@ -619,6 +643,8 @@ def run_argparse(
     ap2.add_argument("--license", action="store_true", help="show licenses and exit")
     ap2.add_argument("--version", action="store_true", help="show versions and exit")
 
+
+def add_qr(ap, tty):
     ap2 = ap.add_argument_group('qr options')
     ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
    ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
@@ -629,18 +655,23 @@ def run_argparse(
     ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
     ap2.add_argument("--qrz", metavar="N", type=int, default=0, help="[\033[32m1\033[0m]=1x, [\033[32m2\033[0m]=2x, [\033[32m0\033[0m]=auto (try [\033[32m2\033[0m] on broken fonts)")
 
+
+def add_upload(ap):
     ap2 = ap.add_argument_group('upload options')
     ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
     ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
     ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
+    ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
     ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
     ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
     ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
-    ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
-    ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
-    ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
+    ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
+    ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
+    ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes")
     ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
     ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
+    ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
+    ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
     ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
     ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
@@ -648,14 +679,25 @@ def run_argparse(
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
 
+
+def add_network(ap):
     ap2 = ap.add_argument_group('network options')
     ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
     ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
+    ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
     ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd), [\033[32m2\033[0m]=cloudflare, [\033[32m3\033[0m]=nginx, [\033[32m-1\033[0m]=closest proxy")
+    ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
+    if ANYWIN:
+        ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
+    else:
+        ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
     ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
     ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
     ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
+    ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
+
+
+def add_tls(ap):
     ap2 = ap.add_argument_group('SSL/TLS options')
     ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
     ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
@@ -664,17 +706,22 @@ def run_argparse(
     ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
     ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
 
+
+def add_zeroconf(ap):
     ap2 = ap.add_argument_group("Zeroconf options")
     ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
-    ap2.add_argument("--z-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
-    ap2.add_argument("--z-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
+    ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
+    ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
+    ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
     ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
     ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
 
-    ap2 = ap.add_argument_group("Zeroconf-mDNS options:")
+
+def add_zc_mdns(ap):
+    ap2 = ap.add_argument_group("Zeroconf-mDNS options")
     ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
-    ap2.add_argument("--zm-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
-    ap2.add_argument("--zm-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
+    ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
+    ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
     ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
     ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
     ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
@@ -688,14 +735,18 @@ def run_argparse(
|
|||||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
||||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||||
|
|
||||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options:")
|
|
||||||
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
|
||||||
ap2.add_argument("--zs-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
|
||||||
ap2.add_argument("--zs-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
|
||||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
|
||||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] or [\033[32mpriv/?pw=hunter2\033[0m]")
|
|
||||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=uuid.uuid4().urn[4:], help="USN (device identifier) to announce")
|
|
||||||
|
|
||||||
|
def add_zc_ssdp(ap):
|
||||||
|
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
||||||
|
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
||||||
|
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
|
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||||
|
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||||
|
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||||
|
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
|
||||||
|
|
||||||
|
|
||||||
|
def add_ftp(ap):
|
||||||
ap2 = ap.add_argument_group('FTP options')
|
ap2 = ap.add_argument_group('FTP options')
|
||||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||||
@@ -704,11 +755,15 @@ def run_argparse(
|
|||||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||||
|
|
||||||
|
|
||||||
|
def add_webdav(ap):
|
||||||
ap2 = ap.add_argument_group('WebDAV options')
|
ap2 = ap.add_argument_group('WebDAV options')
|
||||||
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||||
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
|
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
|
||||||
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
||||||
|
|
||||||
|
|
||||||
|
def add_smb(ap):
|
||||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
|
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
|
||||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||||
@@ -720,6 +775,26 @@ def run_argparse(
|
|||||||
ap2.add_argument("--smbvv", action="store_true", help="verboser")
|
ap2.add_argument("--smbvv", action="store_true", help="verboser")
|
||||||
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
||||||
|
|
||||||
|
|
||||||
|
def add_hooks(ap):
|
||||||
|
ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
|
||||||
|
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
||||||
|
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
||||||
|
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
|
||||||
|
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
||||||
|
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
||||||
|
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
||||||
|
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
||||||
|
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
||||||


+def add_yolo(ap):
+ap2 = ap.add_argument_group('yolo options')
+ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
+ap2.add_argument("--getmod", action="store_true", help="permit ?move=[...] and ?delete as GET")


+def add_optouts(ap):
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
@@ -728,15 +803,18 @@ def run_argparse(
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
+ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")


+def add_safety(ap, fk_salt):
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
-ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
+ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
-ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
+ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
-ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
+ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
@@ -750,12 +828,18 @@ def run_argparse(
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
+ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
+ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
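The -s / -ss / -sss options above are cumulative aliases; a rough sketch of how such aliases can be expanded into plain argv entries before parsing, with the flag lists copied from the help strings above (copyparty's real expansion happens elsewhere and may differ):

# illustrative only: expand the cumulative safety aliases listed in the help text
ALIASES = {
    "-s": ["--dotpart", "--no-thumb", "--no-mtag-ff", "--no-robots", "--force-js"],
    "-ss": ["-s", "--unpost=0", "--no-del", "--no-mv", "--hardlink", "--vague-403", "--ban-404=50,60,1440", "-nih"],
    "-sss": ["-ss", "--no-dav", "--no-logues", "--no-readme", "-lo=cpp-%Y-%m%d-%H%M%S.txt.xz", "--ls=**,*,ln,p,r"],
}

def expand(argv: list) -> list:
    ret = []
    for a in argv:
        if a in ALIASES:
            ret += expand(ALIASES[a])  # recurse so -sss also pulls in -ss and -s
        else:
            ret.append(a)
    return ret

print(expand(["-ss"]))  # -> the full flag list behind -ss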


+def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
-ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example [\033[32midx\033[0m] will do volume indexing + metadata analysis")
+ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")


+def add_logging(ap):
ap2 = ap.add_argument_group('logging options')
ap2.add_argument("-q", action="store_true", help="quiet")
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
@@ -765,11 +849,15 @@ def run_argparse(
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching")


+def add_admin(ap):
ap2 = ap.add_argument_group('admin panel options')
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")


+def add_thumbnail(ap):
ap2 = ap.add_argument_group('thumbnail options')
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
@@ -786,22 +874,26 @@ def run_argparse(
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
-ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
+ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; case-insensitive if -e2d")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
|
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
|
||||||
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
|
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
|
||||||
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
|
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
|
||||||


+def add_transcoding(ap):
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")


+def add_db_general(ap, hcores):
ap2 = ap.add_argument_group('general db options')
-ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
+ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplication")
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
@@ -811,7 +903,9 @@ def run_argparse(
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans (volflag=nohash)")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans (volflag=noidx)")
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
+ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
+ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
@@ -820,7 +914,10 @@ def run_argparse(
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
+ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
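The --dbd profile added a few lines up is validated further down in this same diff against the set acid|swal|wal|yolo; a minimal sketch of that check, reusing the profile names exactly as they appear there (see --help-dbd for what each profile trades off):

# illustrative only: reject unknown database-durability profiles
DBD_PROFILES = "acid|swal|wal|yolo".split("|")

def check_dbd(dbd: str) -> str:
    if dbd not in DBD_PROFILES:
        t = "invalid dbd [{}]; must be one of [{}]"
        raise ValueError(t.format(dbd, "|".join(DBD_PROFILES)))
    return dbd

check_dbd("wal")   # the default above; passes
check_dbd("acid")  # passes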


+def add_db_metadata(ap):
ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
@@ -838,19 +935,32 @@ def run_argparse(
default=".vq,.aq,vc,ac,fmt,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
|
||||||
|
|
||||||
|
def add_ui(ap, retry):
|
||||||
ap2 = ap.add_argument_group('ui options')
|
ap2 = ap.add_argument_group('ui options')
|
||||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||||
|
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||||
|
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
||||||
|
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible by -np)")
|
||||||
|
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||||
|
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||||
|
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||||
|
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
||||||
|
|
||||||
|
|
||||||
|
def add_debug(ap):
|
||||||
ap2 = ap.add_argument_group('debug options')
|
ap2 = ap.add_argument_group('debug options')
|
||||||
|
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
|
||||||
|
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
|
||||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
|
||||||
@@ -862,9 +972,58 @@ def run_argparse(
|
|||||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
||||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
||||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
||||||
-# fmt: on

+# fmt: on


+def run_argparse(
+argv: list[str], formatter: Any, retry: bool, nc: int
+) -> argparse.Namespace:
+ap = argparse.ArgumentParser(
+formatter_class=formatter,
+prog="copyparty",
+description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
+)

+try:
+fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
+except:
+fk_salt = "hunter2"

+hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U

+tty = os.environ.get("TERM", "").lower() == "linux"

+srvname = get_srvname()

+add_general(ap, nc, srvname)
+add_network(ap)
+add_tls(ap)
+add_qr(ap, tty)
+add_zeroconf(ap)
+add_zc_mdns(ap)
+add_zc_ssdp(ap)
+add_upload(ap)
+add_db_general(ap, hcores)
+add_db_metadata(ap)
+add_thumbnail(ap)
+add_transcoding(ap)
+add_ftp(ap)
+add_webdav(ap)
+add_smb(ap)
+add_safety(ap, fk_salt)
+add_optouts(ap)
+add_shutdown(ap)
+add_yolo(ap)
+add_hooks(ap)
+add_ui(ap, retry)
+add_admin(ap)
+add_logging(ap)
+add_debug(ap)

ap2 = ap.add_argument_group("help sections")
+sects = get_sects()
for k, h, _ in sects:
ap2.add_argument("--help-" + k, action="store_true", help=h)
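run_argparse above takes the formatter as a parameter because main() (next hunk) retries it with progressively simpler help formatters when a fancy one fails on a limited terminal; a minimal sketch of that fallback pattern, using stock argparse formatters as stand-ins for RiceFormatter/Dodge11874 (names and error handling simplified):

# illustrative only: try a rich help formatter first, fall back to plainer ones
import argparse

def parse_with_fallback(argv: list) -> argparse.Namespace:
    formatters = [
        argparse.RawDescriptionHelpFormatter,  # stand-in for RiceFormatter
        argparse.HelpFormatter,                # stand-in for BasicDodge11874
    ]
    last_exc = Exception("no formatter worked")
    for fmtr in formatters:
        try:
            ap = argparse.ArgumentParser(formatter_class=fmtr, prog="copyparty")
            ap.add_argument("-q", action="store_true", help="quiet")
            return ap.parse_args(argv)
        except SystemExit:
            raise  # --help or invalid args; let argparse exit normally
        except Exception as ex:
            last_exc = ex  # formatter blew up; try the next one
    raise last_exc

print(parse_with_fallback(["-q"]))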
@@ -920,6 +1079,9 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
showlic()
|
showlic()
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
||||||
|
if EXE:
|
||||||
|
print("pybin: {}\n".format(pybin), end="")
|
||||||
|
|
||||||
ensure_locale()
|
ensure_locale()
|
||||||
if HAVE_SSL:
|
if HAVE_SSL:
|
||||||
ensure_cert()
|
ensure_cert()
|
||||||
@@ -952,7 +1114,8 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
if da:
|
if da:
|
||||||
argv.extend(["--qr"])
|
argv.extend(["--qr"])
|
||||||
if ANYWIN or not os.geteuid():
|
if ANYWIN or not os.geteuid():
|
||||||
-argv.extend(["-p80,443,3923", "--ign-ebind"])
+# win10 allows symlinks if admin; can be unexpected
+argv.extend(["-p80,443,3923", "--ign-ebind", "--no-dedup"])
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -974,6 +1137,7 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
|
||||||
try:
|
try:
|
||||||
al = run_argparse(argv, fmtr, retry, nc)
|
al = run_argparse(argv, fmtr, retry, nc)
|
||||||
|
dal = run_argparse([], fmtr, retry, nc)
|
||||||
break
|
break
|
||||||
except SystemExit:
|
except SystemExit:
|
||||||
raise
|
raise
|
||||||
@@ -981,8 +1145,12 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
retry = True
|
retry = True
|
||||||
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
|
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
|
||||||
|
|
||||||
-assert al # type: ignore
-al.E = E # __init__ is not shared when oxidized
+try:
+assert al # type: ignore
+assert dal # type: ignore
+al.E = E # __init__ is not shared when oxidized
+except:
+sys.exit(1)

if WINDOWS and not al.keep_qem:
|
if WINDOWS and not al.keep_qem:
|
||||||
try:
|
try:
|
||||||
@@ -1085,7 +1253,7 @@ def main(argv: Optional[list[str]] = None) -> None:
|
|||||||
|
|
||||||
# signal.signal(signal.SIGINT, sighandler)
|
# signal.signal(signal.SIGINT, sighandler)
|
||||||
|
|
||||||
-SvcHub(al, argv, "".join(printed)).run()
+SvcHub(al, dal, argv, "".join(printed)).run()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
|
|
||||||
-VERSION = (1, 5, 0)
+VERSION = (1, 6, 8)
-CODENAME = "babel"
+CODENAME = "cors k"
-BUILD_DT = (2022, 12, 3)
+BUILD_DT = (2023, 3, 12)
|
|
||||||
S_VERSION = ".".join(map(str, VERSION))
|
S_VERSION = ".".join(map(str, VERSION))
|
||||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ from datetime import datetime
|
|||||||
|
|
||||||
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
|
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
+from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
from .util import (
|
from .util import (
|
||||||
IMPLICATIONS,
|
IMPLICATIONS,
|
||||||
META_NOBOTS,
|
META_NOBOTS,
|
||||||
@@ -21,7 +22,7 @@ from .util import (
|
|||||||
UNPLICATIONS,
|
UNPLICATIONS,
|
||||||
Pebkac,
|
Pebkac,
|
||||||
absreal,
|
absreal,
|
||||||
-fsenc,
+afsenc,
get_df,
|
get_df,
|
||||||
humansize,
|
humansize,
|
||||||
relchk,
|
relchk,
|
||||||
@@ -36,7 +37,7 @@ if True: # pylint: disable=using-constant-test
|
|||||||
|
|
||||||
from typing import Any, Generator, Optional, Union
|
from typing import Any, Generator, Optional, Union
|
||||||
|
|
||||||
-from .util import RootLogger
+from .util import NamedLogger, RootLogger
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
pass
|
pass
|
||||||
@@ -370,7 +371,6 @@ class VFS(object):
|
|||||||
|
|
||||||
def _find(self, vpath: str) -> tuple["VFS", str]:
|
def _find(self, vpath: str) -> tuple["VFS", str]:
|
||||||
"""return [vfs,remainder]"""
|
"""return [vfs,remainder]"""
|
||||||
-vpath = undot(vpath)
if vpath == "":
|
if vpath == "":
|
||||||
return self, ""
|
return self, ""
|
||||||
|
|
||||||
@@ -381,7 +381,7 @@ class VFS(object):
|
|||||||
rem = ""
|
rem = ""
|
||||||
|
|
||||||
if name in self.nodes:
|
if name in self.nodes:
|
||||||
-return self.nodes[name]._find(rem)
+return self.nodes[name]._find(undot(rem))
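The VFS hunks here push undot() into every recursion step and access check instead of normalizing the path only once; a minimal sketch of what such dot-segment normalization typically does (the real undot lives in copyparty's util module and may differ in detail):

# illustrative only: collapse "." and ".." so "a/../../b" cannot climb above the root
def undot_sketch(vpath: str) -> str:
    out = []
    for seg in vpath.split("/"):
        if seg in ("", "."):
            continue
        if seg == "..":
            if out:
                out.pop()
            continue
        out.append(seg)
    return "/".join(out)

assert undot_sketch("music/../../etc/passwd") == "etc/passwd"
assert undot_sketch("a/./b//c") == "a/b/c"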
|
|
||||||
return self, vpath
|
return self, vpath
|
||||||
|
|
||||||
@@ -389,7 +389,7 @@ class VFS(object):
|
|||||||
self, vpath: str, uname: str
|
self, vpath: str, uname: str
|
||||||
) -> tuple[bool, bool, bool, bool, bool, bool]:
|
) -> tuple[bool, bool, bool, bool, bool, bool]:
|
||||||
"""can Read,Write,Move,Delete,Get,Upget"""
|
"""can Read,Write,Move,Delete,Get,Upget"""
|
||||||
-vn, _ = self._find(vpath)
+vn, _ = self._find(undot(vpath))
c = vn.axs
|
c = vn.axs
|
||||||
return (
|
return (
|
||||||
uname in c.uread or "*" in c.uread,
|
uname in c.uread or "*" in c.uread,
|
||||||
@@ -419,7 +419,7 @@ class VFS(object):
|
|||||||
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
||||||
raise Pebkac(404)
|
raise Pebkac(404)
|
||||||
|
|
||||||
-vn, rem = self._find(vpath)
+vn, rem = self._find(undot(vpath))
c: AXS = vn.axs
|
c: AXS = vn.axs
|
||||||
|
|
||||||
for req, d, msg in [
|
for req, d, msg in [
|
||||||
@@ -588,7 +588,7 @@ class VFS(object):
|
|||||||
|
|
||||||
# if multiselect: add all items to archive root
|
# if multiselect: add all items to archive root
|
||||||
# if single folder: the folder itself is the top-level item
|
# if single folder: the folder itself is the top-level item
|
||||||
-folder = "" if flt or not wrap else (vrem.split("/")[-1] or "top")
+folder = "" if flt or not wrap else (vrem.split("/")[-1].lstrip(".") or "top")
|
|
||||||
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
||||||
for _, _, vpath, apath, files, rd, vd in g:
|
for _, _, vpath, apath, files, rd, vd in g:
|
||||||
@@ -654,11 +654,15 @@ class AuthSrv(object):
|
|||||||
args: argparse.Namespace,
|
args: argparse.Namespace,
|
||||||
log_func: Optional["RootLogger"],
|
log_func: Optional["RootLogger"],
|
||||||
warn_anonwrite: bool = True,
|
warn_anonwrite: bool = True,
|
||||||
|
dargs: Optional[argparse.Namespace] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.args = args
|
self.args = args
|
||||||
|
self.dargs = dargs or args
|
||||||
self.log_func = log_func
|
self.log_func = log_func
|
||||||
self.warn_anonwrite = warn_anonwrite
|
self.warn_anonwrite = warn_anonwrite
|
||||||
self.line_ctr = 0
|
self.line_ctr = 0
|
||||||
|
self.indent = ""
|
||||||
|
self.desc = []
|
||||||
|
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.reload()
|
self.reload()
|
||||||
@@ -691,17 +695,47 @@ class AuthSrv(object):
|
|||||||
raise Exception("invalid config")
|
raise Exception("invalid config")
|
||||||
|
|
||||||
if src in mount.values():
|
if src in mount.values():
|
||||||
-t = "warning: filesystem-path [{}] mounted in multiple locations:"
+t = "filesystem-path [{}] mounted in multiple locations:"
t = t.format(src)
|
t = t.format(src)
|
||||||
for v in [k for k, v in mount.items() if v == src] + [dst]:
|
for v in [k for k, v in mount.items() if v == src] + [dst]:
|
||||||
t += "\n /{}".format(v)
|
t += "\n /{}".format(v)
|
||||||
|
|
||||||
self.log(t, c=3)
|
self.log(t, c=3)
|
||||||
|
raise Exception("invalid config")
|
||||||
|
|
||||||
|
if not bos.path.isdir(src):
|
||||||
|
self.log("warning: filesystem-path does not exist: {}".format(src), 3)
|
||||||
|
|
||||||
mount[dst] = src
|
mount[dst] = src
|
||||||
daxs[dst] = AXS()
|
daxs[dst] = AXS()
|
||||||
mflags[dst] = {}
|
mflags[dst] = {}
|
||||||
|
|
||||||
|
def _e(self, desc: Optional[str] = None) -> None:
|
||||||
|
if not self.args.vc or not self.line_ctr:
|
||||||
|
return
|
||||||
|
|
||||||
|
if not desc and not self.indent:
|
||||||
|
self.log("")
|
||||||
|
return
|
||||||
|
|
||||||
|
desc = desc or ""
|
||||||
|
desc = desc.replace("[", "[\033[0m").replace("]", "\033[90m]")
|
||||||
|
self.log(" >>> {}{}".format(self.indent, desc), "90")
|
||||||
|
|
||||||
|
def _l(self, ln: str, c: int, desc: str) -> None:
|
||||||
|
if not self.args.vc or not self.line_ctr:
|
||||||
|
return
|
||||||
|
|
||||||
|
if c < 10:
|
||||||
|
c += 30
|
||||||
|
|
||||||
|
t = "\033[97m{:4} \033[{}m{}{}"
|
||||||
|
if desc:
|
||||||
|
t += " \033[0;90m# {}\033[0m"
|
||||||
|
desc = desc.replace("[", "[\033[0m").replace("]", "\033[90m]")
|
||||||
|
|
||||||
|
self.log(t.format(self.line_ctr, c, self.indent, ln, desc))
|
||||||
|
|
||||||
def _parse_config_file(
|
def _parse_config_file(
|
||||||
self,
|
self,
|
||||||
fp: str,
|
fp: str,
|
||||||
@@ -711,61 +745,140 @@ class AuthSrv(object):
|
|||||||
mflags: dict[str, dict[str, Any]],
|
mflags: dict[str, dict[str, Any]],
|
||||||
mount: dict[str, str],
|
mount: dict[str, str],
|
||||||
) -> None:
|
) -> None:
|
||||||
skip = False
|
self.desc = []
|
||||||
vol_src = None
|
|
||||||
vol_dst = None
|
|
||||||
self.line_ctr = 0
|
self.line_ctr = 0
|
||||||
|
|
||||||
expand_config_file(cfg_lines, fp, "")
|
expand_config_file(cfg_lines, fp, "")
|
||||||
|
if self.args.vc:
|
||||||
|
lns = ["{:4}: {}".format(n, s) for n, s in enumerate(cfg_lines, 1)]
|
||||||
|
self.log("expanded config file (unprocessed):\n" + "\n".join(lns))
|
||||||
|
|
||||||
|
cfg_lines = upgrade_cfg_fmt(self.log, self.args, cfg_lines, fp)
|
||||||
|
|
||||||
|
cat = ""
|
||||||
|
catg = "[global]"
|
||||||
|
cata = "[accounts]"
|
||||||
|
catx = "accs:"
|
||||||
|
catf = "flags:"
|
||||||
|
ap: Optional[str] = None
|
||||||
|
vp: Optional[str] = None
|
||||||
for ln in cfg_lines:
|
for ln in cfg_lines:
|
||||||
self.line_ctr += 1
|
self.line_ctr += 1
|
||||||
if not ln and vol_src is not None:
|
ln = ln.split(" #")[0].strip()
|
||||||
vol_src = None
|
if not ln.split("#")[0].strip():
|
||||||
vol_dst = None
|
|
||||||
|
|
||||||
if skip:
|
|
||||||
if not ln:
|
|
||||||
skip = False
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not ln or ln.startswith("#"):
|
subsection = ln in (catx, catf)
|
||||||
continue
|
if ln.startswith("[") or subsection:
|
||||||
|
self._e()
|
||||||
|
if ap is None and vp is not None:
|
||||||
|
t = "the first line after [/{}] must be a filesystem path to share on that volume"
|
||||||
|
raise Exception(t.format(vp))
|
||||||
|
|
||||||
if vol_src is None:
|
cat = ln
|
||||||
if ln.startswith("u "):
|
if not subsection:
|
||||||
u, p = ln[2:].split(":", 1)
|
ap = vp = None
|
||||||
acct[u] = p
|
self.indent = ""
|
||||||
elif ln.startswith("-"):
|
|
||||||
skip = True # argv
|
|
||||||
else:
|
else:
|
||||||
vol_src = ln
|
self.indent = " "
|
||||||
|
|
||||||
|
if ln == catg:
|
||||||
|
t = "begin commandline-arguments (anything from --help; dashes are optional)"
|
||||||
|
self._l(ln, 6, t)
|
||||||
|
elif ln == cata:
|
||||||
|
self._l(ln, 5, "begin user-accounts section")
|
||||||
|
elif ln.startswith("[/"):
|
||||||
|
vp = ln[1:-1].strip("/")
|
||||||
|
self._l(ln, 2, "define volume at URL [/{}]".format(vp))
|
||||||
|
elif subsection:
|
||||||
|
if ln == catx:
|
||||||
|
self._l(ln, 5, "volume access config:")
|
||||||
|
else:
|
||||||
|
t = "volume-specific config (anything from --help-flags)"
|
||||||
|
self._l(ln, 6, t)
|
||||||
|
else:
|
||||||
|
raise Exception("invalid section header")
|
||||||
|
|
||||||
|
self.indent = " " if subsection else " "
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if vol_src and vol_dst is None:
|
if cat == catg:
|
||||||
vol_dst = ln
|
self._l(ln, 6, "")
|
||||||
if not vol_dst.startswith("/"):
|
zt = split_cfg_ln(ln)
|
||||||
raise Exception('invalid mountpoint "{}"'.format(vol_dst))
|
for zs, za in zt.items():
|
||||||
|
zs = zs.lstrip("-")
|
||||||
if vol_src.startswith("~"):
|
if za is True:
|
||||||
vol_src = os.path.expanduser(vol_src)
|
self._e("└─argument [{}]".format(zs))
|
||||||
|
else:
|
||||||
# cfg files override arguments and previous files
|
self._e("└─argument [{}] with value [{}]".format(zs, za))
|
||||||
vol_src = absreal(vol_src)
|
|
||||||
vol_dst = vol_dst.strip("/")
|
|
||||||
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
if cat == cata:
|
||||||
lvl, uname = ln.split(" ", 1)
|
try:
|
||||||
except:
|
u, p = [zs.strip() for zs in ln.split(":", 1)]
|
||||||
lvl = ln
|
self._l(ln, 5, "account [{}], password [{}]".format(u, p))
|
||||||
uname = "*"
|
acct[u] = p
|
||||||
|
except:
|
||||||
|
t = 'lines inside the [accounts] section must be "username: password"'
|
||||||
|
raise Exception(t)
|
||||||
|
continue
|
||||||
|
|
||||||
if lvl == "a":
|
if vp is not None and ap is None:
|
||||||
t = "WARNING (config-file): permission flag 'a' is deprecated; please use 'rw' instead"
|
ap = ln
|
||||||
self.log(t, 1)
|
if ap.startswith("~"):
|
||||||
|
ap = os.path.expanduser(ap)
|
||||||
|
|
||||||
assert vol_dst is not None
|
ap = absreal(ap)
|
||||||
self._read_vol_str(lvl, uname, daxs[vol_dst], mflags[vol_dst])
|
self._l(ln, 2, "bound to filesystem-path [{}]".format(ap))
|
||||||
|
self._map_volume(ap, vp, mount, daxs, mflags)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if cat == catx:
|
||||||
|
err = ""
|
||||||
|
try:
|
||||||
|
self._l(ln, 5, "volume access config:")
|
||||||
|
sk, sv = ln.split(":")
|
||||||
|
if re.sub("[rwmdgG]", "", sk) or not sk:
|
||||||
|
err = "invalid accs permissions list; "
|
||||||
|
raise Exception(err)
|
||||||
|
if " " in re.sub(", *", "", sv).strip():
|
||||||
|
err = "list of users is not comma-separated; "
|
||||||
|
raise Exception(err)
|
||||||
|
self._read_vol_str(sk, sv.replace(" ", ""), daxs[vp], mflags[vp])
|
||||||
|
continue
|
||||||
|
except:
|
||||||
|
err += "accs entries must be 'rwmdgG: user1, user2, ...'"
|
||||||
|
raise Exception(err)
|
||||||
|
|
||||||
|
if cat == catf:
|
||||||
|
err = ""
|
||||||
|
try:
|
||||||
|
self._l(ln, 6, "volume-specific config:")
|
||||||
|
zd = split_cfg_ln(ln)
|
||||||
|
fstr = ""
|
||||||
|
for sk, sv in zd.items():
|
||||||
|
bad = re.sub(r"[a-z0-9_]", "", sk)
|
||||||
|
if bad:
|
||||||
|
err = "bad characters [{}] in volflag name [{}]; "
|
||||||
|
err = err.format(bad, sk)
|
||||||
|
raise Exception(err)
|
||||||
|
if sv is True:
|
||||||
|
fstr += "," + sk
|
||||||
|
else:
|
||||||
|
fstr += ",{}={}".format(sk, sv)
|
||||||
|
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||||
|
fstr = ""
|
||||||
|
if fstr:
|
||||||
|
self._read_vol_str("c", fstr[1:], daxs[vp], mflags[vp])
|
||||||
|
continue
|
||||||
|
except:
|
||||||
|
err += "flags entries (volflags) must be one of the following:\n 'flag1, flag2, ...'\n 'key: value'\n 'flag1, flag2, key: value'"
|
||||||
|
raise Exception(err)
|
||||||
|
|
||||||
|
raise Exception("unprocessable line in config")
|
||||||
|
|
||||||
|
self._e()
|
||||||
|
self.line_ctr = 0
|
||||||
|
|
||||||
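The rewritten parser above expects the new section-based config layout; a hypothetical example of such a file (volume path, account name and flags invented for illustration -- the section names, accs:/flags: subsections and line shapes are taken from the parser itself):

[global]
  -q            # anything from --help; dashes are optional
  e2dsa         # scan all volumes on startup

[accounts]
  ed: hunter2   # username: password

[/music]        # create a volume at URL /music
  /mnt/music    # filesystem path to share there
  accs:
    r: *        # everyone can read
    rwmd: ed    # ed can also write/move/delete
  flags:
    nodupe      # anything from --help-flags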
def _read_vol_str(
|
def _read_vol_str(
|
||||||
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
|
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
|
||||||
@@ -804,6 +917,13 @@ class AuthSrv(object):
|
|||||||
("G", axs.upget),
|
("G", axs.upget),
|
||||||
]: # b bb bbb
|
]: # b bb bbb
|
||||||
if ch in lvl:
|
if ch in lvl:
|
||||||
|
if un == "*":
|
||||||
|
t = "└─add permission [{0}] for [everyone] -- {2}"
|
||||||
|
else:
|
||||||
|
t = "└─add permission [{0}] for user [{1}] -- {2}"
|
||||||
|
|
||||||
|
desc = permdescs.get(ch, "?")
|
||||||
|
self._e(t.format(ch, un, desc))
|
||||||
al.add(un)
|
al.add(un)
|
||||||
|
|
||||||
def _read_volflag(
|
def _read_volflag(
|
||||||
@@ -813,7 +933,13 @@ class AuthSrv(object):
|
|||||||
value: Union[str, bool, list[str]],
|
value: Union[str, bool, list[str]],
|
||||||
is_list: bool,
|
is_list: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
if name not in ["mtp"]:
|
desc = flagdescs.get(name, "?").replace("\n", " ")
|
||||||
|
if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split():
|
||||||
|
if value is True:
|
||||||
|
t = "└─add volflag [{}] = {} ({})"
|
||||||
|
else:
|
||||||
|
t = "└─add volflag [{}] = [{}] ({})"
|
||||||
|
self._e(t.format(name, value, desc))
|
||||||
flags[name] = value
|
flags[name] = value
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -826,6 +952,7 @@ class AuthSrv(object):
|
|||||||
vals += [value]
|
vals += [value]
|
||||||
|
|
||||||
flags[name] = vals
|
flags[name] = vals
|
||||||
|
self._e("volflag [{}] += {} ({})".format(name, vals, desc))
|
||||||
|
|
||||||
def reload(self) -> None:
|
def reload(self) -> None:
|
||||||
"""
|
"""
|
||||||
@@ -876,6 +1003,18 @@ class AuthSrv(object):
|
|||||||
lns: list[str] = []
|
lns: list[str] = []
|
||||||
try:
|
try:
|
||||||
self._parse_config_file(cfg_fn, lns, acct, daxs, mflags, mount)
|
self._parse_config_file(cfg_fn, lns, acct, daxs, mflags, mount)
|
||||||
|
|
||||||
|
zs = "#\033[36m cfg files in "
|
||||||
|
zst = [x[len(zs) :] for x in lns if x.startswith(zs)]
|
||||||
|
for zs in list(set(zst)):
|
||||||
|
self.log("discovered config files in " + zs, 6)
|
||||||
|
|
||||||
|
zs = "#\033[36m opening cfg file"
|
||||||
|
zstt = [x.split(" -> ") for x in lns if x.startswith(zs)]
|
||||||
|
zst = [(max(0, len(x) - 2) * " ") + "└" + x[-1] for x in zstt]
|
||||||
|
t = "loaded {} config files:\n{}"
|
||||||
|
self.log(t.format(len(zst), "\n".join(zst)))
|
||||||
|
|
||||||
except:
|
except:
|
||||||
lns = lns[: self.line_ctr]
|
lns = lns[: self.line_ctr]
|
||||||
slns = ["{:4}: {}".format(n, s) for n, s in enumerate(lns, 1)]
|
slns = ["{:4}: {}".format(n, s) for n, s in enumerate(lns, 1)]
|
||||||
@@ -956,7 +1095,7 @@ class AuthSrv(object):
|
|||||||
promote = []
|
promote = []
|
||||||
demote = []
|
demote = []
|
||||||
for vol in vfs.all_vols.values():
|
for vol in vfs.all_vols.values():
|
||||||
-zb = hashlib.sha512(fsenc(vol.realpath)).digest()
+zb = hashlib.sha512(afsenc(vol.realpath)).digest()
hid = base64.b32encode(zb).decode("ascii").lower()
|
hid = base64.b32encode(zb).decode("ascii").lower()
|
||||||
vflag = vol.flags.get("hist")
|
vflag = vol.flags.get("hist")
|
||||||
if vflag == "-":
|
if vflag == "-":
|
||||||
@@ -975,7 +1114,7 @@ class AuthSrv(object):
|
|||||||
except:
|
except:
|
||||||
owner = None
|
owner = None
|
||||||
|
|
||||||
-me = fsenc(vol.realpath).rstrip()
+me = afsenc(vol.realpath).rstrip()
if owner not in [None, me]:
|
if owner not in [None, me]:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -1115,19 +1254,30 @@ class AuthSrv(object):
|
|||||||
if ptn:
|
if ptn:
|
||||||
vol.flags[vf] = re.compile(ptn)
|
vol.flags[vf] = re.compile(ptn)
|
||||||
|
|
||||||
for k in ["e2t", "e2ts", "e2tsr", "e2v", "e2vu", "e2vp", "xdev", "xvol"]:
|
for ga, vf in vf_bmap().items():
|
||||||
if getattr(self.args, k):
|
|
||||||
vol.flags[k] = True
|
|
||||||
|
|
||||||
for ga, vf in (
|
|
||||||
("no_forget", "noforget"),
|
|
||||||
("no_dupe", "nodupe"),
|
|
||||||
("magic", "magic"),
|
|
||||||
("xlink", "xlink"),
|
|
||||||
):
|
|
||||||
if getattr(self.args, ga):
|
if getattr(self.args, ga):
|
||||||
vol.flags[vf] = True
|
vol.flags[vf] = True
|
||||||
|
|
||||||
|
for ve, vd in (
|
||||||
|
("nodotsrch", "dotsrch"),
|
||||||
|
("sb_lg", "no_sb_lg"),
|
||||||
|
("sb_md", "no_sb_md"),
|
||||||
|
):
|
||||||
|
if ve in vol.flags:
|
||||||
|
vol.flags.pop(vd, None)
|
||||||
|
|
||||||
|
for ga, vf in vf_vmap().items():
|
||||||
|
if vf not in vol.flags:
|
||||||
|
vol.flags[vf] = getattr(self.args, ga)
|
||||||
|
|
||||||
|
for k in ("nrand",):
|
||||||
|
if k not in vol.flags:
|
||||||
|
vol.flags[k] = getattr(self.args, k)
|
||||||
|
|
||||||
|
for k in ("nrand",):
|
||||||
|
if k in vol.flags:
|
||||||
|
vol.flags[k] = int(vol.flags[k])
|
||||||
|
|
||||||
for k1, k2 in IMPLICATIONS:
|
for k1, k2 in IMPLICATIONS:
|
||||||
if k1 in vol.flags:
|
if k1 in vol.flags:
|
||||||
vol.flags[k2] = True
|
vol.flags[k2] = True
|
||||||
@@ -1136,6 +1286,12 @@ class AuthSrv(object):
|
|||||||
if k1 in vol.flags:
|
if k1 in vol.flags:
|
||||||
vol.flags[k2] = False
|
vol.flags[k2] = False
|
||||||
|
|
||||||
|
dbds = "acid|swal|wal|yolo"
|
||||||
|
vol.flags["dbd"] = dbd = vol.flags.get("dbd") or self.args.dbd
|
||||||
|
if dbd not in dbds.split("|"):
|
||||||
|
t = "invalid dbd [{}]; must be one of [{}]"
|
||||||
|
raise Exception(t.format(dbd, dbds))
|
||||||
|
|
||||||
# default tag cfgs if unset
|
# default tag cfgs if unset
|
||||||
if "mte" not in vol.flags:
|
if "mte" not in vol.flags:
|
||||||
vol.flags["mte"] = self.args.mte
|
vol.flags["mte"] = self.args.mte
|
||||||
@@ -1146,8 +1302,32 @@ class AuthSrv(object):
|
|||||||
if "mth" not in vol.flags:
|
if "mth" not in vol.flags:
|
||||||
vol.flags["mth"] = self.args.mth
|
vol.flags["mth"] = self.args.mth
|
||||||
|
|
||||||
# append parsers from argv to volflags
|
# append additive args from argv to volflags
|
||||||
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
hooks = "xbu xau xiu xbr xar xbd xad xm".split()
|
||||||
|
for name in ["mtp"] + hooks:
|
||||||
|
self._read_volflag(vol.flags, name, getattr(self.args, name), True)
|
||||||
|
|
||||||
|
for hn in hooks:
|
||||||
|
cmds = vol.flags.get(hn)
|
||||||
|
if not cmds:
|
||||||
|
continue
|
||||||
|
|
||||||
|
ncmds = []
|
||||||
|
for cmd in cmds:
|
||||||
|
hfs = []
|
||||||
|
ocmd = cmd
|
||||||
|
while "," in cmd[:6]:
|
||||||
|
zs, cmd = cmd.split(",", 1)
|
||||||
|
hfs.append(zs)
|
||||||
|
|
||||||
|
if "c" in hfs and "f" in hfs:
|
||||||
|
t = "cannot combine flags c and f; removing f from eventhook [{}]"
|
||||||
|
self.log(t.format(ocmd), 1)
|
||||||
|
hfs = [x for x in hfs if x != "f"]
|
||||||
|
ocmd = ",".join(hfs + [cmd])
|
||||||
|
|
||||||
|
ncmds.append(ocmd)
|
||||||
|
vol.flags[hn] = ncmds
|
||||||
|
|
||||||
# d2d drops all database features for a volume
|
# d2d drops all database features for a volume
|
||||||
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"], ["d2d", "e2v"]]:
|
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"], ["d2d", "e2v"]]:
|
||||||
@@ -1183,10 +1363,22 @@ class AuthSrv(object):
|
|||||||
if k in ints:
|
if k in ints:
|
||||||
vol.flags[k] = int(vol.flags[k])
|
vol.flags[k] = int(vol.flags[k])
|
||||||
|
|
||||||
if "lifetime" in vol.flags and "e2d" not in vol.flags:
|
if "e2d" not in vol.flags:
|
||||||
t = 'removing lifetime config from volume "/{}" because e2d is disabled'
|
if "lifetime" in vol.flags:
|
||||||
self.log(t.format(vol.vpath), 1)
|
t = 'removing lifetime config from volume "/{}" because e2d is disabled'
|
||||||
del vol.flags["lifetime"]
|
self.log(t.format(vol.vpath), 1)
|
||||||
|
del vol.flags["lifetime"]
|
||||||
|
|
||||||
|
needs_e2d = [x for x in hooks if x != "xm"]
|
||||||
|
drop = [x for x in needs_e2d if vol.flags.get(x)]
|
||||||
|
if drop:
|
||||||
|
t = 'removing [{}] from volume "/{}" because e2d is disabled'
|
||||||
|
self.log(t.format(", ".join(drop), vol.vpath), 1)
|
||||||
|
for x in drop:
|
||||||
|
vol.flags.pop(x)
|
||||||
|
|
||||||
|
if vol.flags.get("neversymlink") and not vol.flags.get("hardlink"):
|
||||||
|
vol.flags["copydupes"] = True
|
||||||
|
|
||||||
# verify tags mentioned by -mt[mp] are used by -mte
|
# verify tags mentioned by -mt[mp] are used by -mte
|
||||||
local_mtp = {}
|
local_mtp = {}
|
||||||
@@ -1432,32 +1624,298 @@ class AuthSrv(object):
         if not flag_r:
             sys.exit(0)

+    def cgen(self) -> None:
+        ret = [
+            "## WARNING:",
+            "## there will probably be mistakes in",
+            "## commandline-args (and maybe volflags)",
+            "",
+        ]
+
+        csv = set("i p".split())
+        lst = set("c ihead mtm mtp xad xar xau xiu xbd xbr xbu xm".split())
+        askip = set("a v c vc cgen theme".split())
+
+        # keymap from argv to vflag
+        amap = vf_bmap()
+        amap.update(vf_vmap())
+        amap.update(vf_cmap())
+        vmap = {v: k for k, v in amap.items()}
+
+        args = {k: v for k, v in vars(self.args).items()}
+        pops = []
+        for k1, k2 in IMPLICATIONS:
+            if args.get(k1):
+                pops.append(k2)
+        for pop in pops:
+            args.pop(pop, None)
+
+        if args:
+            ret.append("[global]")
+            for k, v in args.items():
+                if k in askip:
+                    continue
+                if k in csv:
+                    v = ", ".join([str(za) for za in v])
+                try:
+                    v2 = getattr(self.dargs, k)
+                    if v == v2:
+                        continue
+                except:
+                    continue
+
+                dk = " " + k.replace("_", "-")
+                if k in lst:
+                    for ve in v:
+                        ret.append("{}: {}".format(dk, ve))
+                else:
+                    if v is True:
+                        ret.append(dk)
+                    elif v not in (False, None, ""):
+                        ret.append("{}: {}".format(dk, v))
+            ret.append("")
+
+        if self.acct:
+            ret.append("[accounts]")
+            for u, p in self.acct.items():
+                ret.append(" {}: {}".format(u, p))
+            ret.append("")
+
+        for vol in self.vfs.all_vols.values():
+            ret.append("[/{}]".format(vol.vpath))
+            ret.append(" " + vol.realpath)
+            ret.append(" accs:")
+            perms = {
+                "r": "uread",
+                "w": "uwrite",
+                "m": "umove",
+                "d": "udel",
+                "g": "uget",
+                "G": "upget",
+            }
+            users = {}
+            for pkey in perms.values():
+                for uname in getattr(vol.axs, pkey):
+                    try:
+                        users[uname] += 1
+                    except:
+                        users[uname] = 1
+            lusers = [(v, k) for k, v in users.items()]
+            vperms = {}
+            for _, uname in sorted(lusers):
+                pstr = ""
+                for pchar, pkey in perms.items():
+                    if uname in getattr(vol.axs, pkey):
+                        pstr += pchar
+                if "g" in pstr and "G" in pstr:
+                    pstr = pstr.replace("g", "")
+                try:
+                    vperms[pstr].append(uname)
+                except:
+                    vperms[pstr] = [uname]
+            for pstr, uname in vperms.items():
+                ret.append(" {}: {}".format(pstr, ", ".join(uname)))
+            trues = []
+            vals = []
+            for k, v in sorted(vol.flags.items()):
+                try:
+                    ak = vmap[k]
+                    if getattr(self.args, ak) is v:
+                        continue
+                except:
+                    pass
+
+                if k in lst:
+                    for ve in v:
+                        vals.append("{}: {}".format(k, ve))
+                elif v is True:
+                    trues.append(k)
+                elif v is not False:
+                    try:
+                        v = v.pattern
+                    except:
+                        pass
+
+                    vals.append("{}: {}".format(k, v))
+            pops = []
+            for k1, k2 in IMPLICATIONS:
+                if k1 in trues:
+                    pops.append(k2)
+            trues = [x for x in trues if x not in pops]
+            if trues:
+                vals.append(", ".join(trues))
+            if vals:
+                ret.append(" flags:")
+                for zs in vals:
+                    ret.append(" " + zs)
+            ret.append("")
+
+        self.log("generated config:\n\n" + "\n".join(ret))
+
+
+def split_cfg_ln(ln: str) -> dict[str, Any]:
+    # "a, b, c: 3" => {a:true, b:true, c:3}
+    ret = {}
+    while True:
+        ln = ln.strip()
+        if not ln:
+            break
+        ofs_sep = ln.find(",") + 1
+        ofs_var = ln.find(":") + 1
+        if not ofs_sep and not ofs_var:
+            ret[ln] = True
+            break
+        if ofs_sep and (ofs_sep < ofs_var or not ofs_var):
+            k, ln = ln.split(",", 1)
+            ret[k.strip()] = True
+        else:
+            k, ln = ln.split(":", 1)
+            ret[k.strip()] = ln.strip()
+            break
+    return ret
+
+
 def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
     """expand all % file includes"""
     fp = absreal(fp)
-    ipath += " -> " + fp
-    ret.append("#\033[36m opening cfg file{}\033[0m".format(ipath))
     if len(ipath.split(" -> ")) > 64:
         raise Exception("hit max depth of 64 includes")

     if os.path.isdir(fp):
-        for fn in sorted(os.listdir(fp)):
+        names = os.listdir(fp)
+        ret.append("#\033[36m cfg files in {} => {}\033[0m".format(fp, names))
+        for fn in sorted(names):
             fp2 = os.path.join(fp, fn)
-            if not os.path.isfile(fp2):
-                continue  # dont recurse
+            if not fp2.endswith(".conf") or fp2 in ipath:
+                continue

             expand_config_file(ret, fp2, ipath)
         return

+    ipath += " -> " + fp
+    ret.append("#\033[36m opening cfg file{}\033[0m".format(ipath))
+
     with open(fp, "rb") as f:
-        for ln in [x.decode("utf-8").strip() for x in f]:
+        for oln in [x.decode("utf-8").rstrip() for x in f]:
+            ln = oln.split(" #")[0].strip()
             if ln.startswith("% "):
+                pad = " " * len(oln.split("%")[0])
                 fp2 = ln[1:].strip()
                 fp2 = os.path.join(os.path.dirname(fp), fp2)
+                ofs = len(ret)
                 expand_config_file(ret, fp2, ipath)
+                for n in range(ofs, len(ret)):
+                    ret[n] = pad + ret[n]
                 continue

-            ret.append(ln)
+            ret.append(oln)

     ret.append("#\033[36m closed{}\033[0m".format(ipath))
+
+
+def upgrade_cfg_fmt(
+    log: Optional["NamedLogger"], args: argparse.Namespace, orig: list[str], cfg_fp: str
+) -> list[str]:
+    """convert from v1 to v2 format"""
+    zst = [x.split("#")[0].strip() for x in orig]
+    zst = [x for x in zst if x]
+    if (
+        "[global]" in zst
+        or "[accounts]" in zst
+        or "accs:" in zst
+        or "flags:" in zst
+        or [x for x in zst if x.startswith("[/")]
+        or len(zst) == len([x for x in zst if x.startswith("%")])
+    ):
+        return orig
+
+    zst = [x for x in orig if "#\033[36m opening cfg file" not in x]
+    incl = len(zst) != len(orig) - 1
+
+    t = "upgrading config file [{}] from v1 to v2"
+    if not args.vc:
+        t += ". Run with argument '--vc' to see the converted config if you want to upgrade"
+    if incl:
+        t += ". Please don't include v1 configs from v2 files or vice versa! Upgrade all of them at the same time."
+    if log:
+        log(t.format(cfg_fp), 3)
+
+    ret = []
+    vp = ""
+    ap = ""
+    cat = ""
+    catg = "[global]"
+    cata = "[accounts]"
+    catx = " accs:"
+    catf = " flags:"
+    for ln in orig:
+        sn = ln.strip()
+        if not sn:
+            cat = vp = ap = ""
+        if not sn.split("#")[0]:
+            ret.append(ln)
+        elif sn.startswith("-") and cat in ("", catg):
+            if cat != catg:
+                cat = catg
+                ret.append(cat)
+            sn = sn.lstrip("-")
+            zst = sn.split(" ", 1)
+            if len(zst) > 1:
+                sn = "{}: {}".format(zst[0], zst[1].strip())
+            ret.append(" " + sn)
+        elif sn.startswith("u ") and cat in ("", catg, cata):
+            if cat != cata:
+                cat = cata
+                ret.append(cat)
+            s1, s2 = sn[1:].split(":", 1)
+            ret.append(" {}: {}".format(s1.strip(), s2.strip()))
+        elif not ap:
+            ap = sn
+        elif not vp:
+            vp = "/" + sn.strip("/")
+            cat = "[{}]".format(vp)
+            ret.append(cat)
+            ret.append(" " + ap)
+        elif sn.startswith("c "):
+            if cat != catf:
+                cat = catf
+                ret.append(cat)
+            sn = sn[1:].strip()
+            if "=" in sn:
+                zst = sn.split("=", 1)
+                sn = zst[0].replace(",", ", ")
+                sn += ": " + zst[1]
+            else:
+                sn = sn.replace(",", ", ")
+            ret.append(" " + sn)
+        elif sn[:1] in "rwmdgG":
+            if cat != catx:
+                cat = catx
+                ret.append(cat)
+            zst = sn.split(" ")
+            zst = [x for x in zst if x]
+            if len(zst) == 1:
+                zst.append("*")
+            ret.append(" {}: {}".format(zst[0], ", ".join(zst[1:])))
+        else:
+            t = "did not understand line {} in the config"
+            t1 = t
+            n = 0
+            for ln in orig:
+                n += 1
+                t += "\n{:4} {}".format(n, ln)
+            if log:
+                log(t, 1)
+            else:
+                print("\033[31m" + t)
+            raise Exception(t1)
+
+    if args.vc and log:
+        t = "new config syntax (copy/paste this to upgrade your config):\n"
+        t += "\n# ======================[ begin upgraded config ]======================\n\n"
+        for ln in ret:
+            t += ln + "\n"
+        t += "\n# ======================[ end of upgraded config ]======================\n"
+        log(t)
+
+    return ret
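To make the converter concrete, here is a made-up v1 snippet and the v2 shape that upgrade_cfg_fmt() above would turn it into. The account name, password, paths and flag values are examples only, and the leading whitespace in the v2 form is cosmetic.

# made-up example of the v1 -> v2 config conversion performed above
cfg_v1 = """\
-e2dsa
-lo c:/cpp/logs
u ed:wark

/mnt/music
music
rw ed
r
c e2ts,nodupe
c lifetime=3600
"""

cfg_v2 = """\
[global]
  e2dsa
  lo: c:/cpp/logs
[accounts]
  ed: wark

[/music]
  /mnt/music
  accs:
    rw: ed
    r: *
  flags:
    e2ts, nodupe
    lifetime: 3600
"""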
@@ -24,13 +24,15 @@ def listdir(p: str = ".") -> list[str]:
     return [fsdec(x) for x in os.listdir(fsenc(p))]


-def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
+def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool:
     bname = fsenc(name)
     try:
         os.makedirs(bname, mode)
+        return True
     except:
         if not exist_ok or not os.path.isdir(bname):
             raise
+        return False


 def mkdir(p: str, mode: int = 0o755) -> None:
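Since makedirs() now reports whether it actually created anything, callers can branch on the result. A small usage sketch; the path and print calls are placeholders, not actual copyparty call sites.

# sketch: using the new boolean return value of bos makedirs()
from copyparty.bos import bos

hist_dir = "/tmp/copyparty-demo/hist"
if bos.makedirs(hist_dir):          # True only if the directory was created just now
    print("created", hist_dir)
else:
    print(hist_dir, "already existed")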
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals

 import threading
 import time
+import traceback

 import queue

@@ -93,12 +94,15 @@ class BrokerMp(object):

             else:
                 # new ipc invoking managed service in hub
-                obj = self.hub
-                for node in dest.split("."):
-                    obj = getattr(obj, node)
+                try:
+                    obj = self.hub
+                    for node in dest.split("."):
+                        obj = getattr(obj, node)

-                # TODO will deadlock if dest performs another ipc
-                rv = try_exec(retq_id, obj, *args)
+                    # TODO will deadlock if dest performs another ipc
+                    rv = try_exec(retq_id, obj, *args)
+                except:
+                    rv = ["exception", "stack", traceback.format_exc()]

                 if retq_id:
                     proc.q_pend.put((retq_id, "retq", rv))
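The BrokerMp change wraps the hub-side dispatch in a try/except so a crashing target is reported back to the worker instead of killing the loop. The dotted-destination lookup itself works like this; Hub and Up2k here are toy stand-ins, not the real classes.

# toy sketch of resolving "up2k.handle_rm" into a bound method via getattr,
# mirroring the dispatch loop above
class Up2k:
    def handle_rm(self, uname, ip, vpaths, _):
        return "rm {} for {}".format(vpaths, uname)

class Hub:
    up2k = Up2k()

dest = "up2k.handle_rm"
obj = Hub()
for node in dest.split("."):
    obj = getattr(obj, node)

print(obj("ed", "127.0.0.1", ["/music/x.flac"], []))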
copyparty/cfg.py (new file, 150 lines)
@@ -0,0 +1,150 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
+# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
+zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
+onedash = set(zs.split())
+
+
+def vf_bmap() -> dict[str, str]:
+    """argv-to-volflag: simple bools"""
+    ret = {
+        "never_symlink": "neversymlink",
+        "no_dedup": "copydupes",
+        "no_dupe": "nodupe",
+        "no_forget": "noforget",
+    }
+    for k in (
+        "dotsrch",
+        "e2t",
+        "e2ts",
+        "e2tsr",
+        "e2v",
+        "e2vu",
+        "e2vp",
+        "hardlink",
+        "magic",
+        "no_sb_md",
+        "no_sb_lg",
+        "rand",
+        "xdev",
+        "xlink",
+        "xvol",
+    ):
+        ret[k] = k
+    return ret
+
+
+def vf_vmap() -> dict[str, str]:
+    """argv-to-volflag: simple values"""
+    ret = {}
+    for k in ("lg_sbf", "md_sbf"):
+        ret[k] = k
+    return ret
+
+
+def vf_cmap() -> dict[str, str]:
+    """argv-to-volflag: complex/lists"""
+    ret = {}
+    for k in ("dbd", "html_head", "mte", "mth", "nrand"):
+        ret[k] = k
+    return ret
+
+
+permdescs = {
+    "r": "read; list folder contents, download files",
+    "w": 'write; upload files; need "r" to see the uploads',
+    "m": 'move; move files and folders; need "w" at destination',
+    "d": "delete; permanently delete files and folders",
+    "g": "get; download files, but cannot see folder contents",
+    "G": 'upget; same as "g" but can see filekeys of their own uploads',
+}
+
+
+flagcats = {
+    "uploads, general": {
+        "nodupe": "rejects existing files (instead of symlinking them)",
+        "hardlink": "does dedup with hardlinks instead of symlinks",
+        "neversymlink": "disables symlink fallback; full copy instead",
+        "copydupes": "disables dedup, always saves full copies of dupes",
+        "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
+        "nosub": "forces all uploads into the top folder of the vfs",
+        "magic": "enables filetype detection for nameless uploads",
+        "gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
+        "pk": "forces server-side compression, optional arg: xz,9",
+    },
+    "upload rules": {
+        "maxn=250,600": "max 250 uploads over 15min",
+        "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)",
+        "rand": "force randomized filenames, 9 chars long by default",
+        "nrand=N": "randomized filenames are N chars long",
+        "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
+        "df=1g": "ensure 1 GiB free disk space",
+    },
+    "upload rotation\n(moves all uploads into the specified folder structure)": {
+        "rotn=100,3": "3 levels of subfolders with 100 entries in each",
+        "rotf=%Y-%m/%d-%H": "date-formatted organizing",
+        "lifetime=3600": "uploads are deleted after 1 hour",
+    },
+    "database, general": {
+        "e2d": "enable database; makes files searchable + enables upload dedup",
+        "e2ds": "scan writable folders for new files on startup; also sets -e2d",
+        "e2dsa": "scans all folders for new files on startup; also sets -e2d",
+        "e2t": "enable multimedia indexing; makes it possible to search for tags",
+        "e2ts": "scan existing files for tags on startup; also sets -e2t",
+        "e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
+        "d2ts": "disables metadata collection for existing files",
+        "d2ds": "disables onboot indexing, overrides -e2ds*",
+        "d2t": "disables metadata collection, overrides -e2t*",
+        "d2v": "disables file verification, overrides -e2v*",
+        "d2d": "disables all database stuff, overrides -e2*",
+        "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
+        "scan=60": "scan for new files every 60sec, same as --re-maxage",
+        "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
+        "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
+        "noforget": "don't forget files when deleted from disk",
+        "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
+        "xlink": "cross-volume dupe detection / linking",
+        "xdev": "do not descend into other filesystems",
+        "xvol": "skip symlinks leaving the volume root",
+        "dotsrch": "show dotfiles in search results",
+        "nodotsrch": "hide dotfiles in search results (default)",
+    },
+    'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
+        "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
+        "mtp=ahash,vhash=media-hash.py": "collects two tags at once",
+    },
+    "thumbnails": {
+        "dthumb": "disables all thumbnails",
+        "dvthumb": "disables video thumbnails",
+        "dathumb": "disables audio thumbnails (spectrograms)",
+        "dithumb": "disables image thumbnails",
+    },
+    "event hooks\n(better explained in --help-hooks)": {
+        "xbu=CMD": "execute CMD before a file upload starts",
+        "xau=CMD": "execute CMD after a file upload finishes",
+        "xiu=CMD": "execute CMD after all uploads finish and volume is idle",
+        "xbr=CMD": "execute CMD before a file rename/move",
+        "xar=CMD": "execute CMD after a file rename/move",
+        "xbd=CMD": "execute CMD before a file delete",
+        "xad=CMD": "execute CMD after a file delete",
+        "xm=CMD": "execute CMD on message",
+    },
+    "client and ux": {
+        "html_head=TXT": "includes TXT in the <head>",
+        "robots": "allows indexing by search engines (default)",
+        "norobots": "kindly asks search engines to leave",
+        "no_sb_md": "disable js sandbox for markdown files",
+        "no_sb_lg": "disable js sandbox for prologue/epilogue",
+        "sb_md": "enable js sandbox for markdown files (default)",
+        "sb_lg": "enable js sandbox for prologue/epilogue (default)",
+        "md_sbf": "list of markdown-sandbox safeguards to disable",
+        "lg_sbf": "list of *logue-sandbox safeguards to disable",
+    },
+    "others": {
+        "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission'
+    },
+}
+
+
+flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
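A short sketch of how the new cfg module's tables are meant to be combined; this mirrors what cgen() does above, and the printed keys are just examples.

# sketch: building the argv-name -> volflag-name map from copyparty.cfg
# and looking up a flag description
from copyparty.cfg import flagdescs, vf_bmap, vf_cmap, vf_vmap

amap = vf_bmap()          # bools,  e.g. "no_dedup" -> "copydupes"
amap.update(vf_vmap())    # simple values
amap.update(vf_cmap())    # lists / complex values
vmap = {v: k for k, v in amap.items()}   # volflag -> argv name

print(amap["no_dedup"])       # copydupes
print(vmap["copydupes"])      # no_dedup
print(flagdescs["nodupe"])    # rejects existing files (instead of symlinking them)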
@@ -13,9 +13,21 @@ from pyftpdlib.filesystems import AbstractedFS, FilesystemError
|
|||||||
from pyftpdlib.handlers import FTPHandler
|
from pyftpdlib.handlers import FTPHandler
|
||||||
from pyftpdlib.servers import FTPServer
|
from pyftpdlib.servers import FTPServer
|
||||||
|
|
||||||
from .__init__ import PY2, TYPE_CHECKING, E
|
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import Daemon, Pebkac, exclude_dotfiles, fsenc, ipnorm
|
from .authsrv import VFS
|
||||||
|
from .util import (
|
||||||
|
Daemon,
|
||||||
|
Pebkac,
|
||||||
|
exclude_dotfiles,
|
||||||
|
fsenc,
|
||||||
|
ipnorm,
|
||||||
|
pybin,
|
||||||
|
relchk,
|
||||||
|
runhook,
|
||||||
|
sanitize_fn,
|
||||||
|
vjoin,
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from pyftpdlib.ioloop import IOLoop
|
from pyftpdlib.ioloop import IOLoop
|
||||||
@@ -115,6 +127,9 @@ class FtpFs(AbstractedFS):
|
|||||||
self.listdirinfo = self.listdir
|
self.listdirinfo = self.listdir
|
||||||
self.chdir(".")
|
self.chdir(".")
|
||||||
|
|
||||||
|
def die(self, msg):
|
||||||
|
self.h.die(msg)
|
||||||
|
|
||||||
def v2a(
|
def v2a(
|
||||||
self,
|
self,
|
||||||
vpath: str,
|
vpath: str,
|
||||||
@@ -122,16 +137,23 @@ class FtpFs(AbstractedFS):
|
|||||||
w: bool = False,
|
w: bool = False,
|
||||||
m: bool = False,
|
m: bool = False,
|
||||||
d: bool = False,
|
d: bool = False,
|
||||||
) -> str:
|
) -> tuple[str, VFS, str]:
|
||||||
try:
|
try:
|
||||||
vpath = vpath.replace("\\", "/").lstrip("/")
|
vpath = vpath.replace("\\", "/").lstrip("/")
|
||||||
|
rd, fn = os.path.split(vpath)
|
||||||
|
if ANYWIN and relchk(rd):
|
||||||
|
logging.warning("malicious vpath: %s", vpath)
|
||||||
|
self.die("Unsupported characters in filepath")
|
||||||
|
|
||||||
|
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
|
||||||
|
vpath = vjoin(rd, fn)
|
||||||
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
|
||||||
if not vfs.realpath:
|
if not vfs.realpath:
|
||||||
raise FilesystemError("no filesystem mounted at this path")
|
self.die("No filesystem mounted at this path")
|
||||||
|
|
||||||
return os.path.join(vfs.realpath, rem)
|
return os.path.join(vfs.realpath, rem), vfs, rem
|
||||||
except Pebkac as ex:
|
except Pebkac as ex:
|
||||||
raise FilesystemError(str(ex))
|
self.die(str(ex))
|
||||||
|
|
||||||
def rv2a(
|
def rv2a(
|
||||||
self,
|
self,
|
||||||
@@ -140,7 +162,7 @@ class FtpFs(AbstractedFS):
|
|||||||
w: bool = False,
|
w: bool = False,
|
||||||
m: bool = False,
|
m: bool = False,
|
||||||
d: bool = False,
|
d: bool = False,
|
||||||
) -> str:
|
) -> tuple[str, VFS, str]:
|
||||||
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
|
||||||
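v2a() and rv2a() now return (abspath, vfs, rem) instead of a bare filesystem path, so callers that only need the path index with [0], while upload paths also get the volume (for volflags such as xbu) and the in-volume remainder. A reduced sketch of the two calling styles; the function names are illustrative, not the full FtpFs.

# sketch of how the new tuple return is consumed; "fs" stands for an
# FtpFs instance after this diff
def getsize(fs, path):
    ap = fs.rv2a(path)[0]                   # only the absolute path is needed
    return ap

def before_stor(fs, vpath):
    ap, vfs, rem = fs.v2a(vpath, w=True)    # volume + remainder for hook checks
    return ap, vfs.flags.get("xbu"), rem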
|
|
||||||
def ftp2fs(self, ftppath: str) -> str:
|
def ftp2fs(self, ftppath: str) -> str:
|
||||||
@@ -154,7 +176,7 @@ class FtpFs(AbstractedFS):
|
|||||||
def validpath(self, path: str) -> bool:
|
def validpath(self, path: str) -> bool:
|
||||||
if "/.hist/" in path:
|
if "/.hist/" in path:
|
||||||
if "/up2k." in path or path.endswith("/dir.txt"):
|
if "/up2k." in path or path.endswith("/dir.txt"):
|
||||||
raise FilesystemError("access to this file is forbidden")
|
self.die("Access to this file is forbidden")
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -162,7 +184,7 @@ class FtpFs(AbstractedFS):
|
|||||||
r = "r" in mode
|
r = "r" in mode
|
||||||
w = "w" in mode or "a" in mode or "+" in mode
|
w = "w" in mode or "a" in mode or "+" in mode
|
||||||
|
|
||||||
ap = self.rv2a(filename, r, w)
|
ap = self.rv2a(filename, r, w)[0]
|
||||||
if w:
|
if w:
|
||||||
try:
|
try:
|
||||||
st = bos.stat(ap)
|
st = bos.stat(ap)
|
||||||
@@ -171,7 +193,7 @@ class FtpFs(AbstractedFS):
|
|||||||
td = 0
|
td = 0
|
||||||
|
|
||||||
if td < -1 or td > self.args.ftp_wt:
|
if td < -1 or td > self.args.ftp_wt:
|
||||||
raise FilesystemError("cannot open existing file for writing")
|
self.die("Cannot open existing file for writing")
|
||||||
|
|
||||||
self.validpath(ap)
|
self.validpath(ap)
|
||||||
return open(fsenc(ap), mode)
|
return open(fsenc(ap), mode)
|
||||||
@@ -182,7 +204,7 @@ class FtpFs(AbstractedFS):
|
|||||||
ap = vfs.canonical(rem)
|
ap = vfs.canonical(rem)
|
||||||
if not bos.path.isdir(ap):
|
if not bos.path.isdir(ap):
|
||||||
# returning 550 is library-default and suitable
|
# returning 550 is library-default and suitable
|
||||||
raise FilesystemError("Failed to change directory")
|
self.die("Failed to change directory")
|
||||||
|
|
||||||
self.cwd = nwd
|
self.cwd = nwd
|
||||||
(
|
(
|
||||||
@@ -195,8 +217,8 @@ class FtpFs(AbstractedFS):
|
|||||||
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
|
||||||
|
|
||||||
def mkdir(self, path: str) -> None:
|
def mkdir(self, path: str) -> None:
|
||||||
ap = self.rv2a(path, w=True)
|
ap = self.rv2a(path, w=True)[0]
|
||||||
bos.mkdir(ap)
|
bos.makedirs(ap) # filezilla expects this
|
||||||
|
|
||||||
def listdir(self, path: str) -> list[str]:
|
def listdir(self, path: str) -> list[str]:
|
||||||
vpath = join(self.cwd, path).lstrip("/")
|
vpath = join(self.cwd, path).lstrip("/")
|
||||||
@@ -227,43 +249,42 @@ class FtpFs(AbstractedFS):
|
|||||||
return list(sorted(list(r.keys())))
|
return list(sorted(list(r.keys())))
|
||||||
|
|
||||||
def rmdir(self, path: str) -> None:
|
def rmdir(self, path: str) -> None:
|
||||||
ap = self.rv2a(path, d=True)
|
ap = self.rv2a(path, d=True)[0]
|
||||||
bos.rmdir(ap)
|
bos.rmdir(ap)
|
||||||
|
|
||||||
def remove(self, path: str) -> None:
|
def remove(self, path: str) -> None:
|
||||||
if self.args.no_del:
|
if self.args.no_del:
|
||||||
raise FilesystemError("the delete feature is disabled in server config")
|
self.die("The delete feature is disabled in server config")
|
||||||
|
|
||||||
vp = join(self.cwd, path).lstrip("/")
|
vp = join(self.cwd, path).lstrip("/")
|
||||||
try:
|
try:
|
||||||
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp], [])
|
self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [])
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise FilesystemError(str(ex))
|
self.die(str(ex))
|
||||||
|
|
||||||
def rename(self, src: str, dst: str) -> None:
|
def rename(self, src: str, dst: str) -> None:
|
||||||
if not self.can_move:
|
if not self.can_move:
|
||||||
raise FilesystemError("not allowed for user " + self.h.username)
|
self.die("Not allowed for user " + self.h.username)
|
||||||
|
|
||||||
if self.args.no_mv:
|
if self.args.no_mv:
|
||||||
t = "the rename/move feature is disabled in server config"
|
self.die("The rename/move feature is disabled in server config")
|
||||||
raise FilesystemError(t)
|
|
||||||
|
|
||||||
svp = join(self.cwd, src).lstrip("/")
|
svp = join(self.cwd, src).lstrip("/")
|
||||||
dvp = join(self.cwd, dst).lstrip("/")
|
dvp = join(self.cwd, dst).lstrip("/")
|
||||||
try:
|
try:
|
||||||
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
self.hub.up2k.handle_mv(self.uname, svp, dvp)
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
raise FilesystemError(str(ex))
|
self.die(str(ex))
|
||||||
|
|
||||||
def chmod(self, path: str, mode: str) -> None:
|
def chmod(self, path: str, mode: str) -> None:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def stat(self, path: str) -> os.stat_result:
|
def stat(self, path: str) -> os.stat_result:
|
||||||
try:
|
try:
|
||||||
ap = self.rv2a(path, r=True)
|
ap = self.rv2a(path, r=True)[0]
|
||||||
return bos.stat(ap)
|
return bos.stat(ap)
|
||||||
except:
|
except:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
st = bos.stat(ap)
|
st = bos.stat(ap)
|
||||||
if not stat.S_ISDIR(st.st_mode):
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
raise
|
raise
|
||||||
@@ -271,11 +292,11 @@ class FtpFs(AbstractedFS):
|
|||||||
return st
|
return st
|
||||||
|
|
||||||
def utime(self, path: str, timeval: float) -> None:
|
def utime(self, path: str, timeval: float) -> None:
|
||||||
ap = self.rv2a(path, w=True)
|
ap = self.rv2a(path, w=True)[0]
|
||||||
return bos.utime(ap, (timeval, timeval))
|
return bos.utime(ap, (timeval, timeval))
|
||||||
|
|
||||||
def lstat(self, path: str) -> os.stat_result:
|
def lstat(self, path: str) -> os.stat_result:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.stat(ap)
|
return bos.stat(ap)
|
||||||
|
|
||||||
def isfile(self, path: str) -> bool:
|
def isfile(self, path: str) -> bool:
|
||||||
@@ -286,7 +307,7 @@ class FtpFs(AbstractedFS):
|
|||||||
return False # expected for mojibake in ftp_SIZE()
|
return False # expected for mojibake in ftp_SIZE()
|
||||||
|
|
||||||
def islink(self, path: str) -> bool:
|
def islink(self, path: str) -> bool:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.islink(ap)
|
return bos.path.islink(ap)
|
||||||
|
|
||||||
def isdir(self, path: str) -> bool:
|
def isdir(self, path: str) -> bool:
|
||||||
@@ -297,18 +318,18 @@ class FtpFs(AbstractedFS):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def getsize(self, path: str) -> int:
|
def getsize(self, path: str) -> int:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.getsize(ap)
|
return bos.path.getsize(ap)
|
||||||
|
|
||||||
def getmtime(self, path: str) -> float:
|
def getmtime(self, path: str) -> float:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.getmtime(ap)
|
return bos.path.getmtime(ap)
|
||||||
|
|
||||||
def realpath(self, path: str) -> str:
|
def realpath(self, path: str) -> str:
|
||||||
return path
|
return path
|
||||||
|
|
||||||
def lexists(self, path: str) -> bool:
|
def lexists(self, path: str) -> bool:
|
||||||
ap = self.rv2a(path)
|
ap = self.rv2a(path)[0]
|
||||||
return bos.path.lexists(ap)
|
return bos.path.lexists(ap)
|
||||||
|
|
||||||
def get_user_by_uid(self, uid: int) -> str:
|
def get_user_by_uid(self, uid: int) -> str:
|
||||||
@@ -332,17 +353,40 @@ class FtpHandler(FTPHandler):
|
|||||||
else:
|
else:
|
||||||
super(FtpHandler, self).__init__(conn, server, ioloop)
|
super(FtpHandler, self).__init__(conn, server, ioloop)
|
||||||
|
|
||||||
|
cip = self.remote_ip
|
||||||
|
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
|
||||||
|
|
||||||
# abspath->vpath mapping to resolve log_transfer paths
|
# abspath->vpath mapping to resolve log_transfer paths
|
||||||
self.vfs_map: dict[str, str] = {}
|
self.vfs_map: dict[str, str] = {}
|
||||||
|
|
||||||
# reduce non-debug logging
|
# reduce non-debug logging
|
||||||
self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")]
|
self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")]
|
||||||
|
|
||||||
|
def die(self, msg):
|
||||||
|
self.respond("550 {}".format(msg))
|
||||||
|
raise FilesystemError(msg)
|
||||||
|
|
||||||
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
def ftp_STOR(self, file: str, mode: str = "w") -> Any:
|
||||||
# Optional[str]
|
# Optional[str]
|
||||||
vp = join(self.fs.cwd, file).lstrip("/")
|
vp = join(self.fs.cwd, file).lstrip("/")
|
||||||
ap = self.fs.v2a(vp)
|
ap, vfs, rem = self.fs.v2a(vp)
|
||||||
self.vfs_map[ap] = vp
|
self.vfs_map[ap] = vp
|
||||||
|
xbu = vfs.flags.get("xbu")
|
||||||
|
if xbu and not runhook(
|
||||||
|
None,
|
||||||
|
xbu,
|
||||||
|
ap,
|
||||||
|
vfs.canonical(rem),
|
||||||
|
"",
|
||||||
|
self.username,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
self.cli_ip,
|
||||||
|
0,
|
||||||
|
"",
|
||||||
|
):
|
||||||
|
self.die("Upload blocked by xbu server config")
|
||||||
|
|
||||||
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
|
||||||
ret = FTPHandler.ftp_STOR(self, file, mode)
|
ret = FTPHandler.ftp_STOR(self, file, mode)
|
||||||
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
# print("ftp_STOR: {} {} OK".format(vp, mode))
|
||||||
@@ -367,11 +411,13 @@ class FtpHandler(FTPHandler):
|
|||||||
vfs, rem = vfs.get_dbv(rem)
|
vfs, rem = vfs.get_dbv(rem)
|
||||||
self.hub.up2k.hash_file(
|
self.hub.up2k.hash_file(
|
||||||
vfs.realpath,
|
vfs.realpath,
|
||||||
|
vfs.vpath,
|
||||||
vfs.flags,
|
vfs.flags,
|
||||||
rem,
|
rem,
|
||||||
fn,
|
fn,
|
||||||
self.remote_ip,
|
self.cli_ip,
|
||||||
time.time(),
|
time.time(),
|
||||||
|
self.username,
|
||||||
)
|
)
|
||||||
|
|
||||||
return FTPHandler.log_transfer(
|
return FTPHandler.log_transfer(
|
||||||
@@ -402,7 +448,7 @@ class Ftpd(object):
|
|||||||
h1 = SftpHandler
|
h1 = SftpHandler
|
||||||
except:
|
except:
|
||||||
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
t = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
|
||||||
print(t.format(sys.executable))
|
print(t.format(pybin))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
|
h1.certfile = os.path.join(self.args.E.cfg, "cert.pem")
|
||||||
@@ -435,10 +481,18 @@ class Ftpd(object):
|
|||||||
lgr = logging.getLogger("pyftpdlib")
|
lgr = logging.getLogger("pyftpdlib")
|
||||||
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
|
lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO)
|
||||||
|
|
||||||
|
ips = self.args.i
|
||||||
|
if "::" in ips:
|
||||||
|
ips.append("0.0.0.0")
|
||||||
|
|
||||||
ioloop = IOLoop()
|
ioloop = IOLoop()
|
||||||
for ip in self.args.i:
|
for ip in ips:
|
||||||
for h, lp in hs:
|
for h, lp in hs:
|
||||||
FTPServer((ip, int(lp)), h, ioloop)
|
try:
|
||||||
|
FTPServer((ip, int(lp)), h, ioloop)
|
||||||
|
except:
|
||||||
|
if ip != "0.0.0.0" or "::" not in ips:
|
||||||
|
raise
|
||||||
|
|
||||||
Daemon(ioloop.loop, "ftp")
|
Daemon(ioloop.loop, "ftp")
|
||||||
|
|
||||||
|
(one file's diff was suppressed because it is too large)
@@ -65,6 +65,7 @@ class HttpConn(object):
         self.ico: Ico = Ico(self.args)  # mypy404

         self.t0: float = time.time()  # mypy404
+        self.freshen_pwd: float = 0.0
         self.stopping = False
         self.nreq: int = -1  # mypy404
         self.nbyte: int = 0  # mypy404
@@ -11,9 +11,19 @@ import time

 import queue

+from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams
+
+try:
+    MNFE = ModuleNotFoundError
+except:
+    MNFE = ImportError
+
 try:
     import jinja2
-except ImportError:
+except MNFE:
+    if EXE:
+        raise
+
     print(
         """\033[1;31m
 you do not have jinja2 installed,\033[33m
@@ -28,7 +38,6 @@ except ImportError:
     )
     sys.exit(1)

-from .__init__ import MACOS, TYPE_CHECKING, EnvParams
 from .bos import bos
 from .httpconn import HttpConn
 from .util import (
@@ -81,8 +90,7 @@ class HttpSrv(object):
         self.bans: dict[str, int] = {}
         self.aclose: dict[str, int] = {}

-        self.ip = ""
-        self.port = 0
+        self.bound: set[tuple[str, int]] = set()
         self.name = "hsrv" + nsuf
         self.mutex = threading.Lock()
         self.stopping = False
@@ -110,6 +118,11 @@ class HttpSrv(object):
         zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
         self.prism = os.path.exists(zs)

+        self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
+        if not self.args.no_dav:
+            zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
+            self.mallow += zs.split()
+
         if self.args.zs:
             from .ssdp import SSDPr

@@ -142,7 +155,11 @@ class HttpSrv(object):
             pass

     def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
-        self.nm = NetMap([self.ip], netdevs)
+        ips = set()
+        for ip, _ in self.bound:
+            ips.add(ip)
+
+        self.nm = NetMap(list(ips), netdevs)

     def start_threads(self, n: int) -> None:
         self.tp_nthr += n
@@ -178,20 +195,19 @@ class HttpSrv(object):
     def listen(self, sck: socket.socket, nlisteners: int) -> None:
         if self.args.j != 1:
             # lost in the pickle; redefine
-            try:
-                sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
-            except:
-                pass
-
-            sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-            sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-            sck.settimeout(None)  # < does not inherit, ^ does
+            if not ANYWIN or self.args.reuseaddr:
+                sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

-        self.ip, self.port = sck.getsockname()[:2]
+            sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+            sck.settimeout(None)  # < does not inherit, ^ opts above do
+
+        ip, port = sck.getsockname()[:2]
         self.srvs.append(sck)
+        self.bound.add((ip, port))
         self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
         Daemon(
             self.thr_listen,
-            "httpsrv-n{}-listen-{}-{}".format(self.nid or "0", self.ip, self.port),
+            "httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
             (sck,),
         )
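With listen() now recording every bound (ip, port) pair instead of a single self.ip/self.port, set_netdevs() can hand NetMap the full set of listening addresses. A toy sketch of the dedup step; the bound pairs are made-up examples.

# sketch: collapsing bound (ip, port) pairs to unique ips, as in
# set_netdevs() above
bound = {("127.0.0.1", 3923), ("127.0.0.1", 3945), ("192.168.1.5", 3923)}

ips = set()
for ip, _ in bound:
    ips.add(ip)

print(sorted(ips))  # ['127.0.0.1', '192.168.1.5']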
|||||||
@@ -11,6 +11,7 @@ from ipaddress import IPv4Network, IPv6Network
|
|||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .__init__ import unicode as U
|
from .__init__ import unicode as U
|
||||||
from .multicast import MC_Sck, MCast
|
from .multicast import MC_Sck, MCast
|
||||||
|
from .stolen.dnslib import AAAA
|
||||||
from .stolen.dnslib import CLASS as DC
|
from .stolen.dnslib import CLASS as DC
|
||||||
from .stolen.dnslib import (
|
from .stolen.dnslib import (
|
||||||
NSEC,
|
NSEC,
|
||||||
@@ -20,12 +21,11 @@ from .stolen.dnslib import (
|
|||||||
SRV,
|
SRV,
|
||||||
TXT,
|
TXT,
|
||||||
A,
|
A,
|
||||||
AAAA,
|
|
||||||
DNSHeader,
|
DNSHeader,
|
||||||
DNSQuestion,
|
DNSQuestion,
|
||||||
DNSRecord,
|
DNSRecord,
|
||||||
)
|
)
|
||||||
from .util import CachedSet, Daemon, Netdev, min_ex
|
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
@@ -55,10 +55,11 @@ class MDNS_Sck(MC_Sck):
|
|||||||
self.bp_bye = b""
|
self.bp_bye = b""
|
||||||
|
|
||||||
self.last_tx = 0.0
|
self.last_tx = 0.0
|
||||||
|
self.tx_ex = False
|
||||||
|
|
||||||
|
|
||||||
class MDNS(MCast):
|
class MDNS(MCast):
|
||||||
def __init__(self, hub: "SvcHub") -> None:
|
def __init__(self, hub: "SvcHub", ngen: int) -> None:
|
||||||
al = hub.args
|
al = hub.args
|
||||||
grp4 = "" if al.zm6 else MDNS4
|
grp4 = "" if al.zm6 else MDNS4
|
||||||
grp6 = "" if al.zm4 else MDNS6
|
grp6 = "" if al.zm4 else MDNS6
|
||||||
@@ -66,7 +67,8 @@ class MDNS(MCast):
|
|||||||
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
|
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
|
||||||
)
|
)
|
||||||
self.srv: dict[socket.socket, MDNS_Sck] = {}
|
self.srv: dict[socket.socket, MDNS_Sck] = {}
|
||||||
|
self.logsrc = "mDNS-{}".format(ngen)
|
||||||
|
self.ngen = ngen
|
||||||
self.ttl = 300
|
self.ttl = 300
|
||||||
|
|
||||||
zs = self.args.name + ".local."
|
zs = self.args.name + ".local."
|
||||||
@@ -89,7 +91,7 @@ class MDNS(MCast):
|
|||||||
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
|
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
|
||||||
|
|
||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
self.log_func("mDNS", msg, c)
|
self.log_func(self.logsrc, msg, c)
|
||||||
|
|
||||||
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
|
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
|
||||||
zms = self.args.zms
|
zms = self.args.zms
|
||||||
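MDNS (and SSDPd further down) now take an ngen generation counter and tag their log lines with it; presumably this is so that when the hub restarts the zeroconf services, for example after the set of network interfaces changes, messages from the old and new instances can be told apart. A toy sketch of the tagging; the class and log function are stand-ins, not the real implementation.

# toy sketch of generation-tagged logging as introduced above
class DemoMDNS:
    def __init__(self, ngen):
        self.ngen = ngen
        self.logsrc = "mDNS-{}".format(ngen)

    def log(self, msg):
        print("[{}] {}".format(self.logsrc, msg))

DemoMDNS(1).log("listening")  # [mDNS-1] listening
DemoMDNS(2).log("listening")  # [mDNS-2] listening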
@@ -287,12 +289,15 @@ class MDNS(MCast):
|
|||||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||||
self.rx4.cln()
|
self.rx4.cln()
|
||||||
self.rx6.cln()
|
self.rx6.cln()
|
||||||
|
buf = b""
|
||||||
|
addr = ("0", 0)
|
||||||
for sck in rx:
|
for sck in rx:
|
||||||
buf, addr = sck.recvfrom(4096)
|
|
||||||
try:
|
try:
|
||||||
|
buf, addr = sck.recvfrom(4096)
|
||||||
self.eat(buf, addr, sck)
|
self.eat(buf, addr, sck)
|
||||||
except:
|
except:
|
||||||
if not self.running:
|
if not self.running:
|
||||||
|
self.log("stopped", 2)
|
||||||
return
|
return
|
||||||
|
|
||||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||||
@@ -309,29 +314,38 @@ class MDNS(MCast):
|
|||||||
self.log(t.format(self.hn[:-1]), 2)
|
self.log(t.format(self.hn[:-1]), 2)
|
||||||
self.probing = 0
|
self.probing = 0
|
||||||
|
|
||||||
|
self.log("stopped", 2)
|
||||||
|
|
||||||
def stop(self, panic=False) -> None:
|
def stop(self, panic=False) -> None:
|
||||||
self.running = False
|
self.running = False
|
||||||
if not panic:
|
for srv in self.srv.values():
|
||||||
for srv in self.srv.values():
|
try:
|
||||||
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
|
if panic:
|
||||||
|
srv.sck.close()
|
||||||
|
else:
|
||||||
|
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
self.srv = {}
|
self.srv = {}
|
||||||
|
|
||||||
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
||||||
cip = addr[0]
|
cip = addr[0]
|
||||||
v6 = ":" in cip
|
v6 = ":" in cip
|
||||||
if cip.startswith("169.254") or v6 and not cip.startswith("fe80"):
|
if (cip.startswith("169.254") and not self.ll_ok) or (
|
||||||
|
v6 and not cip.startswith("fe80")
|
||||||
|
):
|
||||||
return
|
return
|
||||||
|
|
||||||
cache = self.rx6 if v6 else self.rx4
|
cache = self.rx6 if v6 else self.rx4
|
||||||
if buf in cache.c:
|
if buf in cache.c:
|
||||||
return
|
return
|
||||||
|
|
||||||
cache.add(buf)
|
|
||||||
srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip) # type: ignore
|
srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip) # type: ignore
|
||||||
if not srv:
|
if not srv:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
cache.add(buf)
|
||||||
now = time.time()
|
now = time.time()
|
||||||
|
|
||||||
if self.args.zmv and cip != srv.ip and cip not in srv.ips:
|
if self.args.zmv and cip != srv.ip and cip not in srv.ips:
|
||||||
@@ -369,6 +383,14 @@ class MDNS(MCast):
|
|||||||
# avahi broadcasting 127.0.0.1-only packets
|
# avahi broadcasting 127.0.0.1-only packets
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# check if we've been given additional IPs
|
||||||
|
for ip in list_ips():
|
||||||
|
if ip in cips:
|
||||||
|
self.sips.add(ip)
|
||||||
|
|
||||||
|
if not self.sips.isdisjoint(cips):
|
||||||
|
return
|
||||||
|
|
||||||
t = "mdns zeroconf: "
|
t = "mdns zeroconf: "
|
||||||
if self.probing:
|
if self.probing:
|
||||||
t += "Cannot start; hostname '{}' is occupied"
|
t += "Cannot start; hostname '{}' is occupied"
|
||||||
@@ -502,6 +524,15 @@ class MDNS(MCast):
|
|||||||
if now < srv.last_tx + cooldown:
|
if now < srv.last_tx + cooldown:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
srv.sck.sendto(msg, (srv.grp, 5353))
|
try:
|
||||||
srv.last_tx = now
|
srv.sck.sendto(msg, (srv.grp, 5353))
|
||||||
|
srv.last_tx = now
|
||||||
|
except Exception as ex:
|
||||||
|
if srv.tx_ex:
|
||||||
|
return True
|
||||||
|
|
||||||
|
srv.tx_ex = True
|
||||||
|
t = "tx({},|{}|,{}): {}"
|
||||||
|
self.log(t.format(srv.ip, len(msg), cooldown, ex), 3)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|||||||
@@ -8,9 +8,19 @@ import shutil
|
|||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from .__init__ import PY2, WINDOWS, E, unicode
|
from .__init__ import EXE, PY2, WINDOWS, E, unicode
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg
|
from .util import (
|
||||||
|
FFMPEG_URL,
|
||||||
|
REKOBO_LKEY,
|
||||||
|
fsenc,
|
||||||
|
min_ex,
|
||||||
|
pybin,
|
||||||
|
retchk,
|
||||||
|
runcmd,
|
||||||
|
sfsenc,
|
||||||
|
uncyg,
|
||||||
|
)
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
from typing import Any, Union
|
from typing import Any, Union
|
||||||
@@ -259,7 +269,9 @@ class MTag(object):
|
|||||||
self.args = args
|
self.args = args
|
||||||
self.usable = True
|
self.usable = True
|
||||||
self.prefer_mt = not args.no_mtag_ff
|
self.prefer_mt = not args.no_mtag_ff
|
||||||
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
|
self.backend = (
|
||||||
|
"ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen"
|
||||||
|
)
|
||||||
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
|
||||||
mappings = args.mtm
|
mappings = args.mtm
|
||||||
or_ffprobe = " or FFprobe"
|
or_ffprobe = " or FFprobe"
|
||||||
@@ -285,9 +297,14 @@ class MTag(object):
|
|||||||
self.log(msg, c=3)
|
self.log(msg, c=3)
|
||||||
|
|
||||||
if not self.usable:
|
if not self.usable:
|
||||||
|
if EXE:
|
||||||
|
t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: "
|
||||||
|
self.log(t + FFMPEG_URL)
|
||||||
|
return
|
||||||
|
|
||||||
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||||
pybin = os.path.basename(sys.executable)
|
pyname = os.path.basename(pybin)
|
||||||
self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
|
self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1)
|
||||||
return
|
return
|
||||||
|
|
||||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||||
@@ -456,7 +473,10 @@ class MTag(object):
|
|||||||
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
|
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
|
||||||
if not md.info.length and not md.info.codec:
|
if not md.info.length and not md.info.codec:
|
||||||
raise Exception()
|
raise Exception()
|
||||||
except:
|
except Exception as ex:
|
||||||
|
if self.args.mtag_v:
|
||||||
|
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
|
||||||
|
|
||||||
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
|
||||||
|
|
||||||
sz = bos.path.getsize(abspath)
|
sz = bos.path.getsize(abspath)
|
||||||
@@ -519,12 +539,15 @@ class MTag(object):
|
|||||||
|
|
||||||
env = os.environ.copy()
|
env = os.environ.copy()
|
||||||
try:
|
try:
|
||||||
|
if EXE:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||||
pypath = str(os.pathsep.join(zsl))
|
pypath = str(os.pathsep.join(zsl))
|
||||||
env["PYTHONPATH"] = pypath
|
env["PYTHONPATH"] = pypath
|
||||||
except:
|
except:
|
||||||
if not E.ox:
|
if not E.ox and not EXE:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
ret: dict[str, Any] = {}
|
ret: dict[str, Any] = {}
|
||||||
@@ -532,7 +555,7 @@ class MTag(object):
|
|||||||
try:
|
try:
|
||||||
cmd = [parser.bin, abspath]
|
cmd = [parser.bin, abspath]
|
||||||
if parser.bin.endswith(".py"):
|
if parser.bin.endswith(".py"):
|
||||||
cmd = [sys.executable] + cmd
|
cmd = [pybin] + cmd
|
||||||
|
|
||||||
args = {
|
args = {
|
||||||
"env": env,
|
"env": env,
|
||||||
@@ -551,7 +574,7 @@ class MTag(object):
|
|||||||
else:
|
else:
|
||||||
cmd = ["nice"] + cmd
|
cmd = ["nice"] + cmd
|
||||||
|
|
||||||
bcmd = [fsenc(x) for x in cmd]
|
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
|
||||||
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
rc, v, err = runcmd(bcmd, **args) # type: ignore
|
||||||
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)
|
||||||
v = v.strip()
|
v = v.strip()
|
||||||
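The mtag changes above swap sys.executable for pybin and skip the PYTHONPATH setup in frozen EXE builds. A reduced sketch of how an mtp parser command and environment are assembled; the parser path and media file are hypothetical, and pybin is assumed to be roughly sys.executable.

# sketch of launching an external ".py" tag parser after this change
import os
import sys

pybin = sys.executable                      # copyparty.util exposes roughly this as "pybin"
parser_bin = "/opt/parsers/audio-bpm.py"    # hypothetical mtp parser
abspath = "/mnt/music/track.flac"           # hypothetical upload

cmd = [parser_bin, abspath]
if parser_bin.endswith(".py"):
    cmd = [pybin] + cmd                     # run .py parsers under our own interpreter

env = os.environ.copy()
# the real code also prepends the copyparty package dir so parsers can
# "import copyparty"; skipped in frozen EXE builds
env["PYTHONPATH"] = os.pathsep.join([str(x) for x in sys.path if x])
print(cmd)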
|
|||||||
@@ -5,10 +5,17 @@ import socket
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
import ipaddress
|
import ipaddress
|
||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import (
|
||||||
|
IPv4Address,
|
||||||
|
IPv4Network,
|
||||||
|
IPv6Address,
|
||||||
|
IPv6Network,
|
||||||
|
ip_address,
|
||||||
|
ip_network,
|
||||||
|
)
|
||||||
|
|
||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import MACOS, TYPE_CHECKING
|
||||||
from .util import MACOS, Netdev, min_ex, spack
|
from .util import Netdev, find_prefix, min_ex, spack
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .svchub import SvcHub
|
from .svchub import SvcHub
|
||||||
@@ -75,6 +82,7 @@ class MCast(object):
|
|||||||
|
|
||||||
self.srv: dict[socket.socket, MC_Sck] = {} # listening sockets
|
self.srv: dict[socket.socket, MC_Sck] = {} # listening sockets
|
||||||
self.sips: set[str] = set() # all listening ips (including failed attempts)
|
self.sips: set[str] = set() # all listening ips (including failed attempts)
|
||||||
|
self.ll_ok: set[str] = set() # fallback linklocal IPv4 and IPv6 addresses
|
||||||
self.b2srv: dict[bytes, MC_Sck] = {} # binary-ip -> server socket
|
self.b2srv: dict[bytes, MC_Sck] = {} # binary-ip -> server socket
|
||||||
self.b4: list[bytes] = [] # sorted list of binary-ips
|
self.b4: list[bytes] = [] # sorted list of binary-ips
|
||||||
self.b6: list[bytes] = [] # sorted list of binary-ips
|
self.b6: list[bytes] = [] # sorted list of binary-ips
|
||||||
@@ -102,15 +110,23 @@ class MCast(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
||||||
|
ips = find_prefix(ips, netdevs)
|
||||||
# ip -> ip/prefix
|
|
||||||
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
|
|
||||||
|
|
||||||
on = self.on[:]
|
on = self.on[:]
|
||||||
off = self.off[:]
|
off = self.off[:]
|
||||||
for lst in (on, off):
|
for lst in (on, off):
|
||||||
for av in list(lst):
|
for av in list(lst):
|
||||||
|
try:
|
||||||
|
arg_net = ip_network(av, False)
|
||||||
|
except:
|
||||||
|
arg_net = None
|
||||||
|
|
||||||
for sk, sv in netdevs.items():
|
for sk, sv in netdevs.items():
|
||||||
|
if arg_net:
|
||||||
|
net_ip = ip_address(sk.split("/")[0])
|
||||||
|
if net_ip in arg_net and sk not in lst:
|
||||||
|
lst.append(sk)
|
||||||
|
|
||||||
if (av == str(sv.idx) or av == sv.name) and sk not in lst:
|
if (av == str(sv.idx) or av == sv.name) and sk not in lst:
|
||||||
lst.append(sk)
|
lst.append(sk)
|
||||||
|
|
||||||
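The MCast change above lets the zeroconf on/off filters (the zm-on / zs-on style lists) accept a subnet in addition to an interface name or index; any netdev whose address falls inside the given network is selected. A standalone sketch of the matching; the device map and filter value are made-up examples.

# sketch: selecting netdevs whose address falls inside a given subnet,
# using ip_network / ip_address as in the hunk above
from ipaddress import ip_address, ip_network

netdevs = {"192.168.1.5/24": "eth0", "10.89.3.7/16": "wg0", "fe80::1/64": "eth0"}
av = "10.89.0.0/16"            # value given on the commandline

try:
    arg_net = ip_network(av, False)
except ValueError:
    arg_net = None

lst = []
for sk in netdevs:
    if arg_net:
        net_ip = ip_address(sk.split("/")[0])
        if net_ip in arg_net and sk not in lst:
            lst.append(sk)

print(lst)  # ['10.89.3.7/16']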
@@ -166,9 +182,21 @@ class MCast(object):
|
|||||||
srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)
|
srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)
|
||||||
|
|
||||||
# gvfs breaks if a linklocal ip appears in a dns reply
|
# gvfs breaks if a linklocal ip appears in a dns reply
|
||||||
srv.ips = {k: v for k, v in srv.ips.items() if not k.startswith("fe80")}
|
ll = {
|
||||||
|
k: v
|
||||||
|
for k, v in srv.ips.items()
|
||||||
|
if k.startswith("169.254") or k.startswith("fe80")
|
||||||
|
}
|
||||||
|
rt = {k: v for k, v in srv.ips.items() if k not in ll}
|
||||||
|
|
||||||
|
if self.args.ll or not rt:
|
||||||
|
self.ll_ok.update(list(ll))
|
||||||
|
|
||||||
|
if not self.args.ll:
|
||||||
|
srv.ips = rt or ll
|
||||||
|
|
||||||
if not srv.ips:
|
if not srv.ips:
|
||||||
self.log("no routable IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
self.log("no IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -318,6 +346,16 @@ class MCast(object):
|
|||||||
# just give it something
|
# just give it something
|
||||||
ret = list(self.srv.values())[0]
|
ret = list(self.srv.values())[0]
|
||||||
|
|
||||||
|
if not ret and cip.startswith("169.254"):
|
||||||
|
# idk how to map LL IPv4 msgs to nics;
|
||||||
|
# just pick one and hope for the best
|
||||||
|
lls = (
|
||||||
|
x
|
||||||
|
for x in self.srv.values()
|
||||||
|
if next((y for y in x.ips if y in self.ll_ok), None)
|
||||||
|
)
|
||||||
|
ret = next(lls, None)
|
||||||
|
|
||||||
if ret:
|
if ret:
|
||||||
t = "new client on {} ({}): {}"
|
t = "new client on {} ({}): {}"
|
||||||
self.log(t.format(ret.name, ret.net, cip), 6)
|
self.log(t.format(ret.name, ret.net, cip), 6)
|
||||||
|
|||||||
@@ -9,13 +9,13 @@ import sys
 import time
 from types import SimpleNamespace

-from .__init__ import ANYWIN, TYPE_CHECKING
+from .__init__ import ANYWIN, EXE, TYPE_CHECKING
 from .authsrv import LEELOO_DALLAS, VFS
 from .bos import bos
-from .util import Daemon, min_ex
+from .util import Daemon, min_ex, pybin, runhook

 if True:  # pylint: disable=using-constant-test
-    from typing import Any
+    from typing import Any, Union

 if TYPE_CHECKING:
     from .svchub import SvcHub
@@ -42,8 +42,12 @@ class SMB(object):
             from impacket import smbserver
             from impacket.ntlm import compute_lmhash, compute_nthash
         except ImportError:
+            if EXE:
+                print("copyparty.exe cannot do SMB")
+                sys.exit(1)
+
             m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
-            print(m.format(min_ex(), sys.executable))
+            print(m.format(min_ex(), pybin))
             sys.exit(1)

         # patch vfs into smbserver.os
@@ -109,6 +113,9 @@ class SMB(object):
         self.stop = srv.stop
         self.log("smb", "listening @ {}:{}".format(ip, port))

+    def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
+        self.log("smb", msg, c)
+
     def start(self) -> None:
         Daemon(self.srv.start)

@@ -165,8 +172,15 @@ class SMB(object):
             yeet("blocked write (no --smbw): " + vpath)

         vfs, ap = self._v2a("open", vpath, *a)
-        if wr and not vfs.axs.uwrite:
-            yeet("blocked write (no-write-acc): " + vpath)
+        if wr:
+            if not vfs.axs.uwrite:
+                yeet("blocked write (no-write-acc): " + vpath)
+
+            xbu = vfs.flags.get("xbu")
+            if xbu and not runhook(
+                self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
+            ):
+                yeet("blocked by xbu server config: " + vpath)

         ret = bos.open(ap, flags, *a, mode=chmod, **ka)
         if wr:
@@ -194,11 +208,13 @@ class SMB(object):
         vfs, rem = vfs.get_dbv(rem)
         self.hub.up2k.hash_file(
             vfs.realpath,
+            vfs.vpath,
             vfs.flags,
             rem,
             fn,
             "1.7.6.2",
             time.time(),
+            "",
         )

     def _rename(self, vp1: str, vp2: str) -> None:
|||||||
@@ -8,7 +8,7 @@ from email.utils import formatdate
|
|||||||
|
|
||||||
from .__init__ import TYPE_CHECKING
|
from .__init__ import TYPE_CHECKING
|
||||||
from .multicast import MC_Sck, MCast
|
from .multicast import MC_Sck, MCast
|
||||||
from .util import CachedSet, min_ex
|
from .util import CachedSet, html_escape, min_ex
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .broker_util import BrokerCli
|
from .broker_util import BrokerCli
|
||||||
@@ -73,13 +73,15 @@ class SSDPr(object):
|
|||||||
</device>
|
</device>
|
||||||
</root>"""
|
</root>"""
|
||||||
|
|
||||||
|
c = html_escape
|
||||||
sip, sport = hc.s.getsockname()[:2]
|
sip, sport = hc.s.getsockname()[:2]
|
||||||
|
sip = sip.replace("::ffff:", "")
|
||||||
proto = "https" if self.args.https_only else "http"
|
proto = "https" if self.args.https_only else "http"
|
||||||
ubase = "{}://{}:{}".format(proto, sip, sport)
|
ubase = "{}://{}:{}".format(proto, sip, sport)
|
||||||
zsl = self.args.zsl
|
zsl = self.args.zsl
|
||||||
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
|
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
|
||||||
name = "{} @ {}".format(self.args.doctitle, self.args.name)
|
name = "{} @ {}".format(self.args.doctitle, self.args.name)
|
||||||
zs = zs.strip().format(ubase, url, name, self.args.zsid)
|
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
||||||
hc.reply(zs.encode("utf-8", "replace"))
|
hc.reply(zs.encode("utf-8", "replace"))
|
||||||
return False # close connectino
|
return False # close connectino
|
||||||
|
|
||||||
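The SSDPr.reply change above strips the ::ffff: IPv4-mapped prefix from the bound address and runs the URL, server name and zsid through html_escape before .format() builds the device-description XML, so special characters in the configured title or name can no longer break the markup. A tiny sketch, assuming copyparty.util.html_escape escapes at least &, < and > when called with a single argument as it is above.

# sketch: escaping a user-controlled name before embedding it in the
# SSDP device-description XML
from copyparty.util import html_escape as c

name = "media & <stuff> @ basement"
print("<friendlyName>{}</friendlyName>".format(c(name)))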
@@ -87,19 +89,22 @@ class SSDPr(object):
|
|||||||
class SSDPd(MCast):
|
class SSDPd(MCast):
|
||||||
"""communicates with ssdp clients over multicast"""
|
"""communicates with ssdp clients over multicast"""
|
||||||
|
|
||||||
def __init__(self, hub: "SvcHub") -> None:
|
def __init__(self, hub: "SvcHub", ngen: int) -> None:
|
||||||
al = hub.args
|
al = hub.args
|
||||||
vinit = al.zsv and not al.zmv
|
vinit = al.zsv and not al.zmv
|
||||||
super(SSDPd, self).__init__(
|
super(SSDPd, self).__init__(
|
||||||
hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
|
hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
|
||||||
)
|
)
|
||||||
self.srv: dict[socket.socket, SSDP_Sck] = {}
|
self.srv: dict[socket.socket, SSDP_Sck] = {}
|
||||||
|
self.logsrc = "SSDP-{}".format(ngen)
|
||||||
|
self.ngen = ngen
|
||||||
|
|
||||||
self.rxc = CachedSet(0.7)
|
self.rxc = CachedSet(0.7)
|
||||||
self.txc = CachedSet(5) # win10: every 3 sec
|
self.txc = CachedSet(5) # win10: every 3 sec
|
||||||
self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)
|
self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)
|
||||||
|
|
||||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||||
self.log_func("SSDP", msg, c)
|
self.log_func(self.logsrc, msg, c)
|
||||||
|
|
||||||
def run(self) -> None:
|
def run(self) -> None:
|
||||||
try:
|
try:
|
||||||
@@ -125,41 +130,51 @@ class SSDPd(MCast):
|
|||||||
|
|
||||||
self.log("listening")
|
self.log("listening")
|
||||||
while self.running:
|
while self.running:
|
||||||
rdy = select.select(self.srv, [], [], 180)
|
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||||
self.rxc.cln()
|
self.rxc.cln()
|
||||||
|
buf = b""
|
||||||
|
addr = ("0", 0)
|
||||||
for sck in rx:
|
for sck in rx:
|
||||||
buf, addr = sck.recvfrom(4096)
|
|
||||||
try:
|
try:
|
||||||
|
buf, addr = sck.recvfrom(4096)
|
||||||
self.eat(buf, addr)
|
self.eat(buf, addr)
|
||||||
except:
|
except:
|
||||||
if not self.running:
|
if not self.running:
|
||||||
return
|
break
|
||||||
|
|
||||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||||
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
||||||
)
|
)
|
||||||
self.log(t, 6)
|
self.log(t, 6)
|
||||||
|
|
||||||
|
self.log("stopped", 2)
|
||||||
|
|
||||||
def stop(self) -> None:
|
def stop(self) -> None:
|
||||||
self.running = False
|
self.running = False
|
||||||
|
for srv in self.srv.values():
|
||||||
|
try:
|
||||||
|
srv.sck.close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
self.srv = {}
|
self.srv = {}
|
||||||
|
|
||||||
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
||||||
cip = addr[0]
|
cip = addr[0]
|
||||||
if cip.startswith("169.254"):
|
if cip.startswith("169.254") and not self.ll_ok:
|
||||||
return
|
return
|
||||||
|
|
||||||
if buf in self.rxc.c:
|
if buf in self.rxc.c:
|
||||||
return
|
return
|
||||||
|
|
||||||
self.rxc.add(buf)
|
|
||||||
srv: Optional[SSDP_Sck] = self.map_client(cip) # type: ignore
|
srv: Optional[SSDP_Sck] = self.map_client(cip) # type: ignore
|
||||||
if not srv:
|
if not srv:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
self.rxc.add(buf)
|
||||||
if not buf.startswith(b"M-SEARCH * HTTP/1."):
|
if not buf.startswith(b"M-SEARCH * HTTP/1."):
|
||||||
raise Exception("not an ssdp message")
|
return
|
||||||
|
|
||||||
if not self.ptn_st.search(buf):
|
if not self.ptn_st.search(buf):
|
||||||
return
|
return
|
||||||
@@ -183,7 +198,8 @@ BOOTID.UPNP.ORG: 0
|
|||||||
CONFIGID.UPNP.ORG: 1
|
CONFIGID.UPNP.ORG: 1
|
||||||
|
|
||||||
"""
|
"""
|
||||||
zs = zs.format(formatdate(usegmt=True), srv.ip, srv.hport, self.args.zsid)
|
v4 = srv.ip.replace("::ffff:", "")
|
||||||
|
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
|
||||||
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
||||||
srv.sck.sendto(zb, addr[:2])
|
srv.sck.sendto(zb, addr[:2])
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

+import stat
 import tarfile

 from queue import Queue
@@ -79,6 +80,9 @@ class StreamTar(StreamArc):
         src = f["ap"]
         fsi = f["st"]

+        if stat.S_ISDIR(fsi.st_mode):
+            return
+
         inf = tarfile.TarInfo(name=name)
         inf.mode = fsi.st_mode
         inf.size = fsi.st_size
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
 import argparse
 import base64
 import calendar
+import errno
 import gzip
 import logging
 import os
@@ -27,13 +28,14 @@ if True:  # pylint: disable=using-constant-test
     import typing
     from typing import Any, Optional, Union

-from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
+from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
 from .authsrv import AuthSrv
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
 from .tcpsrv import TcpSrv
 from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
 from .up2k import Up2k
 from .util import (
+    FFMPEG_URL,
     VERSIONS,
     Daemon,
     Garda,
@@ -43,6 +45,7 @@ from .util import (
     ansi_re,
     min_ex,
     mp,
+    pybin,
     start_log_thrs,
     start_stackmon,
 )
@@ -66,8 +69,15 @@ class SvcHub(object):
     put() can return a queue (if want_reply=True) which has a blocking get() with the response.
     """

-    def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
+    def __init__(
+        self,
+        args: argparse.Namespace,
+        dargs: argparse.Namespace,
+        argv: list[str],
+        printed: str,
+    ) -> None:
         self.args = args
+        self.dargs = dargs
         self.argv = argv
         self.E: EnvParams = args.E
         self.logf: Optional[typing.TextIO] = None
@@ -96,13 +106,13 @@ class SvcHub(object):
         if args.sss or args.s >= 3:
             args.ss = True
             args.no_dav = True
+            args.no_logues = True
+            args.no_readme = True
             args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
             args.ls = args.ls or "**,*,ln,p,r"

         if args.ss or args.s >= 2:
             args.s = True
-            args.no_logues = True
-            args.no_readme = True
             args.unpost = 0
             args.no_del = True
             args.no_mv = True
@@ -139,25 +149,26 @@ class SvcHub(object):
             self.log("root", t.format(args.j))

         if not args.no_fpool and args.j != 1:
-            t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
-            if ANYWIN:
-                t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
-                args.no_fpool = True
-
-            self.log("root", t, c=3)
+            t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
+            self.log("root", t.format(args.j), c=3)
+            args.no_fpool = True

         bri = "zy"[args.theme % 2 :][:1]
         ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
         args.theme = "{0}{1} {0} {1}".format(ch, bri)

-        if not args.hardlink and args.never_symlink:
-            args.no_dedup = True
-
         if args.log_fk:
             args.log_fk = re.compile(args.log_fk)

         # initiate all services to manage
-        self.asrv = AuthSrv(self.args, self.log)
+        self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
+
+        if args.cgen:
+            self.asrv.cgen()
+
+        if args.exit == "cfg":
+            sys.exit(0)
+
         if args.ls:
             self.asrv.dbg_ls()

@@ -182,6 +193,7 @@ class SvcHub(object):

         self.args.th_dec = list(decs.keys())
         self.thumbsrv = None
+        want_ff = False
         if not args.no_thumb:
             t = ", ".join(self.args.th_dec) or "(None available)"
             self.log("thumb", "decoder preference: {}".format(t))
@@ -193,8 +205,12 @@ class SvcHub(object):
             if self.args.th_dec:
                 self.thumbsrv = ThumbSrv(self)
             else:
+                want_ff = True
                 msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
-                msg = msg.format(" " * 37, os.path.basename(sys.executable))
+                msg = msg.format(" " * 37, os.path.basename(pybin))
+                if EXE:
+                    msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails"
+
                 self.log("thumb", msg, c=3)

         if not args.no_acode and args.no_thumb:
@@ -206,6 +222,10 @@ class SvcHub(object):
             msg = "setting --no-acode because either FFmpeg or FFprobe is not available"
             self.log("thumb", msg, c=6)
             args.no_acode = True
+            want_ff = True
+
+        if want_ff and ANYWIN:
+            self.log("thumb", "download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)

         args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)

@@ -236,6 +256,7 @@ class SvcHub(object):
         if not args.zms:
             args.zms = zms

+        self.zc_ngen = 0
         self.mdns: Optional["MDNS"] = None
         self.ssdp: Optional["SSDPd"] = None

@@ -295,11 +316,40 @@ class SvcHub(object):
         al.zs_on = al.zs_on or al.z_on
         al.zm_off = al.zm_off or al.z_off
         al.zs_off = al.zs_off or al.z_off
-        for n in ("zm_on", "zm_off", "zs_on", "zs_off"):
-            vs = getattr(al, n).replace(" ", ",").split(",")
+        ns = "zm_on zm_off zs_on zs_off acao acam"
+        for n in ns.split(" "):
+            vs = getattr(al, n).split(",")
+            vs = [x.strip() for x in vs]
             vs = [x for x in vs if x]
             setattr(al, n, vs)

+        ns = "acao acam"
+        for n in ns.split(" "):
+            vs = getattr(al, n)
+            vd = {zs: 1 for zs in vs}
+            setattr(al, n, vd)
+
+        ns = "acao"
+        for n in ns.split(" "):
+            vs = getattr(al, n)
+            vs = [x.lower() for x in vs]
+            setattr(al, n, vs)
+
+        R = al.rp_loc
+        if "//" in R or ":" in R:
+            t = "found URL in --rp-loc; it should be just the location, for example /foo/bar"
+            raise Exception(t)
+
+        al.R = R = R.strip("/")
+        al.SR = "/" + R if R else ""
+        al.RS = R + "/" if R else ""
+        al.SRS = "/" + R + "/" if R else "/"
+
+        if al.rsp_jtr:
+            al.rsp_slp = 0.000001
+
+        al.th_covers = set(al.th_covers.split(","))
+
         return True

     def _setlimits(self) -> None:
@@ -354,6 +404,7 @@ class SvcHub(object):

     def _setup_logfile(self, printed: str) -> None:
         base_fn = fn = sel_fn = self._logname()
+        do_xz = fn.lower().endswith(".xz")
         if fn != self.args.lo:
             ctr = 0
             # yup this is a race; if started sufficiently concurrently, two
@@ -365,7 +416,7 @@ class SvcHub(object):
             fn = sel_fn

         try:
-            if fn.lower().endswith(".xz"):
+            if do_xz:
                 import lzma

                 lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
@@ -376,7 +427,7 @@ class SvcHub(object):

             lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")

-        argv = [sys.executable] + self.argv
+        argv = [pybin] + self.argv
         if hasattr(shlex, "quote"):
             argv = [shlex.quote(x) for x in argv]
         else:
@@ -392,24 +443,10 @@ class SvcHub(object):

     def run(self) -> None:
         self.tcpsrv.run()
-        if getattr(self.args, "zm", False):
-            try:
-                from .mdns import MDNS
-
-                self.mdns = MDNS(self)
-                Daemon(self.mdns.run, "mdns")
-            except:
-                self.log("root", "mdns startup failed;\n" + min_ex(), 3)
-
-        if getattr(self.args, "zs", False):
-            try:
-                from .ssdp import SSDPd
-
-                self.ssdp = SSDPd(self)
-                Daemon(self.ssdp.run, "ssdp")
-            except:
-                self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
+        if getattr(self.args, "z_chk", 0) and (
+            getattr(self.args, "zm", False) or getattr(self.args, "zs", False)
+        ):
+            Daemon(self.tcpsrv.netmon, "netmon")

         Daemon(self.thr_httpsrv_up, "sig-hsrv-up2")

@@ -441,6 +478,33 @@ class SvcHub(object):
         else:
             self.stop_thr()

+    def start_zeroconf(self) -> None:
+        self.zc_ngen += 1
+
+        if getattr(self.args, "zm", False):
+            try:
+                from .mdns import MDNS
+
+                if self.mdns:
+                    self.mdns.stop(True)
+
+                self.mdns = MDNS(self, self.zc_ngen)
+                Daemon(self.mdns.run, "mdns")
+            except:
+                self.log("root", "mdns startup failed;\n" + min_ex(), 3)
+
+        if getattr(self.args, "zs", False):
+            try:
+                from .ssdp import SSDPd
+
+                if self.ssdp:
+                    self.ssdp.stop()
+
+                self.ssdp = SSDPd(self, self.zc_ngen)
+                Daemon(self.ssdp.run, "ssdp")
+            except:
+                self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
+
     def reload(self) -> str:
         if self.reloading:
             return "cannot reload; already in progress"
@@ -625,13 +689,20 @@ class SvcHub(object):
                 print(msg.encode("utf-8", "replace").decode(), end="")
             except:
                 print(msg.encode("ascii", "replace").decode(), end="")
+        except OSError as ex:
+            if ex.errno != errno.EPIPE:
+                raise

         if self.logf:
             self.logf.write(msg)

     def pr(self, *a: Any, **ka: Any) -> None:
-        with self.log_mutex:
-            print(*a, **ka)
+        try:
+            with self.log_mutex:
+                print(*a, **ka)
+        except OSError as ex:
+            if ex.errno != errno.EPIPE:
+                raise

     def check_mp_support(self) -> str:
         if MACOS:
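A quick illustration of the argument normalization added above: the comma-separated zeroconf and CORS options are split and stripped into lists, `acao`/`acam` are then turned into dicts, and `acao` is lowercased. A standalone sketch of those three passes (the example values are invented):

```python
import argparse

# invented values, mirroring the three normalization passes above
al = argparse.Namespace(acao=" https://A.example , https://b.example ", acam="GET, HEAD")

for n in ("acao", "acam"):
    vs = [x.strip() for x in getattr(al, n).split(",")]
    setattr(al, n, [x for x in vs if x])

for n in ("acao", "acam"):
    setattr(al, n, {zs: 1 for zs in getattr(al, n)})

al.acao = [x.lower() for x in al.acao]  # iterating the dict yields its keys

print(al.acao)  # ['https://a.example', 'https://b.example']
print(al.acam)  # {'GET': 1, 'HEAD': 1}
```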
@@ -2,6 +2,7 @@
 from __future__ import print_function, unicode_literals

 import calendar
+import stat
 import time
 import zlib

@@ -238,6 +239,9 @@ class StreamZip(StreamArc):
         src = f["ap"]
         st = f["st"]

+        if stat.S_ISDIR(st.st_mode):
+            return
+
         sz = st.st_size
         ts = st.st_mtime
@@ -5,6 +5,7 @@ import os
 import re
 import socket
 import sys
+import time

 from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
 from .stolen.qrcodegen import QrCode
@@ -28,6 +29,9 @@ if TYPE_CHECKING:
 if not hasattr(socket, "IPPROTO_IPV6"):
     setattr(socket, "IPPROTO_IPV6", 41)

+if not hasattr(socket, "IP_FREEBIND"):
+    setattr(socket, "IP_FREEBIND", 15)
+

 class TcpSrv(object):
     """
@@ -46,6 +50,8 @@ class TcpSrv(object):
         self.stopping = False
         self.srv: list[socket.socket] = []
         self.bound: list[tuple[str, int]] = []
+        self.netdevs: dict[str, Netdev] = {}
+        self.netlist = ""
         self.nsrv = 0
         self.qr = ""
         pad = False
@@ -121,6 +127,20 @@ class TcpSrv(object):
         else:
             self.netdevs = {}

+        # keep IPv6 LL-only nics
+        ll_ok: set[str] = set()
+        for ip, nd in self.netdevs.items():
+            if not ip.startswith("fe80"):
+                continue
+
+            just_ll = True
+            for ip2, nd2 in self.netdevs.items():
+                if nd == nd2 and ":" in ip2 and not ip2.startswith("fe80"):
+                    just_ll = False
+
+            if just_ll or self.args.ll:
+                ll_ok.add(ip.split("/")[0])
+
         qr1: dict[str, list[int]] = {}
         qr2: dict[str, list[int]] = {}
         msgs = []
@@ -128,7 +148,7 @@ class TcpSrv(object):
         title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
         t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
         for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
-            if ip.startswith("fe80"):
+            if ip.startswith("fe80") and ip not in ll_ok:
                 continue

             for port in sorted(self.args.p):
@@ -195,21 +215,28 @@ class TcpSrv(object):
     def _listen(self, ip: str, port: int) -> None:
         ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
         srv = socket.socket(ipv, socket.SOCK_STREAM)
-        try:
-            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
-        except:
-            pass
-        srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        if not ANYWIN or self.args.reuseaddr:
+            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
         srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-        srv.settimeout(None)  # < does not inherit, ^ does
+        srv.settimeout(None)  # < does not inherit, ^ opts above do

         try:
             srv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
         except:
             pass  # will create another ipv4 socket instead

+        if not ANYWIN and self.args.freebind:
+            srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
+
         try:
             srv.bind((ip, port))
+            sport = srv.getsockname()[1]
+            if port != sport:
+                # linux 6.0.16 lets you bind a port which is in use
+                # except it just gives you a random port instead
+                raise OSError(E_ADDR_IN_USE[0], "")
             self.srv.append(srv)
         except (OSError, socket.error) as ex:
             if ex.errno in E_ADDR_IN_USE:
@@ -228,6 +255,14 @@ class TcpSrv(object):
             ip, port = srv.getsockname()[:2]
             try:
                 srv.listen(self.args.nc)
+                try:
+                    ok = srv.getsockopt(socket.SOL_SOCKET, socket.SO_ACCEPTCONN)
+                except:
+                    ok = 1  # macos
+
+                if not ok:
+                    # some linux don't throw on listen(0.0.0.0) after listen(::)
+                    raise Exception("failed to listen on {}".format(srv.getsockname()))
             except:
                 if ip == "0.0.0.0" and ("::", port) in bound:
                     # dualstack
@@ -255,7 +290,11 @@ class TcpSrv(object):
         self.srv = srvs
         self.bound = bound
         self.nsrv = len(srvs)
+        self._distribute_netdevs()
+
+    def _distribute_netdevs(self):
         self.hub.broker.say("set_netdevs", self.netdevs)
+        self.hub.start_zeroconf()

     def shutdown(self) -> None:
         self.stopping = True
@@ -267,6 +306,27 @@ class TcpSrv(object):

         self.log("tcpsrv", "ok bye")

+    def netmon(self):
+        while not self.stopping:
+            time.sleep(self.args.z_chk)
+            netdevs = self.detect_interfaces(self.args.i)
+            if not netdevs:
+                continue
+
+            added = "nothing"
+            removed = "nothing"
+            for k, v in netdevs.items():
+                if k not in self.netdevs:
+                    added = "{} = {}".format(k, v)
+            for k, v in self.netdevs.items():
+                if k not in netdevs:
+                    removed = "{} = {}".format(k, v)
+
+            t = "network change detected:\n added {}\nremoved {}"
+            self.log("tcpsrv", t.format(added, removed), 3)
+            self.netdevs = netdevs
+            self._distribute_netdevs()
+
     def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
         from .stolen.ifaddr import get_adapters

@@ -276,10 +336,6 @@ class TcpSrv(object):
             for nip in nic.ips:
                 ipa = nip.ip[0] if ":" in str(nip.ip) else nip.ip
                 sip = "{}/{}".format(ipa, nip.network_prefix)
-                if sip.startswith("169.254"):
-                    # browsers dont impl linklocal
-                    continue
-
                 nd = Netdev(sip, nic.index or 0, nic.nice_name, "")
                 eps[sip] = nd
                 try:
@@ -291,6 +347,12 @@ class TcpSrv(object):
                 except:
                     pass

+        netlist = str(sorted(eps.items()))
+        if netlist == self.netlist and self.netdevs:
+            return {}
+
+        self.netlist = netlist
+
         if "0.0.0.0" not in listen_ips and "::" not in listen_ips:
             eps = {k: v for k, v in eps.items() if k.split("/")[0] in listen_ips}
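Two socket options become configurable above: `SO_REUSEADDR` (now gated on non-Windows or an explicit switch) and Linux's `IP_FREEBIND`, which lets a socket bind an address that is not (yet) configured on any interface; together with the new `netmon` loop this helps when interfaces come and go. A minimal standalone sketch of the same calls, assuming a Linux host; the address and port are arbitrary:

```python
import socket

srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

# IP_FREEBIND is linux-only; like the diff, fall back to the raw value 15
IP_FREEBIND = getattr(socket, "IP_FREEBIND", 15)
SOL_IP = getattr(socket, "SOL_IP", 0)
try:
    srv.setsockopt(SOL_IP, IP_FREEBIND, 1)
except OSError:
    pass  # not supported on this platform

try:
    # with IP_FREEBIND this may succeed even if the address
    # is not configured on any interface yet
    srv.bind(("192.0.2.10", 3923))
except OSError as ex:
    print("bind failed:", ex)
```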
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals

 import base64
 import hashlib
+import logging
 import os
 import shutil
 import subprocess as sp
@@ -11,14 +12,16 @@ import time

 from queue import Queue

-from .__init__ import TYPE_CHECKING
+from .__init__ import ANYWIN, TYPE_CHECKING
 from .bos import bos
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
 from .util import (
     BytesIO,
     Cooldown,
     Daemon,
+    FFMPEG_URL,
     Pebkac,
+    afsenc,
     fsenc,
     min_ex,
     runcmd,
@@ -61,12 +64,16 @@ try:
         HAVE_AVIF = True
     except:
         pass
+
+    logging.getLogger("PIL").setLevel(logging.WARNING)
 except:
     pass

 try:
     HAVE_VIPS = True
     import pyvips
+
+    logging.getLogger("pyvips").setLevel(logging.WARNING)
 except:
     HAVE_VIPS = False

@@ -77,14 +84,14 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str) -> str:
     # base64 = 64 = 4096
     rd, fn = vsplit(rem)
     if rd:
-        h = hashlib.sha512(fsenc(rd)).digest()
+        h = hashlib.sha512(afsenc(rd)).digest()
         b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
         rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
     else:
         rd = "top"

     # could keep original filenames but this is safer re pathlen
-    h = hashlib.sha512(fsenc(fn)).digest()
+    h = hashlib.sha512(afsenc(fn)).digest()
     fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

     if fmt in ("opus", "caf"):
@@ -128,6 +135,8 @@ class ThumbSrv(object):
             msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
             msg += ", ".join(missing)
             self.log(msg, c=3)
+            if ANYWIN and self.args.no_acode:
+                self.log("download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)

         if self.args.th_clean:
             Daemon(self.cleaner, "thumb.cln")
@@ -191,12 +200,12 @@ class ThumbSrv(object):
                 self.log("wait {}".format(tpath))
             except:
                 thdir = os.path.dirname(tpath)
-                bos.makedirs(thdir)
+                bos.makedirs(os.path.join(thdir, "w"))

                 inf_path = os.path.join(thdir, "dir.txt")
                 if not bos.path.exists(inf_path):
                     with open(inf_path, "wb") as f:
-                        f.write(fsenc(os.path.dirname(abspath)))
+                        f.write(afsenc(os.path.dirname(abspath)))

                 self.busy[tpath] = [cond]
                 do_conv = True
@@ -242,47 +251,55 @@ class ThumbSrv(object):
         abspath, tpath = task
         ext = abspath.split(".")[-1].lower()
         png_ok = False
-        fun = None
+        funs = []
         if not bos.path.exists(tpath):
             for lib in self.args.th_dec:
-                if fun:
-                    break
-                elif lib == "pil" and ext in self.fmt_pil:
-                    fun = self.conv_pil
+                if lib == "pil" and ext in self.fmt_pil:
+                    funs.append(self.conv_pil)
                 elif lib == "vips" and ext in self.fmt_vips:
-                    fun = self.conv_vips
+                    funs.append(self.conv_vips)
                 elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
-                    fun = self.conv_ffmpeg
+                    funs.append(self.conv_ffmpeg)
                 elif lib == "ff" and ext in self.fmt_ffa:
                     if tpath.endswith(".opus") or tpath.endswith(".caf"):
-                        fun = self.conv_opus
+                        funs.append(self.conv_opus)
                     elif tpath.endswith(".png"):
-                        fun = self.conv_waves
+                        funs.append(self.conv_waves)
                         png_ok = True
                     else:
-                        fun = self.conv_spec
+                        funs.append(self.conv_spec)

         if not png_ok and tpath.endswith(".png"):
             raise Pebkac(400, "png only allowed for waveforms")

-        if fun:
+        tdir, tfn = os.path.split(tpath)
+        ttpath = os.path.join(tdir, "w", tfn)
+
+        for fun in funs:
             try:
-                fun(abspath, tpath)
+                fun(abspath, ttpath)
+                break
             except Exception as ex:
                 msg = "{} could not create thumbnail of {}\n{}"
                 msg = msg.format(fun.__name__, abspath, min_ex())
                 c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
                 self.log(msg, c)
                 if getattr(ex, "returncode", 0) != 321:
-                    with open(tpath, "wb") as _:
-                        pass
+                    if fun == funs[-1]:
+                        with open(ttpath, "wb") as _:
+                            pass
                 else:
                     # ffmpeg may spawn empty files on windows
                     try:
-                        os.unlink(tpath)
+                        os.unlink(ttpath)
                     except:
                         pass

+        try:
+            bos.rename(ttpath, tpath)
+        except:
+            pass
+
         with self.mutex:
             subs = self.busy[tpath]
             del self.busy[tpath]
@@ -363,7 +380,8 @@ class ThumbSrv(object):
                 img = pyvips.Image.thumbnail(abspath, w, **kw)
                 break
             except:
-                pass
+                if c == crops[-1]:
+                    raise

         img.write_to_file(tpath, Q=40)
@@ -97,14 +97,17 @@ class U2idx(object):
             return None

         cur = None
-        if ANYWIN:
+        if ANYWIN and not bos.path.exists(db_path + "-wal"):
             uri = ""
             try:
                 uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
                 cur = sqlite3.connect(uri, 2, uri=True).cursor()
+                cur.execute('pragma table_info("up")').fetchone()
                 self.log("ro: {}".format(db_path))
             except:
                 self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
+                # may not fail until the pragma so unset it
+                cur = None

         if not cur:
             # on windows, this steals the write-lock from up2k.deferred_init --
@@ -290,6 +293,7 @@ class U2idx(object):
         self.log("qs: {!r} {!r}".format(uq, uv))

         ret = []
+        seen_rps: set[str] = set()
         lim = min(lim, int(self.args.srch_hits))
         taglist = {}
         for (vtop, ptop, flags) in vols:
@@ -308,6 +312,7 @@ class U2idx(object):

             sret = []
             fk = flags.get("fk")
+            dots = flags.get("dotsrch")
             c = cur.execute(uq, tuple(vuv))
             for hit in c:
                 w, ts, sz, rd, fn, ip, at = hit[:7]
@@ -318,6 +323,13 @@ class U2idx(object):
                 if rd.startswith("//") or fn.startswith("//"):
                     rd, fn = s3dec(rd, fn)

+                rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
+                if not dots and "/." in ("/" + rp):
+                    continue
+
+                if rp in seen_rps:
+                    continue
+
                 if not fk:
                     suf = ""
                 else:
@@ -334,8 +346,8 @@ class U2idx(object):
                         )[:fk]
                     )

-                rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
-                sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
+                seen_rps.add(rp)
+                sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})

             for hit in sret:
                 w = hit["w"]
@@ -354,14 +366,6 @@ class U2idx(object):
             done_flag.append(True)
             self.active_id = ""

-        # undupe hits from multiple metadata keys
-        if len(ret) > 1:
-            ret = [ret[0]] + [
-                y
-                for x, y in zip(ret[:-1], ret[1:])
-                if x["rp"].split("?")[0] != y["rp"].split("?")[0]
-            ]
-
         ret.sort(key=itemgetter("rp"))

         return ret, list(taglist.keys())
File diff suppressed because it is too large
@@ -6,6 +6,7 @@ import contextlib
 import errno
 import hashlib
 import hmac
+import json
 import logging
 import math
 import mimetypes
@@ -13,6 +14,7 @@ import os
 import platform
 import re
 import select
+import shutil
 import signal
 import socket
 import stat
@@ -29,7 +31,7 @@ from email.utils import formatdate
 from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
 from queue import Queue

-from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
+from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
 from .__version__ import S_BUILD_DT, S_VERSION
 from .stolen import surrogateescape

@@ -142,12 +144,15 @@ SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks

 META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'

+FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
+
 HTTPCODE = {
     200: "OK",
     201: "Created",
     204: "No Content",
     206: "Partial Content",
     207: "Multi-Status",
+    301: "Moved Permanently",
     302: "Found",
     304: "Not Modified",
     400: "Bad Request",
@@ -227,6 +232,7 @@ application msi=x-ms-installer cab=vnd.ms-cab-compressed rpm=x-rpm crx=x-chrome-
 application epub=epub+zip mobi=x-mobipocket-ebook lit=x-ms-reader rss=rss+xml atom=atom+xml torrent=x-bittorrent
 application p7s=pkcs7-signature dcm=dicom shx=vnd.shx shp=vnd.shp dbf=x-dbf gml=gml+xml gpx=gpx+xml amf=x-amf
 application swf=x-shockwave-flash m3u=vnd.apple.mpegurl db3=vnd.sqlite3 sqlite=vnd.sqlite3
+text ass=plain ssa=plain
 image jpg=jpeg xpm=x-xpixmap psd=vnd.adobe.photoshop jpf=jpx tif=tiff ico=x-icon djvu=vnd.djvu
 image heic=heic-sequence heif=heif-sequence hdr=vnd.radiance svg=svg+xml
 audio caf=x-caf mp3=mpeg m4a=mp4 mid=midi mpc=musepack aif=aiff au=basic qcp=qcelp
@@ -287,6 +293,19 @@ REKOBO_KEY = {
 REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}


+pybin = sys.executable or ""
+if EXE:
+    pybin = ""
+    for p in "python3 python".split():
+        try:
+            p = shutil.which(p)
+            if p:
+                pybin = p
+                break
+        except:
+            pass
+
+
 def py_desc() -> str:
     interp = platform.python_implementation()
     py_ver = ".".join([str(x) for x in sys.version_info])
@@ -360,8 +379,11 @@ class Daemon(threading.Thread):
         name: Optional[str] = None,
         a: Optional[Iterable[Any]] = None,
         r: bool = True,
+        ka: Optional[dict[Any, Any]] = None,
     ) -> None:
-        threading.Thread.__init__(self, target=target, name=name, args=a or ())
+        threading.Thread.__init__(
+            self, target=target, name=name, args=a or (), kwargs=ka
+        )
         self.daemon = True
         if r:
             self.start()
@@ -377,6 +399,9 @@ class Netdev(object):
     def __str__(self):
         return "{}-{}{}".format(self.idx, self.name, self.desc)

+    def __repr__(self):
+        return "'{}-{}'".format(self.idx, self.name)
+
     def __lt__(self, rhs):
         return str(self) < str(rhs)

@@ -436,9 +461,7 @@ class HLog(logging.Handler):
         else:
             c = 1

-        if record.name.startswith("PIL") and lv < logging.WARNING:
-            return
-        elif record.name == "pyftpdlib":
+        if record.name == "pyftpdlib":
             m = self.ptn_ftp.match(msg)
             if m:
                 ip = m.group(1)
@@ -468,7 +491,7 @@ class NetMap(object):
         )

         ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
-        ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
+        ips = find_prefix(ips, netdevs)

         self.cache: dict[str, str] = {}
         self.b2sip: dict[bytes, str] = {}
@@ -645,6 +668,7 @@ class FHC(object):

     def __init__(self) -> None:
         self.cache: dict[str, FHC.CE] = {}
+        self.aps: set[str] = set()

     def close(self, path: str) -> None:
         try:
@@ -656,6 +680,7 @@ class FHC(object):
             fh.close()

         del self.cache[path]
+        self.aps.remove(path)

     def clean(self) -> None:
         if not self.cache:
@@ -676,6 +701,7 @@ class FHC(object):
         return self.cache[path].fhs.pop()

     def put(self, path: str, fh: typing.BinaryIO) -> None:
+        self.aps.add(path)
         try:
             ce = self.cache[path]
             ce.fhs.append(fh)
@@ -1146,20 +1172,12 @@ def ren_open(
     fun = kwargs.pop("fun", open)
     fdir = kwargs.pop("fdir", None)
     suffix = kwargs.pop("suffix", None)
-    overwrite = kwargs.pop("overwrite", None)

     if fname == os.devnull:
         with fun(fname, *args, **kwargs) as f:
             yield {"orz": (f, fname)}
         return

-    if overwrite:
-        assert fdir
-        fpath = os.path.join(fdir, fname)
-        with fun(fsenc(fpath), *args, **kwargs) as f:
-            yield {"orz": (f, fname)}
-        return
-
     if suffix:
         ext = fname.split(".")[-1]
         if len(ext) < 7:
@@ -1186,7 +1204,7 @@ def ren_open(
     else:
         fpath = fname

-    if suffix and os.path.exists(fsenc(fpath)):
+    if suffix and os.path.lexists(fsenc(fpath)):
         fpath += suffix
         fname += suffix
         ext += suffix
@@ -1506,6 +1524,28 @@ def read_header(sr: Unrecv) -> list[str]:
     return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")


+def rand_name(fdir: str, fn: str, rnd: int) -> str:
+    ok = False
+    try:
+        ext = "." + fn.rsplit(".", 1)[1]
+    except:
+        ext = ""
+
+    for extra in range(16):
+        for _ in range(16):
+            if ok:
+                break
+
+            nc = rnd + extra
+            nb = int((6 + 6 * nc) / 8)
+            zb = os.urandom(nb)
+            zb = base64.urlsafe_b64encode(zb)
+            fn = zb[:nc].decode("utf-8") + ext
+            ok = not os.path.exists(fsenc(os.path.join(fdir, fn)))
+
+    return fn
+
+
 def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
     return base64.urlsafe_b64encode(
         hashlib.sha512(
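In the new `rand_name` helper above, `nc` is the number of base64 characters wanted and `nb = int((6 + 6 * nc) / 8)` rounds up to enough random bytes to produce them (each base64 character carries 6 bits); on a filename collision the outer loop bumps `nc`, so names grow instead of retrying forever. A tiny worked example of the sizing math, using the same calls:

```python
import base64
import os

nc = 8                          # requested name length (rnd=8, no retries yet)
nb = int((6 + 6 * nc) / 8)      # 6 random bytes -> 48 bits -> exactly 8 base64 chars
name = base64.urlsafe_b64encode(os.urandom(nb))[:nc].decode("utf-8")
print(nb, len(name))            # 6 8
```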
@@ -1548,14 +1588,16 @@ def gen_filekey_dbg(
     return ret


-def gencookie(k: str, v: str, dur: Optional[int]) -> str:
+def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
     v = v.replace(";", "")
     if dur:
         exp = formatdate(time.time() + dur, usegmt=True)
     else:
         exp = "Fri, 15 Aug 1997 01:00:00 GMT"

-    return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
+    return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
+        k, v, r, exp, "; Secure" if tls else ""
+    )


 def humansize(sz: float, terse: bool = False) -> str:
@@ -1681,7 +1723,7 @@ def relchk(rp: str) -> str:

 def absreal(fpath: str) -> str:
     try:
-        return fsdec(os.path.abspath(os.path.realpath(fsenc(fpath))))
+        return fsdec(os.path.abspath(os.path.realpath(afsenc(fpath))))
     except:
         if not WINDOWS:
             raise
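`gencookie` now also takes a URL prefix `r` (tying in with the `--rp-loc` handling added in SvcHub above) and a `tls` flag, so the cookie is scoped to the mounted path and marked `Secure` over HTTPS. A hedged sketch of what a call under the new signature would return; the key, value and prefix are invented and the expiry is shown as a placeholder:

```python
# hypothetical call:
#   gencookie("cppwd", "hunter2", "pub", True, 3600)
# would return roughly:
#   "cppwd=hunter2; Path=/pub; Expires=<http-date>; Secure; SameSite=Lax"
```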
@@ -1711,6 +1753,15 @@ def ipnorm(ip: str) -> str:
     return ip


+def find_prefix(ips: list[str], netdevs: dict[str, Netdev]) -> list[str]:
+    ret = []
+    for ip in ips:
+        hit = next((x for x in netdevs if x.startswith(ip + "/")), None)
+        if hit:
+            ret.append(hit)
+    return ret
+
+
 def html_escape(s: str, quot: bool = False, crlf: bool = False) -> str:
     """html.escape but also newlines"""
     s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
@@ -1792,6 +1843,32 @@ def _w8enc3(txt: str) -> bytes:
     return txt.encode(FS_ENCODING, "surrogateescape")


+def _msdec(txt: bytes) -> str:
+    ret = txt.decode(FS_ENCODING, "surrogateescape")
+    return ret[4:] if ret.startswith("\\\\?\\") else ret
+
+
+def _msaenc(txt: str) -> bytes:
+    return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape")
+
+
+def _uncify(txt: str) -> str:
+    txt = txt.replace("/", "\\")
+    if ":" not in txt and not txt.startswith("\\\\"):
+        txt = absreal(txt)
+
+    return txt if txt.startswith("\\\\") else "\\\\?\\" + txt
+
+
+def _msenc(txt: str) -> bytes:
+    txt = txt.replace("/", "\\")
+    if ":" not in txt and not txt.startswith("\\\\"):
+        txt = absreal(txt)
+
+    ret = txt.encode(FS_ENCODING, "surrogateescape")
+    return ret if ret.startswith(b"\\\\") else b"\\\\?\\" + ret
+
+
 w8dec = _w8dec3 if not PY2 else _w8dec2
 w8enc = _w8enc3 if not PY2 else _w8enc2

@@ -1806,9 +1883,16 @@ def w8b64enc(txt: str) -> str:
     return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")


-if not PY2 or not WINDOWS:
-    fsenc = w8enc
+if not PY2 and WINDOWS:
+    sfsenc = w8enc
+    afsenc = _msaenc
+    fsenc = _msenc
+    fsdec = _msdec
+    uncify = _uncify
+elif not PY2 or not WINDOWS:
+    fsenc = afsenc = sfsenc = w8enc
     fsdec = w8dec
+    uncify = str
 else:
     # moonrunes become \x3f with bytestrings,
     # losing mojibake support is worth
@@ -1818,8 +1902,9 @@ else:
     def _not_actually_mbcs_dec(txt: bytes) -> str:
         return txt

-    fsenc = _not_actually_mbcs_enc
+    fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
     fsdec = _not_actually_mbcs_dec
+    uncify = str


 def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
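The new Windows-only encoders above route filesystem paths through a `\\?\` (extended-length) prefix so paths longer than the legacy 260-character limit keep working; `_uncify`/`uncify` normalizes slashes and prepends the prefix when missing. A small demo of just the string transformation, with the `absreal()` step for relative paths left out so it is safe to run on any platform:

```python
def uncify_demo(txt: str) -> str:
    # same steps as _uncify above, minus absreal() for relative paths
    txt = txt.replace("/", "\\")
    return txt if txt.startswith("\\\\") else "\\\\?\\" + txt


print(uncify_demo("C:/Users/ed/music"))   # \\?\C:\Users\ed\music
print(uncify_demo("\\\\server\\share"))   # \\server\share (already UNC; unchanged)
```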
@@ -2007,6 +2092,20 @@ def read_socket_chunked(
         raise Pebkac(400, t.format(x))


+def list_ips() -> list[str]:
+    from .stolen.ifaddr import get_adapters
+
+    ret: set[str] = set()
+    for nic in get_adapters():
+        for ipo in nic.ips:
+            if len(ipo.ip) < 7:
+                ret.add(ipo.ip[0])  # ipv6 is (ip,0,0)
+            else:
+                ret.add(ipo.ip)
+
+    return list(ret)
+
+
 def yieldfile(fn: str) -> Generator[bytes, None, None]:
     with open(fsenc(fn), "rb", 512 * 1024) as f:
         while True:
@@ -2182,18 +2281,21 @@ def rmdirs(
     return ok, ng


-def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
+def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:
     """rmdir on self, then all parents"""
+    if top == stop:
+        return [], [top]
+
     try:
         os.rmdir(fsenc(top))
     except:
         return [], [top]

     par = os.path.dirname(top)
-    if not par:
+    if not par or par == stop:
         return [top], []

-    ok, ng = rmdirs_up(par)
+    ok, ng = rmdirs_up(par, stop)
     return [top] + ok, ng
@@ -2427,6 +2529,193 @@ def retchk(
     raise Exception(t)


+def _parsehook(
+    log: Optional["NamedLogger"], cmd: str
+) -> tuple[bool, bool, bool, float, dict[str, Any], str]:
+    chk = False
+    fork = False
+    jtxt = False
+    wait = 0.0
+    tout = 0.0
+    kill = "t"
+    cap = 0
+    ocmd = cmd
+    while "," in cmd[:6]:
+        arg, cmd = cmd.split(",", 1)
+        if arg == "c":
+            chk = True
+        elif arg == "f":
+            fork = True
+        elif arg == "j":
+            jtxt = True
+        elif arg.startswith("w"):
+            wait = float(arg[1:])
+        elif arg.startswith("t"):
+            tout = float(arg[1:])
+        elif arg.startswith("c"):
+            cap = int(arg[1:])  # 0=none 1=stdout 2=stderr 3=both
+        elif arg.startswith("k"):
+            kill = arg[1:]  # [t]ree [m]ain [n]one
+        elif arg.startswith("i"):
+            pass
+        else:
+            t = "hook: invalid flag {} in {}"
+            (log or print)(t.format(arg, ocmd))
+
+    env = os.environ.copy()
+    try:
+        if EXE:
+            raise Exception()
+
+        pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
+        zsl = [str(pypath)] + [str(x) for x in sys.path if x]
+        pypath = str(os.pathsep.join(zsl))
+        env["PYTHONPATH"] = pypath
+    except:
+        if not EXE:
+            raise
+
+    sp_ka = {
+        "env": env,
+        "timeout": tout,
+        "kill": kill,
+        "capture": cap,
+    }
+
+    if cmd.startswith("~"):
+        cmd = os.path.expanduser(cmd)
+
+    return chk, fork, jtxt, wait, sp_ka, cmd
+
+
+def runihook(
+    log: Optional["NamedLogger"],
+    cmd: str,
+    vol: "VFS",
+    ups: list[tuple[str, int, int, str, str, str, int]],
+) -> bool:
+    ocmd = cmd
+    chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
+    bcmd = [sfsenc(cmd)]
+    if cmd.endswith(".py"):
+        bcmd = [sfsenc(pybin)] + bcmd
+
+    vps = [vjoin(*list(s3dec(x[3], x[4]))) for x in ups]
+    aps = [djoin(vol.realpath, x) for x in vps]
+    if jtxt:
+        # 0w 1mt 2sz 3rd 4fn 5ip 6at
+        ja = [
+            {
+                "ap": uncify(ap),  # utf8 for json
+                "vp": vp,
+                "wark": x[0][:16],
+                "mt": x[1],
+                "sz": x[2],
+                "ip": x[5],
+                "at": x[6],
+            }
+            for x, vp, ap in zip(ups, vps, aps)
+        ]
+        sp_ka["sin"] = json.dumps(ja).encode("utf-8", "replace")
+    else:
+        sp_ka["sin"] = b"\n".join(fsenc(x) for x in aps)
+
+    t0 = time.time()
+    if fork:
+        Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
+    else:
+        rc, v, err = runcmd(bcmd, **sp_ka)  # type: ignore
+        if chk and rc:
+            retchk(rc, bcmd, err, log, 5)
+            return False
+
+    wait -= time.time() - t0
+    if wait > 0:
+        time.sleep(wait)
+
+    return True
+
+
+def _runhook(
+    log: Optional["NamedLogger"],
+    cmd: str,
+    ap: str,
+    vp: str,
+    host: str,
+    uname: str,
+    mt: float,
+    sz: int,
+    ip: str,
+    at: float,
+    txt: str,
+) -> bool:
+    ocmd = cmd
+    chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
+    if jtxt:
+        ja = {
+            "ap": ap,
+            "vp": vp,
+            "mt": mt,
+            "sz": sz,
+            "ip": ip,
+            "at": at or time.time(),
+            "host": host,
+            "user": uname,
+            "txt": txt,
+        }
+        arg = json.dumps(ja)
+    else:
+        arg = txt or ap
+
+    acmd = [cmd, arg]
+    if cmd.endswith(".py"):
+        acmd = [pybin] + acmd
+
+    bcmd = [fsenc(x) if x == ap else sfsenc(x) for x in acmd]
+
+    t0 = time.time()
+    if fork:
+        Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
+    else:
+        rc, v, err = runcmd(bcmd, **sp_ka)  # type: ignore
+        if chk and rc:
+            retchk(rc, bcmd, err, log, 5)
+            return False
+
+    wait -= time.time() - t0
+    if wait > 0:
+        time.sleep(wait)
+
+    return True
+
+
+def runhook(
+    log: Optional["NamedLogger"],
+    cmds: list[str],
+    ap: str,
+    vp: str,
+    host: str,
+    uname: str,
+    mt: float,
+    sz: int,
+    ip: str,
+    at: float,
+    txt: str,
+) -> bool:
+    vp = vp.replace("\\", "/")
+    for cmd in cmds:
+        try:
+            if not _runhook(log, cmd, ap, vp, host, uname, mt, sz, ip, at, txt):
+                return False
+        except Exception as ex:
+            (log or print)("hook: {}".format(ex))
+            if ",c," in "," + cmd:
+                return False
+            break
+
+    return True
+
def gzip_orig_sz(fn: str) -> int:
|
def gzip_orig_sz(fn: str) -> int:
|
||||||
with open(fsenc(fn), "rb") as f:
|
with open(fsenc(fn), "rb") as f:
|
||||||
f.seek(-4, 2)
|
f.seek(-4, 2)
|
||||||
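For reference, the flag prefix on a hook command is a comma-separated list which _parsehook strips off before the command itself: c (check exit code), f (fork into the background via Daemon), j (pass the event as JSON instead of plain text), wN (wait N seconds afterwards), tN (timeout), cN (capture stdout/stderr), kX (what to kill on timeout: [t]ree, [m]ain, [n]one), i (ignored here). A small sketch with a hypothetical hook path, traced against the parser above:

# "f,j,t30,kn,~/hooks/notify.py" would parse as:
#   f   -> fork (run in the background)
#   j   -> jtxt (hand the upload info to the hook as JSON)
#   t30 -> 30s timeout, kn -> kill nothing when the timeout hits
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, "f,j,t30,kn,~/hooks/notify.py")
# chk=False, fork=True, jtxt=True, wait=0.0,
# sp_ka == {"env": ..., "timeout": 30.0, "kill": "n", "capture": 0},
# and cmd is the leftover "~/hooks/notify.py", expanded by expanduser
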
@@ -2536,7 +2825,7 @@ def termsize() -> tuple[int, int]:
     def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
         try:
             cr = sunpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
-            return int(cr[1]), int(cr[0])
+            return cr[::-1]
         except:
             return None

@@ -2549,15 +2838,23 @@ def termsize() -> tuple[int, int]:
         except:
             pass

-    if cr:
-        return cr
-
     try:
-        return int(env["COLUMNS"]), int(env["LINES"])
+        return cr or (int(env["COLUMNS"]), int(env["LINES"]))
     except:
         return 80, 25


+def hidedir(dp) -> None:
+    if ANYWIN:
+        try:
+            k32 = ctypes.WinDLL("kernel32")
+            attrs = k32.GetFileAttributesW(dp)
+            if attrs >= 0:
+                k32.SetFileAttributesW(dp, attrs | 2)
+        except:
+            pass
+
+
 class Pebkac(Exception):
     def __init__(self, code: int, msg: Optional[str] = None) -> None:
         super(Pebkac, self).__init__(msg or HTTPCODE[code])
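hidedir flips the hidden attribute on Windows; the hardcoded 2 is FILE_ATTRIBUTE_HIDDEN, and GetFileAttributesW returns -1 on failure (which is why the diff guards with attrs >= 0). A small self-contained sketch of reading that same flag back (Windows-only; the helper name and path are illustrative, not part of the diff):

import ctypes, os

FILE_ATTRIBUTE_HIDDEN = 0x2

def is_hidden(dp: str) -> bool:
    # mirrors the GetFileAttributesW call above; -1 means the lookup failed
    if os.name != "nt":
        return False
    attrs = ctypes.WinDLL("kernel32").GetFileAttributesW(dp)
    return attrs != -1 and bool(attrs & FILE_ATTRIBUTE_HIDDEN)
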
@@ -27,7 +27,7 @@ window.baguetteBox = (function () {
 	isOverlayVisible = false,
 	touch = {}, // start-pos
 	touchFlag = false, // busy
-	re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
+	re_i = /.+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
 	re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
 	anims = ['slideIn', 'fadeIn', 'none'],
 	data = {}, // all galleries
@@ -277,8 +277,8 @@ window.baguetteBox = (function () {
 		playpause();
 	else if (k == "KeyU" || k == "KeyO")
 		relseek(k == "KeyU" ? -10 : 10);
-	else if (k.indexOf('Digit') === 0)
-		vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
+	else if (k.indexOf('Digit') === 0 && v)
+		v.currentTime = v.duration * parseInt(k.slice(-1)) * 0.1;
 	else if (k == "KeyM" && v) {
 		v.muted = vmute = !vmute;
 		mp_ctl();
@@ -93,6 +93,7 @@
|
|||||||
--g-fsel-bg: #d39;
|
--g-fsel-bg: #d39;
|
||||||
--g-fsel-b1: #f4a;
|
--g-fsel-b1: #f4a;
|
||||||
--g-fsel-ts: #804;
|
--g-fsel-ts: #804;
|
||||||
|
--g-dfg: var(--srv-3);
|
||||||
--g-fg: var(--a-hil);
|
--g-fg: var(--a-hil);
|
||||||
--g-bg: var(--bg-u2);
|
--g-bg: var(--bg-u2);
|
||||||
--g-b1: var(--bg-u4);
|
--g-b1: var(--bg-u4);
|
||||||
@@ -327,6 +328,7 @@ html.c {
|
|||||||
}
|
}
|
||||||
html.cz {
|
html.cz {
|
||||||
--bgg: var(--bg-u2);
|
--bgg: var(--bg-u2);
|
||||||
|
--srv-3: #fff;
|
||||||
}
|
}
|
||||||
html.cy {
|
html.cy {
|
||||||
--fg: #fff;
|
--fg: #fff;
|
||||||
@@ -354,6 +356,7 @@ html.cy {
|
|||||||
--chk-fg: #fd0;
|
--chk-fg: #fd0;
|
||||||
|
|
||||||
--srv-1: #f00;
|
--srv-1: #f00;
|
||||||
|
--srv-3: #fff;
|
||||||
--op-aa-bg: #fff;
|
--op-aa-bg: #fff;
|
||||||
|
|
||||||
--u2-b1-bg: #f00;
|
--u2-b1-bg: #f00;
|
||||||
@@ -572,6 +575,11 @@ html.dy {
|
|||||||
* {
|
* {
|
||||||
line-height: 1.2em;
|
line-height: 1.2em;
|
||||||
}
|
}
|
||||||
|
::selection {
|
||||||
|
color: var(--bg-d1);
|
||||||
|
background: var(--fg);
|
||||||
|
text-shadow: none;
|
||||||
|
}
|
||||||
html,body,tr,th,td,#files,a {
|
html,body,tr,th,td,#files,a {
|
||||||
color: inherit;
|
color: inherit;
|
||||||
background: none;
|
background: none;
|
||||||
@@ -754,8 +762,9 @@ html.y #files thead th {
|
|||||||
display: inline;
|
display: inline;
|
||||||
}
|
}
|
||||||
#path a {
|
#path a {
|
||||||
margin: 0 0 0 -.2em;
|
padding: 0 .35em;
|
||||||
padding: 0 0 0 .4em;
|
position: relative;
|
||||||
|
z-index: 1;
|
||||||
/* ie: */
|
/* ie: */
|
||||||
border-bottom: .1em solid #777\9;
|
border-bottom: .1em solid #777\9;
|
||||||
margin-right: 1em\9;
|
margin-right: 1em\9;
|
||||||
@@ -763,18 +772,17 @@ html.y #files thead th {
|
|||||||
#path a:first-child {
|
#path a:first-child {
|
||||||
padding-left: .8em;
|
padding-left: .8em;
|
||||||
}
|
}
|
||||||
#path a:not(:last-child):after {
|
#path i {
|
||||||
content: '';
|
|
||||||
width: 1.05em;
|
width: 1.05em;
|
||||||
height: 1.05em;
|
height: 1.05em;
|
||||||
margin: -.2em .3em -.2em -.4em;
|
margin: -.5em .15em -.15em -.7em;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
border: 1px solid rgba(255,224,192,0.3);
|
border: 1px solid rgba(255,224,192,0.3);
|
||||||
border-width: .05em .05em 0 0;
|
border-width: .05em .05em 0 0;
|
||||||
transform: rotate(45deg);
|
transform: rotate(45deg);
|
||||||
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
|
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
|
||||||
}
|
}
|
||||||
html.y #path a:not(:last-child)::after {
|
html.y #path i {
|
||||||
background: none;
|
background: none;
|
||||||
border-color: rgba(0,0,0,0.2);
|
border-color: rgba(0,0,0,0.2);
|
||||||
border-width: .1em .1em 0 0;
|
border-width: .1em .1em 0 0;
|
||||||
@@ -793,6 +801,24 @@ html.y #path a:hover {
|
|||||||
.logue:empty {
|
.logue:empty {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
|
#doc>iframe,
|
||||||
|
.logue>iframe {
|
||||||
|
background: var(--bgg);
|
||||||
|
border: 1px solid var(--bgg);
|
||||||
|
border-width: 0 .3em 0 .3em;
|
||||||
|
border-radius: .5em;
|
||||||
|
visibility: hidden;
|
||||||
|
margin: 0 -.3em;
|
||||||
|
width: 100%;
|
||||||
|
height: 0;
|
||||||
|
}
|
||||||
|
#doc>iframe.focus,
|
||||||
|
.logue>iframe.focus {
|
||||||
|
box-shadow: 0 0 .1em .1em var(--a);
|
||||||
|
}
|
||||||
|
#pro.logue>iframe {
|
||||||
|
height: 100vh;
|
||||||
|
}
|
||||||
#pro.logue {
|
#pro.logue {
|
||||||
margin-bottom: .8em;
|
margin-bottom: .8em;
|
||||||
}
|
}
|
||||||
@@ -817,6 +843,10 @@ html.y #path a:hover {
|
|||||||
.mdo {
|
.mdo {
|
||||||
max-width: 52em;
|
max-width: 52em;
|
||||||
}
|
}
|
||||||
|
.mdo.sb,
|
||||||
|
#epi.logue.mdo>iframe {
|
||||||
|
max-width: 54em;
|
||||||
|
}
|
||||||
.mdo,
|
.mdo,
|
||||||
.mdo * {
|
.mdo * {
|
||||||
line-height: 1.4em;
|
line-height: 1.4em;
|
||||||
@@ -937,6 +967,9 @@ html.y #path a:hover {
|
|||||||
#ggrid>a.dir:before {
|
#ggrid>a.dir:before {
|
||||||
content: '📂';
|
content: '📂';
|
||||||
}
|
}
|
||||||
|
#ggrid>a.dir>span {
|
||||||
|
color: var(--g-dfg);
|
||||||
|
}
|
||||||
#ggrid>a.au:before {
|
#ggrid>a.au:before {
|
||||||
content: '💾';
|
content: '💾';
|
||||||
}
|
}
|
||||||
@@ -983,6 +1016,9 @@ html.np_open #ggrid>a.au:before {
|
|||||||
background: var(--g-sel-bg);
|
background: var(--g-sel-bg);
|
||||||
border-color: var(--g-sel-b1);
|
border-color: var(--g-sel-b1);
|
||||||
}
|
}
|
||||||
|
#ggrid>a.sel>span {
|
||||||
|
color: var(--g-sel-fg);
|
||||||
|
}
|
||||||
#ggrid>a.sel,
|
#ggrid>a.sel,
|
||||||
#ggrid>a[tt].sel {
|
#ggrid>a[tt].sel {
|
||||||
border-top: 1px solid var(--g-fsel-b1);
|
border-top: 1px solid var(--g-fsel-b1);
|
||||||
@@ -1075,18 +1111,18 @@ html.y #widget.open {
|
|||||||
top: -.12em;
|
top: -.12em;
|
||||||
}
|
}
|
||||||
#wtico {
|
#wtico {
|
||||||
cursor: url(/.cpr/dd/4.png), pointer;
|
cursor: url(dd/4.png), pointer;
|
||||||
animation: cursor 500ms;
|
animation: cursor 500ms;
|
||||||
}
|
}
|
||||||
#wtico:hover {
|
#wtico:hover {
|
||||||
animation: cursor 500ms infinite;
|
animation: cursor 500ms infinite;
|
||||||
}
|
}
|
||||||
@keyframes cursor {
|
@keyframes cursor {
|
||||||
0% {cursor: url(/.cpr/dd/2.png), pointer}
|
0% {cursor: url(dd/2.png), pointer}
|
||||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
30% {cursor: url(dd/3.png), pointer}
|
||||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
50% {cursor: url(dd/4.png), pointer}
|
||||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
75% {cursor: url(dd/5.png), pointer}
|
||||||
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
85% {cursor: url(dd/4.png), pointer}
|
||||||
}
|
}
|
||||||
@keyframes spin {
|
@keyframes spin {
|
||||||
100% {transform: rotate(360deg)}
|
100% {transform: rotate(360deg)}
|
||||||
@@ -1294,6 +1330,10 @@ html.y #ops svg circle {
|
|||||||
padding: .3em .6em;
|
padding: .3em .6em;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
|
#noie {
|
||||||
|
color: #b60;
|
||||||
|
margin: 0 0 0 .5em;
|
||||||
|
}
|
||||||
.opbox {
|
.opbox {
|
||||||
padding: .5em;
|
padding: .5em;
|
||||||
border-radius: 0 .3em .3em 0;
|
border-radius: 0 .3em .3em 0;
|
||||||
@@ -2557,7 +2597,6 @@ html.b #u2conf a.b:hover {
|
|||||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||||
background: var(--u2-o-1h-bg);
|
background: var(--u2-o-1h-bg);
|
||||||
}
|
}
|
||||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
|
||||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||||
background: #777;
|
background: #777;
|
||||||
|
|||||||
@@ -8,8 +8,8 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
-<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
 {%- if css %}
 <link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
 {%- endif %}
@@ -36,7 +36,7 @@
 <input type="file" name="f" multiple /><br />
 <input type="submit" value="start upload">
 </form>
-<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
+<a id="bbsw" href="?b=u" rel="nofollow"><br />switch to basic browser</a>
 </div>

 <div id="op_mkdir" class="opview opbox act">
@@ -71,7 +71,7 @@
 <h1 id="path">
 <a href="#" id="entree">🌲</a>
 {%- for n in vpnodes %}
-<a href="/{{ n[0] }}">{{ n[1] }}</a>
+<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
 {%- endfor %}
 </h1>

@@ -85,7 +85,7 @@
 <div id="bdoc"></div>
 {%- endif %}

-<div id="pro" class="logue">{{ logues[0] }}</div>
+<div id="pro" class="logue">{{ "" if sb_lg else logues[0] }}</div>

 <table id="files">
 <thead>
@@ -119,9 +119,9 @@
 </tbody>
 </table>

-<div id="epi" class="logue">{{ logues[1] }}</div>
+<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>

-<h2><a href="/?h" id="goh">control-panel</a></h2>
+<h2 id="wfp"><a href="{{ r }}/?h" id="goh">control-panel</a></h2>

 <a href="#" id="repl">π</a>

@@ -134,7 +134,9 @@
 <div id="widget"></div>

 <script>
-var acct = "{{ acct }}",
+var SR = {{ r|tojson }},
+	TS = "{{ ts }}",
+	acct = "{{ acct }}",
 	perms = {{ perms }},
 	themes = {{ themes }},
 	dtheme = "{{ dtheme }}",
@@ -149,21 +151,24 @@
 	have_del = {{ have_del|tojson }},
 	have_unpost = {{ have_unpost }},
 	have_zip = {{ have_zip|tojson }},
+	sb_md = "{{ sb_md }}",
+	sb_lg = "{{ sb_lg }}",
 	lifetime = {{ lifetime }},
 	turbolvl = {{ turbolvl }},
+	frand = {{ frand|tojson }},
 	u2sort = "{{ u2sort }}",
 	have_emp = {{ have_emp|tojson }},
 	txt_ext = "{{ txt_ext }}",
-	{% if no_prism %}no_prism = 1,{% endif %}
+	logues = {{ logues|tojson if sb_lg else "[]" }},
 	readme = {{ readme|tojson }},
 	ls0 = {{ ls0|tojson }};

 document.documentElement.className = localStorage.theme || dtheme;
 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
-<script src="/.cpr/browser.js?_={{ ts }}"></script>
-<script src="/.cpr/up2k.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
 {%- if js %}
 <script src="{{ js }}?_={{ ts }}"></script>
 {%- endif %}
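The hardcoded /.cpr/ asset URLs in this template now get a {{ r }} prefix, and the same value is exported to the scripts as SR (with the cachebuster as TS), which suggests the UI can be served from a URL sub-path, e.g. behind a reverse proxy. A tiny rendering sketch (jinja2 assumed available; the prefix value "/files" is made up for illustration):

from jinja2 import Template

t = Template('<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">')
print(t.render(r="/files", ts="123"))
# -> <link rel="stylesheet" href="/files/.cpr/ui.css?_=123">
print(t.render(r="", ts="123"))
# -> <link rel="stylesheet" href="/.cpr/ui.css?_=123">  (empty prefix keeps the old root URLs)
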
@@ -94,6 +94,9 @@ var Ls = {
 	"ht_and": " and ",

 	"goh": "control-panel",
+	"gop": 'previous sibling">prev',
+	"gou": 'parent folder">up',
+	"gon": 'next folder">next',
 	"logout": "Logout ",
 	"access": " access",
 	"ot_close": "close submenu",
@@ -105,8 +108,9 @@ var Ls = {
 	"ot_msg": "msg: send a message to the server log",
 	"ot_mp": "media player options",
 	"ot_cfg": "configuration options",
-	"ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader<br /><br />during uploads, this icon becomes a progress indicator!',
-	"ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than the basic uploader<br /><br />during uploads, this icon becomes a progress indicator!',
+	"ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than [🎈] (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!',
+	"ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than [🎈] (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!',
+	"ot_noie": 'Please use Chrome / Firefox / Edge',

 	"ab_mkdir": "make directory",
 	"ab_mkdoc": "new markdown doc",
@@ -131,6 +135,7 @@ var Ls = {
 	"wt_next": "next track$NHotkey: L",

 	"ul_par": "parallel uploads:",
+	"ut_rand": "randomize filenames",
 	"ut_mt": "continue hashing other files while uploading$N$Nmaybe disable if your CPU or HDD is a bottleneck",
 	"ut_ask": "ask for confirmation before upload starts",
 	"ut_pot": "improve upload speed on slow devices$Nby making the UI less complex",
@@ -155,6 +160,9 @@ var Ls = {
 	"uct_q": "idle, pending",

 	"utl_name": "filename",
+	"utl_ulist": "list",
+	"utl_ucopy": "copy",
+	"utl_links": "links",
 	"utl_stat": "status",
 	"utl_prog": "progress",

@@ -251,12 +259,18 @@ var Ls = {
 	"mm_e404": "Could not play audio; error 404: File not found.",
 	"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
 	"mm_e5xx": "Could not play audio; server error ",
+	"mm_nof": "not finding any more audio files nearby",
+	"mm_hnf": "that song no longer exists",
+
+	"im_hnf": "that image no longer exists",
+
 	"f_chide": 'this will hide the column «{0}»\n\nyou can unhide columns in the settings tab',
 	"f_bigtxt": "this file is {0} MiB large -- really view as text?",
 	"fbd_more": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_more">show {2}</a> or <a href="#" id="bd_all">show all</a></div>',
 	"fbd_all": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_all">show all</a></div>',

+	"f_dls": 'the file links in the current folder have\nbeen changed into download links',
+
 	"ft_paste": "paste {0} items$NHotkey: ctrl-V",
 	"fr_eperm": 'cannot rename:\nyou do not have “move” permission in this folder',
 	"fd_eperm": 'cannot delete:\nyou do not have “delete” permission in this folder',
@@ -284,6 +298,7 @@ var Ls = {

 	"fd_ok": "delete OK",
 	"fd_err": "delete failed:\n",
+	"fd_none": "nothing was deleted; maybe blocked by server config (xbd)?",
 	"fd_busy": "deleting {0} items...\n\n{1}",
 	"fd_warn1": "DELETE these {0} items?",
 	"fd_warn2": "<b>Last chance!</b> No way to undo. Delete?",
@@ -341,6 +356,7 @@ var Ls = {
 	"s_a1": "specific metadata properties",

 	"md_eshow": "cannot show ",
+	"md_off": "[📜<em>readme</em>] disabled in [⚙️] -- document hidden",

 	"xhr403": "403: Access denied\n\ntry pressing F5, maybe you got logged out",
 	"cf_ok": "sorry about that -- DD" + wah + "oS protection kicked in\n\nthings should resume in about 30 sec\n\nif nothing happens, hit F5 to reload the page",
@@ -361,7 +377,10 @@ var Ls = {
 	"fz_zipc": "cp437 with crc32 computed early,$Nfor MS-DOS PKZIP v2.04g (october 1993)$N(takes longer to process before download can start)",

 	"un_m1": "you can delete your recent uploads below",
-	"un_upd": "refresh list",
+	"un_upd": "refresh",
+	"un_m4": "or share the files visible below:",
+	"un_ulist": "show",
+	"un_ucopy": "copy",
 	"un_flt": "optional filter: URL must contain",
 	"un_fclr": "clear filter",
 	"un_derr": 'unpost-delete failed:\n',
@@ -437,7 +456,7 @@ var Ls = {
 	"ur_aun": "All {0} uploads failed, sorry",
 	"ur_1sn": "File was NOT found on server",
 	"ur_asn": "The {0} files were NOT found on server",
-	"ur_um": "Finished;\n{0} uplads OK,\n{1} uploads failed, sorry",
+	"ur_um": "Finished;\n{0} uploads OK,\n{1} uploads failed, sorry",
 	"ur_sm": "Finished;\n{0} files found on server,\n{1} files NOT found on server",

 	"lang_set": "refresh to make the change take effect?",
@@ -534,6 +553,9 @@ var Ls = {
 	"ht_and": " og ",

 	"goh": "kontrollpanel",
+	"gop": 'naviger til mappen før denne">forr.',
+	"gou": 'naviger ett nivå opp">opp',
+	"gon": 'naviger til mappen etter denne">neste',
 	"logout": "Logg ut ",
 	"access": " tilgang",
 	"ot_close": "lukk verktøy",
@@ -545,8 +567,9 @@ var Ls = {
 	"ot_msg": "msg: send en beskjed til serverloggen",
 	"ot_mp": "musikkspiller-instillinger",
 	"ot_cfg": "andre innstillinger",
-	"ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren bup<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
-	"ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn den primitive opplasteren "bup"<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
+	"ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈] (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
+	"ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈] (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
+	"ot_noie": 'Fungerer mye bedre i Chrome / Firefox / Edge',

 	"ab_mkdir": "lag mappe",
 	"ab_mkdoc": "nytt dokument",
@@ -563,14 +586,15 @@ var Ls = {
 	"wt_selinv": "inverter utvalg",
 	"wt_selzip": "last ned de valgte filene som et arkiv",
 	"wt_seldl": "last ned de valgte filene$NSnarvei: Y",
-	"wt_npirc": "kopier sang-info (irc-formattert)",
-	"wt_nptxt": "kopier sang-info",
+	"wt_npirc": "kopiér sang-info (irc-formattert)",
+	"wt_nptxt": "kopiér sang-info",
 	"wt_grid": "bytt mellom ikoner og listevisning$NSnarvei: G",
 	"wt_prev": "forrige sang$NSnarvei: J",
 	"wt_play": "play / pause$NSnarvei: P",
 	"wt_next": "neste sang$NSnarvei: L",

 	"ul_par": "samtidige handl.:",
+	"ut_rand": "finn opp nye tilfeldige filnavn",
 	"ut_mt": "fortsett å befare køen mens opplastning foregår$N$Nskru denne av dersom du har en$Ntreg prosessor eller harddisk",
 	"ut_ask": "bekreft filutvalg før opplastning starter",
 	"ut_pot": "forbedre ytelsen på trege enheter ved å$Nforenkle brukergrensesnittet",
@@ -595,6 +619,9 @@ var Ls = {
 	"uct_q": "køen",

 	"utl_name": "filnavn",
+	"utl_ulist": "vis",
+	"utl_ucopy": "kopiér",
+	"utl_links": "lenker",
 	"utl_stat": "status",
 	"utl_prog": "fremdrift",

@@ -691,12 +718,18 @@ var Ls = {
 	"mm_e404": "Avspilling feilet: Fil ikke funnet.",
 	"mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.",
 	"mm_e5xx": "Avspilling feilet: ",
+	"mm_nof": "finner ikke flere sanger i nærheten",
+	"mm_hnf": "sangen finnes ikke lenger",
+
+	"im_hnf": "bildet finnes ikke lenger",
+
 	"f_chide": 'dette vil skjule kolonnen «{0}»\n\nfanen for "andre innstillinger" lar deg vise kolonnen igjen',
 	"f_bigtxt": "denne filen er hele {0} MiB -- vis som tekst?",
 	"fbd_more": '<div id="blazy">viser <code>{0}</code> av <code>{1}</code> filer; <a href="#" id="bd_more">vis {2}</a> eller <a href="#" id="bd_all">vis alle</a></div>',
 	"fbd_all": '<div id="blazy">viser <code>{0}</code> av <code>{1}</code> filer; <a href="#" id="bd_all">vis alle</a></div>',

+	"f_dls": 'linkene i denne mappen er nå\nomgjort til nedlastningsknapper',
+
 	"ft_paste": "Lim inn {0} filer$NSnarvei: ctrl-V",
 	"fr_eperm": 'kan ikke endre navn:\ndu har ikke “move”-rettigheten i denne mappen',
 	"fd_eperm": 'kan ikke slette:\ndu har ikke “delete”-rettigheten i denne mappen',
@@ -724,6 +757,7 @@ var Ls = {

 	"fd_ok": "sletting OK",
 	"fd_err": "sletting feilet:\n",
+	"fd_none": "ingenting ble slettet; kanskje avvist av serverkonfigurasjon (xbd)?",
 	"fd_busy": "sletter {0} filer...\n\n{1}",
 	"fd_warn1": "SLETT disse {0} filene?",
 	"fd_warn2": "<b>Siste sjanse!</b> Dette kan ikke angres. Slett?",
@@ -781,6 +815,7 @@ var Ls = {
 	"s_a1": "konkrete egenskaper",

 	"md_eshow": "kan ikke vise ",
+	"md_off": "[📜<em>readme</em>] er avskrudd i [⚙️] -- dokument skjult",

 	"xhr403": "403: Tilgang nektet\n\nkanskje du ble logget ut? prøv å trykk F5",
 	"cf_ok": "beklager -- liten tilfeldig kontroll, alt OK\n\nting skal fortsette om ca. 30 sekunder\n\nhvis ikkeno skjer, trykk F5 for å laste siden på nytt",
@@ -801,7 +836,10 @@ var Ls = {
 	"fz_zipc": "cp437 med tidlig crc32,$Nfor MS-DOS PKZIP v2.04g (oktober 1993)$N(øker behandlingstid på server)",

 	"un_m1": "nedenfor kan du angre / slette filer som du nylig har lastet opp",
-	"un_upd": "oppdater listen",
+	"un_upd": "oppdater",
+	"un_m4": "eller hvis du vil dele nedlastnings-lenkene:",
+	"un_ulist": "vis",
+	"un_ucopy": "kopiér",
 	"un_flt": "valgfritt filter: filnavn / filsti må inneholde",
 	"un_fclr": "nullstill filter",
 	"un_derr": 'unpost-sletting feilet:\n',
@@ -841,7 +879,7 @@ var Ls = {
 	"u_hashdone": 'befaring ferdig',
 	"u_hashing": 'les',
 	"u_fixed": "OK! Løste seg 👍",
-	"u_cuerr": "kunne ikke laste opp del {0} av {1};\nsikkert harmløst, fortsetter\n\nfil: {2}",
+	"u_cuerr": "kunne ikke laste opp del {0} av {1};\nsikkert greit, fortsetter\n\nfil: {2}",
 	"u_cuerr2": "server nektet opplastningen (del {0} av {1});\nprøver igjen senere\n\nfil: {2}\n\nerror ",
 	"u_ehstmp": "prøver igjen; se mld nederst",
 	"u_ehsfin": "server nektet forespørselen om å ferdigstille filen; prøver igjen...",
@@ -904,6 +942,7 @@ ebi('ops').innerHTML = (
 	'<a href="#" data-perm="write" data-dest="msg" tt="' + L.ot_msg + '">📟</a>' +
 	'<a href="#" data-dest="player" tt="' + L.ot_mp + '">🎺</a>' +
 	'<a href="#" data-dest="cfg" tt="' + L.ot_cfg + '">⚙️</a>' +
+	(IE ? '<span id="noie">' + L.ot_noie + '</span>' : '') +
 	'<div id="opdesc"></div>'
 );

|||||||
|
|
||||||
'<table id="u2conf">\n' +
|
'<table id="u2conf">\n' +
|
||||||
' <tr>\n' +
|
' <tr>\n' +
|
||||||
' <td class="c"><br />' + L.ul_par + '</td>\n' +
|
' <td class="c" data-perm="read"><br />' + L.ul_par + '</td>\n' +
|
||||||
' <td class="c" rowspan="2">\n' +
|
' <td class="c" rowspan="2">\n' +
|
||||||
' <input type="checkbox" id="multitask" />\n' +
|
' <input type="checkbox" id="multitask" />\n' +
|
||||||
' <label for="multitask" tt="' + L.ut_mt + '">🏃</label>\n' +
|
' <label for="multitask" tt="' + L.ut_mt + '">🏃</label>\n' +
|
||||||
@@ -953,8 +992,8 @@ ebi('op_up2k').innerHTML = (
|
|||||||
' <label for="potato" tt="' + L.ut_pot + '">🥔</label>\n' +
|
' <label for="potato" tt="' + L.ut_pot + '">🥔</label>\n' +
|
||||||
' </td>\n' +
|
' </td>\n' +
|
||||||
' <td class="c" rowspan="2">\n' +
|
' <td class="c" rowspan="2">\n' +
|
||||||
' <input type="checkbox" id="ask_up" />\n' +
|
' <input type="checkbox" id="u2rand" />\n' +
|
||||||
' <label for="ask_up" tt="' + L.ut_ask + '">💭</label>\n' +
|
' <label for="u2rand" tt="' + L.ut_rand + '">🎲</label>\n' +
|
||||||
' </td>\n' +
|
' </td>\n' +
|
||||||
' <td class="c" data-perm="read" data-dep="idx" rowspan="2">\n' +
|
' <td class="c" data-perm="read" data-dep="idx" rowspan="2">\n' +
|
||||||
' <input type="checkbox" id="fsearch" />\n' +
|
' <input type="checkbox" id="fsearch" />\n' +
|
||||||
@@ -964,7 +1003,7 @@ ebi('op_up2k').innerHTML = (
|
|||||||
' <td data-perm="read" rowspan="2" id="u2c3w"></td>\n' +
|
' <td data-perm="read" rowspan="2" id="u2c3w"></td>\n' +
|
||||||
' </tr>\n' +
|
' </tr>\n' +
|
||||||
' <tr>\n' +
|
' <tr>\n' +
|
||||||
' <td class="c">\n' +
|
' <td class="c" data-perm="read">\n' +
|
||||||
' <a href="#" class="b" id="nthread_sub">–</a><input\n' +
|
' <a href="#" class="b" id="nthread_sub">–</a><input\n' +
|
||||||
' class="txtbox" id="nthread" value="2" tt="' + L.ut_par + '"/><a\n' +
|
' class="txtbox" id="nthread" value="2" tt="' + L.ut_par + '"/><a\n' +
|
||||||
' href="#" class="b" id="nthread_add">+</a><br /> \n' +
|
' href="#" class="b" id="nthread_add">+</a><br /> \n' +
|
||||||
@@ -1002,7 +1041,7 @@ ebi('op_up2k').innerHTML = (
|
|||||||
'<div id="u2tabw" class="na"><table id="u2tab">\n' +
|
'<div id="u2tabw" class="na"><table id="u2tab">\n' +
|
||||||
' <thead>\n' +
|
' <thead>\n' +
|
||||||
' <tr>\n' +
|
' <tr>\n' +
|
||||||
' <td>' + L.utl_name + '</td>\n' +
|
' <td>' + L.utl_name + ' (<a href="#" id="luplinks">' + L.utl_ulist + '</a>/<a href="#" id="cuplinks">' + L.utl_ucopy + '</a>' + L.utl_links + ')</td>\n' +
|
||||||
' <td>' + L.utl_stat + '</td>\n' +
|
' <td>' + L.utl_stat + '</td>\n' +
|
||||||
' <td>' + L.utl_prog + '</td>\n' +
|
' <td>' + L.utl_prog + '</td>\n' +
|
||||||
' </tr>\n' +
|
' </tr>\n' +
|
||||||
@@ -1063,6 +1102,7 @@ ebi('op_cfg').innerHTML = (
|
|||||||
'<div>\n' +
|
'<div>\n' +
|
||||||
' <h3>' + L.cl_uopts + '</h3>\n' +
|
' <h3>' + L.cl_uopts + '</h3>\n' +
|
||||||
' <div>\n' +
|
' <div>\n' +
|
||||||
|
' <a id="ask_up" class="tgl btn" href="#" tt="' + L.ut_ask + '">💭</a>\n' +
|
||||||
' <a id="hashw" class="tgl btn" href="#" tt="' + L.cut_mt + '">mt</a>\n' +
|
' <a id="hashw" class="tgl btn" href="#" tt="' + L.cut_mt + '">mt</a>\n' +
|
||||||
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="' + L.cut_turbo + '">turbo</a>\n' +
|
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="' + L.cut_turbo + '">turbo</a>\n' +
|
||||||
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="' + L.cut_datechk + '">date-chk</a>\n' +
|
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="' + L.cut_datechk + '">date-chk</a>\n' +
|
||||||
@@ -1190,6 +1230,17 @@ function goto(dest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
var SBW, SBH; // scrollbar size
|
||||||
|
(function () {
|
||||||
|
var el = mknod('div');
|
||||||
|
el.style.cssText = 'overflow:scroll;width:100px;height:100px';
|
||||||
|
document.body.appendChild(el);
|
||||||
|
SBW = el.offsetWidth - el.clientWidth;
|
||||||
|
SBH = el.offsetHeight - el.clientHeight;
|
||||||
|
document.body.removeChild(el);
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
var have_webp = sread('have_webp');
|
var have_webp = sread('have_webp');
|
||||||
(function () {
|
(function () {
|
||||||
if (have_webp !== null)
|
if (have_webp !== null)
|
||||||
@@ -1269,6 +1320,7 @@ var mpl = (function () {
 	var r = {
 		"pb_mode": (sread('pb_mode') || 'next').split('-')[0],
 		"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
+		'traversals': 0,
 	};
 	bcfg_bind(r, 'preload', 'au_preload', true);
 	bcfg_bind(r, 'fullpre', 'au_fullpre', false);
@@ -1441,9 +1493,9 @@ try {
 catch (ex) { }


-var re_au_native = can_ogg ? /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i :
-	have_acode ? /\.(opus|m4a|aac|mp3|wav|flac)$/i : /\.(m4a|aac|mp3|wav|flac)$/i,
-	re_au_all = /\.(aac|m4a|ogg|opus|flac|alac|mp3|mp2|ac3|dts|wma|ra|wav|aif|aiff|au|alaw|ulaw|mulaw|amr|gsm|ape|tak|tta|wv|mpc)$/i;
+var re_au_native = can_ogg ? /\.(aac|flac|m4a|mp3|ogg|opus|wav)$/i :
+	have_acode ? /\.(aac|flac|m4a|mp3|opus|wav)$/i : /\.(aac|flac|m4a|mp3|wav)$/i,
+	re_au_all = /\.(aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk)$/i;


 // extract songs + add play column
@@ -1687,29 +1739,9 @@ var widget = (function () {

 		m += '[' + cv + s2ms(mp.au.currentTime) + ck + '/' + cv + s2ms(mp.au.duration) + ck + ']';

-		var o = mknod('input');
-		o.style.cssText = 'position:fixed;top:45%;left:48%;padding:1em;z-index:9';
-		o.value = m;
-		document.body.appendChild(o);
-
-		var cln = function () {
-			o.value = 'copied to clipboard ';
-			setTimeout(function () {
-				document.body.removeChild(o);
-			}, 500);
-		};
-		var fb = function () {
-			console.log('fb');
-			o.focus();
-			o.select();
-			document.execCommand("copy");
-			cln();
-		};
-		try {
-			// https only
-			navigator.clipboard.writeText(m).then(cln, fb);
-		}
-		catch (ex) { fb(); }
+		cliptxt(m, function () {
+			toast.ok(1, 'copied to clipboard', null, 'top');
+		});
 	};
 	r.set(sread('au_open') == 1);
 	setTimeout(function () {
@@ -1775,6 +1807,9 @@ var pbar = (function () {
 		r.wurl = url;
 		var img = new Image();
 		img.onload = function () {
+			if (r.wurl != url)
+				return;
+
 			r.wimg = img;
 			r.onresize();
 		};
@@ -2067,7 +2102,15 @@ function song_skip(n) {
 }
 function next_song(e) {
 	ev(e);
-	return song_skip(1);
+	if (mp.order.length) {
+		mpl.traversals = 0;
+		return song_skip(1);
+	}
+	if (mpl.traversals++ < 5) {
+		treectl.ls_cb = next_song;
+		return tree_neigh(1);
+	}
+	toast.inf(10, L.mm_nof);
 }
 function prev_song(e) {
 	ev(e);
@@ -2078,8 +2121,13 @@ function prev_song(e) {
 	return song_skip(-1);
 }
 function dl_song() {
-	if (!mp || !mp.au)
-		return;
+	if (!mp || !mp.au) {
+		var o = QSA('#files a[id]');
+		for (var a = 0; a < o.length; a++)
+			o[a].setAttribute('download', '');
+
+		return toast.inf(10, L.f_dls);
+	}

 	var url = mp.tracks[mp.au.tid];
 	url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
@@ -2551,7 +2599,7 @@ function play(tid, is_ev, seek) {
 	if ((tn + '').indexOf('f-') === 0) {
 		tn = mp.order.indexOf(tn);
 		if (tn < 0)
-			return;
+			return toast.warn(10, L.mm_hnf);
 	}

 	if (tn >= mp.order.length) {
@@ -2819,6 +2867,9 @@ function eval_hash() {
 			clearInterval(t);
 			baguetteBox.urltime(ts);
 			var im = QS('#ggrid a[ref="' + id + '"]');
+			if (!im)
+				return toast.warn(10, L.im_hnf);
+
 			im.click();
 			im.scrollIntoView();
 		}, 50);
@@ -2865,7 +2916,7 @@ function eval_hash() {

 	// folder nav
 	ebi('goh').parentElement.appendChild(mknod('span', null,
-		'<a href="#" id="gop">prev</a>/<a href="#" id="gou">up</a>/<a href="#" id="gon">next</a>'));
+		'<a href="#" id="gop" tt="' + L.gop + '</a>/<a href="#" id="gou" tt="' + L.gou + '</a>/<a href="#" id="gon" tt="' + L.gon + '</a>'));
 	ebi('gop').onclick = function () { tree_neigh(-1); }
 	ebi('gon').onclick = function () { tree_neigh(1); }
 	ebi('gou').onclick = function () { tree_up(true); }
@@ -3377,7 +3428,7 @@ var fileman = (function () {
 		}

 		var xhr = new XHR();
-		xhr.open('GET', f[0].src + '?move=' + dst, true);
+		xhr.open('POST', f[0].src + '?move=' + dst, true);
 		xhr.onload = xhr.onerror = rename_cb;
 		xhr.send();
 	}
@@ -3397,18 +3448,20 @@ var fileman = (function () {
 		if (!sel.length)
 			return toast.err(3, L.fd_emore);

-		function deleter() {
+		function deleter(err) {
 			var xhr = new XHR(),
 				vp = vps.shift();

 			if (!vp) {
-				toast.ok(2, L.fd_ok);
+				if (err !== 'xbd')
+					toast.ok(2, L.fd_ok);
+
 				treectl.goto(get_evpath());
 				return;
 			}
 			toast.show('inf r', 0, esc(L.fd_busy.format(vps.length + 1, vp)), 'r');

-			xhr.open('GET', vp + '?delete', true);
+			xhr.open('POST', vp + '?delete', true);
 			xhr.onload = xhr.onerror = delete_cb;
 			xhr.send();
 		}
@@ -3418,6 +3471,10 @@ var fileman = (function () {
 				toast.err(9, L.fd_err + msg);
 				return;
 			}
+			if (this.responseText.indexOf('deleted 0 files (and 0') + 1) {
+				toast.err(9, L.fd_none);
+				return deleter('xbd');
+			}
 			deleter();
 		}

@@ -3516,7 +3573,7 @@ var fileman = (function () {

 			var dst = get_evpath() + vp.split('/').pop();

-			xhr.open('GET', vp + '?move=' + dst, true);
+			xhr.open('POST', vp + '?move=' + dst, true);
 			xhr.onload = xhr.onerror = paste_cb;
 			xhr.send();
 		}
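The file-manager's rename, delete and paste calls switch from GET to POST for the ?move and ?delete operations, so the state-changing requests are no longer plain GETs. A minimal client-side sketch of the same two calls using python requests (hypothetical host and paths; authentication omitted):

import requests

base = "http://127.0.0.1:3923"  # hypothetical server address

# delete a file (mirrors xhr.open('POST', vp + '?delete'))
requests.post(base + "/stuff/old.bin?delete")

# move a file into another folder (mirrors xhr.open('POST', vp + '?move=' + dst))
requests.post(base + "/stuff/a.txt?move=/archive/a.txt")
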
@@ -3641,7 +3698,7 @@ var showfile = (function () {
 		qsr('#prism_css');
 		var el = mknod('link', 'prism_css');
 		el.rel = 'stylesheet';
-		el.href = '/.cpr/deps/prism' + (light ? '' : 'd') + '.css';
+		el.href = SR + '/.cpr/deps/prism' + (light ? '' : 'd') + '.css';
 		document.head.appendChild(el);
 	};

@@ -3769,7 +3826,7 @@ var showfile = (function () {
 		if (!defer)
 			fun(el.firstChild);
 		else
-			import_js('/.cpr/deps/prism.js', function () { fun(); });
+			import_js(SR + '/.cpr/deps/prism.js', function () { fun(); });
 	}
 }

@@ -4053,7 +4110,7 @@ var thegrid = (function () {
 	var oth = ebi(this.getAttribute('ref')),
 		href = noq_href(this),
 		aplay = ebi('a' + oth.getAttribute('id')),
-		is_img = /\.(gif|jpe?g|png|webp|webm|mkv|mp4)(\?|$)/i.test(href),
+		is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href),
 		is_dir = href.endsWith('/'),
 		in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)),
 		have_sel = QS('#files tr.sel'),
@@ -4175,10 +4232,10 @@ var thegrid = (function () {
 		if (r.thumbs) {
 			ihref += '?th=' + (have_webp ? 'w' : 'j');
 			if (href == "#")
-				ihref = '/.cpr/ico/⏏️';
+				ihref = SR + '/.cpr/ico/⏏️';
 		}
 		else if (isdir) {
-			ihref = '/.cpr/ico/folder';
+			ihref = SR + '/.cpr/ico/folder';
 		}
 		else {
 			var ar = href.split('.');
@@ -4203,7 +4260,7 @@ var thegrid = (function () {
 			else
 				ext = "unk";
 			}
-			ihref = '/.cpr/ico/' + ext;
+			ihref = SR + '/.cpr/ico/' + ext;
 		}
 		ihref += (ihref.indexOf('?') > 0 ? '&' : '?') + 'cache=i';

@@ -4513,7 +4570,9 @@ document.onkeydown = function (e) {
 		return seek_au_rel(n) || true;

 	if (k == 'KeyY')
-		return msel.getsel().length ? ebi('seldl').click() : dl_song();
+		return msel.getsel().length ? ebi('seldl').click() :
+			showfile.active() ? ebi('dldoc').click() :
+				dl_song();

 	n = k == 'KeyI' ? -1 : k == 'KeyK' ? 1 : 0;
 	if (n !== 0)
@@ -4778,7 +4837,7 @@ document.onkeydown = function (e) {
 	clearTimeout(search_timeout);

 	var xhr = new XHR();
-	xhr.open('POST', '/?srch', true);
+	xhr.open('POST', SR + '/?srch', true);
 	xhr.setRequestHeader('Content-Type', 'text/plain');
 	xhr.onload = xhr.onerror = xhr_search_results;
 	xhr.ts = Date.now();
@@ -5172,8 +5231,8 @@ var treectl = (function () {

 	function rendertree(res, ts, top0, dst, rst) {
 		var cur = ebi('treeul').getAttribute('ts');
-		if (cur && parseInt(cur) > ts) {
-			console.log("reject tree");
+		if (cur && parseInt(cur) > ts + 20 && QS('#treeul>li>a+a')) {
+			console.log("reject tree; " + cur + " / " + (ts - cur));
 			return;
 		}
 		ebi('treeul').setAttribute('ts', ts);
@@ -5317,7 +5376,12 @@ var treectl = (function () {
 			treegrow.call(this.previousSibling, e);
 			return;
 		}
-		r.reqls(this.getAttribute('href'), true);
+		var href = this.getAttribute('href');
+		if (R && !href.startsWith(SR)) {
+			window.location = href;
+			return;
+		}
+		r.reqls(href, true);
 		r.dir_cb = tree_scrollto;
 		thegrid.setvis(true);
 	}
@@ -5392,7 +5456,7 @@ var treectl = (function () {
 		for (var a = 0; a < res.dirs.length; a++)
 			dirs.push(res.dirs[a].href.split('/')[0].split('?')[0]);

-		rendertree({ "a": dirs }, Date.now(), ".", get_evpath());
+		rendertree({ "a": dirs }, this.ts, ".", get_evpath());
 	}

 	r.gentab(this.top, res);
@@ -5400,8 +5464,8 @@ var treectl = (function () {
 	despin('#files');
 	despin('#gfiles');

-	ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
-	ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
+	sandbox(ebi('pro'), sb_lg, '', res.logues ? res.logues[0] || "" : "");
+	sandbox(ebi('epi'), sb_lg, '', res.logues ? res.logues[1] || "" : "");

 	clmod(ebi('epi'), 'mdo');
 	if (res.readme)
@@ -5509,7 +5573,7 @@ var treectl = (function () {
 		have_up2k_idx = res.idx;
 		have_tags_idx = res.itag;
 		lifetime = res.lifetime;
-		apply_perms(res.perms);
+		apply_perms(res);
 		fileman.render();
 	}
 	if (sel.length)
@@ -5541,7 +5605,7 @@ var treectl = (function () {
 	qsr('#bbsw');
 	if (ls0 === null) {
 		var xhr = new XHR();
-		xhr.open('GET', '/?am_js', true);
+		xhr.open('GET', SR + '/?am_js', true);
 		xhr.send();

 		r.ls_cb = showfile.addlinks;
@@ -5706,8 +5770,40 @@ function despin(sel) {
 }


-function apply_perms(newperms) {
-	perms = newperms || [];
+var wfp_debounce = (function () {
+	var r = { 'n': 0, 't': 0 };
+
+	r.hide = function () {
+		if (!sb_lg && !sb_md)
+			return;
+
+		if (++r.n <= 1) {
+			r.n = 1;
+			clearTimeout(r.t);
+			r.t = setTimeout(r.reset, 300);
+			ebi('wfp').style.opacity = 0.1;
+		}
+	};
+	r.show = function () {
+		if (!sb_lg && !sb_md)
+			return;
+
+		if (--r.n <= 0) {
+			r.n = 0;
+			clearTimeout(r.t);
+			ebi('wfp').style.opacity = 'unset';
+		}
+	};
+	r.reset = function () {
+		r.n = 0;
+		r.show();
+	};
+	return r;
+})();
+
+
+function apply_perms(res) {
+	perms = res.perms || [];

 	var a = QS('#ops a[data-dest="up2k"]');
 	if (have_up2k_idx) {
@@ -5739,7 +5835,8 @@ function apply_perms(newperms) {

 	ebi('acc_info').innerHTML = '<span id="srv_info2"><span>' + srvinf +
 		'</span></span><span' + aclass + axs + L.access + '</span>' + (acct != '*' ?
-		'<a href="/?pw=x">' + L.logout + acct + '</a>' : '<a href="/?h">Login</a>');
+		'<a href="' + SR + '/?pw=x">' + L.logout + acct + '</a>' :
+		'<a href="?h">Login</a>');

 	var o = QSA('#ops>a[data-perm]');
 	for (var a = 0; a < o.length; a++) {
@@ -5780,6 +5877,8 @@ function apply_perms(newperms) {
 			(have_write || tds[a].getAttribute('data-perm') == 'read') ?
 			'table-cell' : 'none';
 	}
+	if (res.frand)
+		ebi('u2rand').parentNode.style.display = 'none';

 	if (up2k)
 		up2k.set_fsearch();
@@ -6534,6 +6633,59 @@ var msel = (function () {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
|
|
||||||
|
var globalcss = (function () {
|
||||||
|
var ret = '';
|
||||||
|
return function () {
|
||||||
|
if (ret)
|
||||||
|
return ret;
|
||||||
|
|
||||||
|
var dcs = document.styleSheets;
|
||||||
|
for (var a = 0; a < dcs.length; a++) {
|
||||||
|
var base = dcs[a].href,
|
||||||
|
ds = dcs[a].cssRules;
|
||||||
|
|
||||||
|
if (!base)
|
||||||
|
continue;
|
||||||
|
|
||||||
|
base = base.replace(/[^/]+$/, '');
|
||||||
|
for (var b = 0; b < ds.length; b++) {
|
||||||
|
var css = ds[b].cssText.split(/\burl\(/g);
|
||||||
|
ret += css[0];
|
||||||
|
for (var c = 1; c < css.length; c++) {
|
||||||
|
var delim = (/^["']/.exec(css[c])) ? css[c].slice(0, 1) : '';
|
||||||
|
ret += 'url(' + delim + ((css[c].slice(0, 8).indexOf('://') + 1 || css[c].startsWith('/')) ? '' : base) +
|
||||||
|
css[c].slice(delim ? 1 : 0);
|
||||||
|
}
|
||||||
|
ret += '\n';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
var sandboxjs = (function () {
|
||||||
|
var ret = '',
|
||||||
|
busy = false,
|
||||||
|
url = SR + '/.cpr/util.js?_=' + TS,
|
||||||
|
tag = '<script src="' + url + '"></script>';
|
||||||
|
|
||||||
|
return function () {
|
||||||
|
if (ret || busy)
|
||||||
|
return ret || tag;
|
||||||
|
|
||||||
|
var xhr = new XHR();
|
||||||
|
xhr.open('GET', url, true);
|
||||||
|
xhr.onload = function () {
|
||||||
|
if (this.status == 200)
|
||||||
|
ret = '<script>' + this.responseText + '</script>';
|
||||||
|
};
|
||||||
|
xhr.send();
|
||||||
|
busy = true;
|
||||||
|
return tag;
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
function show_md(md, name, div, url, depth) {
|
function show_md(md, name, div, url, depth) {
|
||||||
var errmsg = L.md_eshow + name + ':\n\n',
|
var errmsg = L.md_eshow + name + ':\n\n',
|
||||||
now = get_evpath();
|
now = get_evpath();
|
||||||
@@ -6542,18 +6694,20 @@ function show_md(md, name, div, url, depth) {
|
|||||||
if (url != now)
|
if (url != now)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
|
wfp_debounce.hide();
|
||||||
if (!marked) {
|
if (!marked) {
|
||||||
if (depth)
|
if (depth)
|
||||||
return toast.warn(10, errmsg + 'failed to load marked.js')
|
return toast.warn(10, errmsg + 'failed to load marked.js')
|
||||||
|
|
||||||
return import_js('/.cpr/deps/marked.js', function () {
|
wfp_debounce.n--;
|
||||||
|
return import_js(SR + '/.cpr/deps/marked.js', function () {
|
||||||
show_md(md, name, div, url, 1);
|
show_md(md, name, div, url, 1);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
md_plug = {}
|
md_plug = {}
|
||||||
md = load_md_plug(md, 'pre');
|
md = load_md_plug(md, 'pre');
|
||||||
md = load_md_plug(md, 'post');
|
md = load_md_plug(md, 'post', sb_md);
|
||||||
|
|
||||||
var marked_opts = {
|
var marked_opts = {
|
||||||
headerPrefix: 'md-',
|
headerPrefix: 'md-',
|
||||||
@@ -6566,7 +6720,8 @@ function show_md(md, name, div, url, depth) {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
clmod(div, 'mdo', 1);
|
clmod(div, 'mdo', 1);
|
||||||
div.innerHTML = marked.parse(md, marked_opts);
|
if (sandbox(div, sb_md, 'mdo', marked.parse(md, marked_opts)))
|
||||||
|
return;
|
||||||
|
|
||||||
ext = md_plug.post;
|
ext = md_plug.post;
|
||||||
ext = ext ? [ext[0].render, ext[0].render2] : [];
|
ext = ext ? [ext[0].render, ext[0].render2] : [];
|
||||||
@@ -6600,6 +6755,7 @@ function show_md(md, name, div, url, depth) {
|
|||||||
catch (ex) {
|
catch (ex) {
|
||||||
toast.warn(10, errmsg + ex);
|
toast.warn(10, errmsg + ex);
|
||||||
}
|
}
|
||||||
|
wfp_debounce.show();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -6612,7 +6768,7 @@ function set_tabindex() {
|
|||||||
|
|
||||||
function show_readme(md) {
|
function show_readme(md) {
|
||||||
if (!treectl.ireadme)
|
if (!treectl.ireadme)
|
||||||
return;
|
return sandbox(ebi('epi'), '', '', 'a');
|
||||||
|
|
||||||
show_md(md, 'README.md', ebi('epi'));
|
show_md(md, 'README.md', ebi('epi'));
|
||||||
}
|
}
|
||||||
@@ -6620,6 +6776,94 @@ if (readme)
|
|||||||
show_readme(readme);
|
show_readme(readme);
|
||||||
|
|
||||||
|
|
||||||
|
function sandbox(tgt, rules, cls, html) {
|
||||||
|
if (!treectl.ireadme) {
|
||||||
|
tgt.innerHTML = html ? L.md_off : '';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!rules || (html || '').indexOf('<') == -1) {
|
||||||
|
tgt.innerHTML = html;
|
||||||
|
clmod(tgt, 'sb');
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
clmod(tgt, 'sb', 1);
|
||||||
|
|
||||||
|
var tid = tgt.getAttribute('id'),
|
||||||
|
hash = location.hash,
|
||||||
|
want = '';
|
||||||
|
|
||||||
|
if (!cls)
|
||||||
|
wfp_debounce.hide();
|
||||||
|
|
||||||
|
if (hash.startsWith('#md-'))
|
||||||
|
want = hash.slice(1);
|
||||||
|
|
||||||
|
var env = '', tags = QSA('script');
|
||||||
|
for (var a = 0; a < tags.length; a++) {
|
||||||
|
var js = tags[a].innerHTML;
|
||||||
|
if (js && js.indexOf('have_up2k_idx') + 1)
|
||||||
|
env = js.split(/\blogues *=/)[0] + 'a;';
|
||||||
|
}
|
||||||
|
|
||||||
|
html = '<html class="iframe ' + document.documentElement.className + '"><head><style>' + globalcss() +
|
||||||
|
'</style><base target="_parent"></head><body id="b" class="logue ' + cls + '">' + html +
|
||||||
|
'<script>' + env + '</script>' + sandboxjs() +
|
||||||
|
'<script>var d=document.documentElement,' +
|
||||||
|
'loc=new URL("' + location.href.split('?')[0] + '");' +
|
||||||
|
'function say(m){window.parent.postMessage(m,"*")};' +
|
||||||
|
'setTimeout(function(){var its=0,pih=-1,f=function(){' +
|
||||||
|
'var ih=2+Math.min(parseInt(getComputedStyle(d).height),d.scrollHeight);' +
|
||||||
|
'if(ih!=pih){pih=ih;say("iheight #' + tid + ' "+ih,"*")}' +
|
||||||
|
'if(++its<20)return setTimeout(f,20);if(its==20)setInterval(f,200)' +
|
||||||
|
'};f();' +
|
||||||
|
'window.onfocus=function(){say("igot #' + tid + '")};' +
|
||||||
|
'window.onblur=function(){say("ilost #' + tid + '")};' +
|
||||||
|
'var el="' + want + '"&&ebi("' + want + '");' +
|
||||||
|
'if(el)say("iscroll #' + tid + ' "+el.offsetTop);' +
|
||||||
|
(cls == 'mdo' && md_plug.post ?
|
||||||
|
'const x={' + md_plug.post + '};' +
|
||||||
|
'if(x.render)x.render(ebi("b"));' +
|
||||||
|
'if(x.render2)x.render2(ebi("b"));' : '') +
|
||||||
|
'},1)</script></body></html>';
|
||||||
|
|
||||||
|
var fr = mknod('iframe');
|
||||||
|
fr.setAttribute('sandbox', rules ? 'allow-' + rules.replace(/ /g, ' allow-') : '');
|
||||||
|
fr.setAttribute('srcdoc', html);
|
||||||
|
tgt.innerHTML = '';
|
||||||
|
tgt.appendChild(fr);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
window.addEventListener("message", function (e) {
|
||||||
|
try {
|
||||||
|
console.log('msg:' + e.data);
|
||||||
|
var t = e.data.split(/ /g);
|
||||||
|
if (t[0] == 'iheight') {
|
||||||
|
var el = QS(t[1] + '>iframe');
|
||||||
|
el.style.height = (parseInt(t[2]) + SBH) + 'px';
|
||||||
|
el.style.visibility = 'unset';
|
||||||
|
wfp_debounce.show();
|
||||||
|
}
|
||||||
|
else if (t[0] == 'iscroll') {
|
||||||
|
var y1 = QS(t[1]).offsetTop,
|
||||||
|
y2 = parseInt(t[2]);
|
||||||
|
console.log(y1, y2);
|
||||||
|
document.documentElement.scrollTop = y1 + y2;
|
||||||
|
}
|
||||||
|
else if (t[0] == 'igot' || t[0] == 'ilost') {
|
||||||
|
clmod(QS(t[1] + '>iframe'), 'focus', t[0] == 'igot');
|
||||||
|
}
|
||||||
|
} catch (ex) {
|
||||||
|
console.log('msg-err: ' + ex);
|
||||||
|
}
|
||||||
|
}, false);
|
||||||
|
|
||||||
|
|
||||||
|
if (sb_lg && logues.length) {
|
||||||
|
sandbox(ebi('pro'), sb_lg, '', logues[0]);
|
||||||
|
sandbox(ebi('epi'), sb_lg, '', logues[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
(function () {
|
(function () {
|
||||||
try {
|
try {
|
||||||
var tr = ebi('files').tBodies[0].rows;
|
var tr = ebi('files').tBodies[0].rows;
|
||||||
@@ -6646,6 +6890,7 @@ function ev_row_tgl(e) {
|
|||||||
var unpost = (function () {
|
var unpost = (function () {
|
||||||
ebi('op_unpost').innerHTML = (
|
ebi('op_unpost').innerHTML = (
|
||||||
L.un_m1 + ' – <a id="unpost_refresh" href="#">' + L.un_upd + '</a>' +
|
L.un_m1 + ' – <a id="unpost_refresh" href="#">' + L.un_upd + '</a>' +
|
||||||
|
'<p>' + L.un_m4 + ' <a id="unpost_ulist" href="#">' + L.un_ulist + '</a> / <a id="unpost_ucopy" href="#">' + L.un_ucopy + '</a>' +
|
||||||
'<p>' + L.un_flt + ' <input type="text" id="unpost_filt" size="20" placeholder="documents/passwords" /><a id="unpost_nofilt" href="#">' + L.un_fclr + '</a></p>' +
|
'<p>' + L.un_flt + ' <input type="text" id="unpost_filt" size="20" placeholder="documents/passwords" /><a id="unpost_nofilt" href="#">' + L.un_fclr + '</a></p>' +
|
||||||
'<div id="unpost"></div>'
|
'<div id="unpost"></div>'
|
||||||
);
|
);
|
||||||
@@ -6678,7 +6923,7 @@ var unpost = (function () {
|
|||||||
else
|
else
|
||||||
html.push('-- <em>' + (filt.value ? L.un_no2 : L.un_no1) + '</em>');
|
html.push('-- <em>' + (filt.value ? L.un_no2 : L.un_no1) + '</em>');
|
||||||
|
|
||||||
var mods = [1000, 100, 10];
|
var mods = [10, 100, 1000];
|
||||||
for (var a = 0; a < res.length; a++) {
|
for (var a = 0; a < res.length; a++) {
|
||||||
for (var b = 0; b < mods.length; b++)
|
for (var b = 0; b < mods.length; b++)
|
||||||
if (a % mods[b] == 0 && res.length > a + mods[b] / 10)
|
if (a % mods[b] == 0 && res.length > a + mods[b] / 10)
|
||||||
@@ -6699,7 +6944,7 @@ var unpost = (function () {
|
|||||||
r.me = me;
|
r.me = me;
|
||||||
}
|
}
|
||||||
|
|
||||||
var q = '/?ups';
|
var q = SR + '/?ups';
|
||||||
if (filt.value)
|
if (filt.value)
|
||||||
q += '&filter=' + uricom_enc(filt.value, true);
|
q += '&filter=' + uricom_enc(filt.value, true);
|
||||||
|
|
||||||
@@ -6711,6 +6956,16 @@ var unpost = (function () {
|
|||||||
ct.innerHTML = "<p><em>" + L.un_m3 + "</em></p>";
|
ct.innerHTML = "<p><em>" + L.un_m3 + "</em></p>";
|
||||||
};
|
};
|
||||||
|
|
||||||
|
function linklist() {
|
||||||
|
var ret = [],
|
||||||
|
base = document.location.origin.replace(/\/$/, '');
|
||||||
|
|
||||||
|
for (var a = 0; a < r.files.length; a++)
|
||||||
|
ret.push(base + r.files[a].vp);
|
||||||
|
|
||||||
|
return ret.join('\r\n');
|
||||||
|
}
|
||||||
|
|
||||||
function unpost_delete_cb() {
|
function unpost_delete_cb() {
|
||||||
if (this.status !== 200) {
|
if (this.status !== 200) {
|
||||||
var msg = this.responseText;
|
var msg = this.responseText;
|
||||||
@@ -6748,14 +7003,16 @@ var unpost = (function () {
|
|||||||
n2 = parseInt(tgt.getAttribute('n2') || n + 1),
|
n2 = parseInt(tgt.getAttribute('n2') || n + 1),
|
||||||
req = [];
|
req = [];
|
||||||
|
|
||||||
for (var a = n; a < n2; a++)
|
for (var a = n; a < n2; a++) {
|
||||||
if (QS('#op_unpost a.n' + a))
|
var links = QSA('#op_unpost a.n' + a);
|
||||||
req.push(uricom_dec(r.files[a].vp.split('?')[0]));
|
if (!links.length)
|
||||||
|
continue;
|
||||||
|
|
||||||
var links = QSA('#op_unpost a.n' + n);
|
req.push(uricom_dec(r.files[a].vp.split('?')[0]));
|
||||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
for (var b = 0; b < links.length; b++) {
|
||||||
links[a].removeAttribute('href');
|
links[b].removeAttribute('href');
|
||||||
links[a].innerHTML = '[busy]';
|
links[b].innerHTML = '[busy]';
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
toast.show('inf r', 0, L.un_busy.format(req.length));
|
toast.show('inf r', 0, L.un_busy.format(req.length));
|
||||||
@@ -6763,7 +7020,7 @@ var unpost = (function () {
|
|||||||
var xhr = new XHR();
|
var xhr = new XHR();
|
||||||
xhr.n = n;
|
xhr.n = n;
|
||||||
xhr.n2 = n2;
|
xhr.n2 = n2;
|
||||||
xhr.open('POST', '/?delete&lim=' + links.length, true);
|
xhr.open('POST', SR + '/?delete&lim=' + req.length, true);
|
||||||
xhr.onload = xhr.onerror = unpost_delete_cb;
|
xhr.onload = xhr.onerror = unpost_delete_cb;
|
||||||
xhr.send(JSON.stringify(req));
|
xhr.send(JSON.stringify(req));
|
||||||
};
|
};
|
||||||
@@ -6785,6 +7042,19 @@ var unpost = (function () {
|
|||||||
goto('unpost');
|
goto('unpost');
|
||||||
};
|
};
|
||||||
|
|
||||||
|
ebi('unpost_ulist').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
modal.alert(linklist());
|
||||||
|
};
|
||||||
|
|
||||||
|
ebi('unpost_ucopy').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
var txt = linklist();
|
||||||
|
cliptxt(txt + '\n', function () {
|
||||||
|
toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
return r;
|
return r;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
@@ -6880,18 +7150,19 @@ function reload_browser() {
|
|||||||
filecols.set_style();
|
filecols.set_style();
|
||||||
|
|
||||||
var parts = get_evpath().split('/'),
|
var parts = get_evpath().split('/'),
|
||||||
rm = QSA('#path>a+a+a'),
|
rm = ebi('entree'),
|
||||||
ftab = ebi('files'),
|
ftab = ebi('files'),
|
||||||
link = '/', o;
|
link = '', o;
|
||||||
|
|
||||||
for (a = rm.length - 1; a >= 0; a--)
|
while (rm.nextSibling)
|
||||||
rm[a].parentNode.removeChild(rm[a]);
|
rm.parentNode.removeChild(rm.nextSibling);
|
||||||
|
|
||||||
for (var a = 1; a < parts.length - 1; a++) {
|
for (var a = 0; a < parts.length - 1; a++) {
|
||||||
link += parts[a] + '/';
|
link += parts[a] + '/';
|
||||||
o = mknod('a');
|
o = mknod('a');
|
||||||
o.setAttribute('href', link);
|
o.setAttribute('href', link);
|
||||||
o.textContent = uricom_dec(parts[a]);
|
o.textContent = uricom_dec(parts[a]) || '/';
|
||||||
|
ebi('path').appendChild(mknod('i'));
|
||||||
ebi('path').appendChild(o);
|
ebi('path').appendChild(o);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
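Side note on the sandboxed logue/README rendering above: instead of injecting untrusted HTML straight into the page, it is wrapped in an iframe with the sandbox and srcdoc attributes, and a small script inside the frame reports its own height back to the parent via postMessage so the parent can resize the frame. A minimal standalone sketch of that pattern follows; the helper name makeSandbox and the "iheight" message format are illustrative assumptions, not copyparty's actual API.

// sketch: srcdoc + postMessage resize pattern (assumed names, not the real implementation)
function makeSandbox(target, untrustedHtml) {
    var doc = '<html><body>' + untrustedHtml +
        '<script>' +
        'setInterval(function () {' +
        '  parent.postMessage("iheight " + document.body.scrollHeight, "*");' +
        '}, 200);' +
        '<\/script></body></html>';

    var fr = document.createElement('iframe');
    fr.setAttribute('sandbox', 'allow-scripts'); // scripts allowed, but no same-origin access
    fr.setAttribute('srcdoc', doc);
    target.innerHTML = '';
    target.appendChild(fr);
    return fr;
}

// the parent listens for height reports and resizes the frame accordingly
window.addEventListener('message', function (e) {
    var m = /^iheight (\d+)$/.exec(e.data);
    if (m)
        document.querySelector('iframe').style.height = m[1] + 'px';
});

The real code additionally inlines the page's own stylesheets and a copy of util.js into the frame, and uses the same channel for focus and scroll synchronization.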
@@ -57,7 +57,7 @@
 <div>{{ logues[1] }}</div><br />
 {%- endif %}

-<h2><a href="/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
+<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>

 </body>
 </html>
@@ -5,10 +5,10 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
 {%- if edit %}
-<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
 {%- endif %}
 </head>
 <body>
@@ -128,7 +128,8 @@ write markdown (most html is 🙆 too)

 <script>

-var last_modified = {{ lastmod }},
+var SR = {{ r|tojson }},
+last_modified = {{ lastmod }},
 have_emp = {{ have_emp|tojson }},
 dfavico = "{{ favico }}";

@@ -153,10 +154,10 @@ l.light = drk? 0:1;
 })();

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
-<script src="/.cpr/md.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/md.js?_={{ ts }}"></script>
 {%- if edit %}
-<script src="/.cpr/md2.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
 {%- endif %}
 </body></html>
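The recurring change in these templates is the {{ r }} route prefix: every absolute "/.cpr/..." or "/?..." URL becomes prefix-relative so the server can be mounted under a subpath (for example behind a reverse proxy), and the prefix is exported to client-side code as the global SR. A minimal sketch of how client code builds URLs against such a prefix; the helper u() and the "/files" value are hypothetical examples, only SR itself comes from the templates.

// sketch: prefix-aware URL building (SR is "" when served at the root)
var SR = "/files"; // example value; the real one is injected by the template

function u(path) {
    // "/?ups" -> "/files/?ups", "/.cpr/util.js" -> "/files/.cpr/util.js"
    return SR + path;
}

var xhr = new XMLHttpRequest();
xhr.open('GET', u('/?am_js'), true);
xhr.send();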
@@ -930,7 +930,9 @@ var set_lno = (function () {
 (function () {
 function keydown(ev) {
 ev = ev || window.event;
-var kc = ev.code || ev.keyCode || ev.which;
+var kc = ev.code || ev.keyCode || ev.which,
+editing = document.activeElement == dom_src;
+
 //console.log(ev.key, ev.code, ev.keyCode, ev.which);
 if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
 save();
@@ -941,12 +943,17 @@ var set_lno = (function () {
 if (d)
 d.click();
 }
-if (document.activeElement != dom_src)
-return true;
+if (editing)
+set_lno();

-set_lno();
-
 if (ctrl(ev)) {
+if (ev.code == "KeyE") {
+dom_nsbs.click();
+return false;
+}
+if (!editing)
+return true;
+
 if (ev.code == "KeyH" || kc == 72) {
 md_header(ev.shiftKey);
 return false;
@@ -971,10 +978,6 @@ var set_lno = (function () {
 iter_uni();
 return false;
 }
-if (ev.code == "KeyE") {
-dom_nsbs.click();
-return false;
-}
 var up = ev.code == "ArrowUp" || kc == 38;
 var dn = ev.code == "ArrowDown" || kc == 40;
 if (up || dn) {
@@ -987,6 +990,9 @@ var set_lno = (function () {
 }
 }
 else {
+if (!editing)
+return true;
+
 if (ev.code == "Tab" || kc == 9) {
 md_indent(ev.shiftKey);
 return false;
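The editor keydown handler is restructured around an editing flag computed once per keystroke: ctrl+E (toggle editor/preview) now fires regardless of focus, while the remaining hotkeys bail out early when the textarea is not focused. A sketch of that gating structure, under the assumption that dom_src is the editor textarea, dom_nsbs the toggle button, and md_indent an existing helper as in the diff:

// sketch of the hotkey gating
document.onkeydown = function (ev) {
    var editing = document.activeElement === dom_src;

    if (ev.ctrlKey || ev.metaKey) {
        if (ev.code === 'KeyE') {      // works everywhere, even outside the editor
            dom_nsbs.click();
            return false;
        }
        if (!editing)
            return true;               // other ctrl-hotkeys only apply while editing

        // ... editing-only hotkeys (headers, links, etc.)
    }
    else if (editing && ev.code === 'Tab') {
        md_indent(ev.shiftKey);
        return false;
    }
    return true;
};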
@@ -5,10 +5,10 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.7">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
-<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
+<link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
 </head>
 <body>
 <div id="mw">
@@ -26,7 +26,8 @@
 <a href="#" id="repl">π</a>
 <script>

-var last_modified = {{ lastmod }},
+var SR = {{ r|tojson }},
+last_modified = {{ lastmod }},
 have_emp = {{ have_emp|tojson }},
 dfavico = "{{ favico }}";

@@ -48,8 +49,8 @@ l.light = drk? 0:1;
 })();

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
-<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
-<script src="/.cpr/mde.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
 </body></html>
@@ -8,7 +8,7 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
 </head>

 <body>
@@ -16,7 +16,8 @@ html {
 h1 {
 border-bottom: 1px solid #ccc;
 margin: 2em 0 .4em 0;
-padding: 0 0 .2em 0;
+padding: 0;
+line-height: 1em;
 font-weight: normal;
 }
 li {
@@ -26,6 +27,7 @@ a {
 color: #047;
 background: #fff;
 text-decoration: none;
+white-space: nowrap;
 border-bottom: 1px solid #8ab;
 border-radius: .2em;
 padding: .2em .6em;
@@ -34,6 +36,11 @@ a {
 td a {
 margin: 0;
 }
+#w {
+color: #fff;
+background: #940;
+border-color: #b70;
+}
 .af,
 .logout {
 float: right;
@@ -49,12 +56,30 @@ a.g {
 border-color: #3a0;
 box-shadow: 0 .3em 1em #4c0;
 }
-#repl {
+#repl,
+#pb a {
 border: none;
 background: none;
 color: inherit;
 padding: 0;
 }
+#repl {
+position: fixed;
+bottom: .25em;
+left: .2em;
+}
+#pb {
+opacity: .5;
+position: fixed;
+bottom: .25em;
+right: .3em;
+}
+#pb span {
+opacity: .6;
+}
+#pb a {
+margin: 0;
+}
 table {
 border-collapse: collapse;
 }
@@ -155,15 +180,19 @@ html.z a.g {
 border-color: #af4;
 box-shadow: 0 .3em 1em #7d0;
 }
-html.z input {
-color: #fff;
-background: #626;
-border: 1px solid #c2c;
-border-width: 1px 0 0 0;
+input {
+color: #a50;
+background: #fff;
+border: 1px solid #a50;
 border-radius: .5em;
 padding: .5em .7em;
 margin: 0 .5em 0 0;
 }
+html.z input {
+color: #fff;
+background: #626;
+border-color: #c2c;
+}
 html.z .num {
 border-color: #777;
 }
@@ -8,19 +8,19 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
-<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 </head>

 <body>
 <div id="wrap">
-<a id="a" href="/?h" class="af">refresh</a>
-<a id="v" href="/?hc" class="af">connect</a>
+<a id="a" href="{{ r }}/?h" class="af">refresh</a>
+<a id="v" href="{{ r }}/?hc" class="af">connect</a>

 {%- if this.uname == '*' %}
 <p id="b">howdy stranger <small>(you're not logged in)</small></p>
 {%- else %}
-<a id="c" href="/?pw=x" class="logout">logout</a>
+<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
 <p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
 {%- endif %}

@@ -46,15 +46,15 @@
 <tbody>
 {% for mp in avol %}
 {%- if mp in vstate and vstate[mp] %}
-<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
+<tr><td><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ r }}{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
 {%- endif %}
 {% endfor %}
 </tbody>
 </table>
 </td></tr></table>
 <div class="btns">
-<a id="d" href="/?stack">dump stack</a>
-<a id="e" href="/?reload=cfg">reload cfg</a>
+<a id="d" href="{{ r }}/?stack">dump stack</a>
+<a id="e" href="{{ r }}/?reload=cfg">reload cfg</a>
 </div>
 {%- endif %}

@@ -62,7 +62,7 @@
 <h1 id="f">you can browse:</h1>
 <ul>
 {% for mp in rvol %}
-<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
+<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
 {% endfor %}
 </ul>
 {%- endif %}
@@ -71,7 +71,7 @@
 <h1 id="g">you can upload to:</h1>
 <ul>
 {% for mp in wvol %}
-<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
+<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
 {% endfor %}
 </ul>
 {%- endif %}
@@ -79,34 +79,41 @@
 <h1 id="cc">client config:</h1>
 <ul>
 {% if k304 %}
-<li><a id="h" href="/?k304=n">disable k304</a> (currently enabled)
+<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
 {%- else %}
-<li><a id="i" href="/?k304=y" class="r">enable k304</a> (currently disabled)
+<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
 {% endif %}
 <blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>

-<li><a id="k" href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
+<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
 </ul>

 <h1 id="l">login for more:</h1>
-<ul>
-<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
+<div>
+<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
 <input type="hidden" name="act" value="login" />
 <input type="password" name="cppwd" />
 <input type="submit" value="Login" />
+{% if ahttps %}
+<a id="w" href="{{ ahttps }}">switch to https</a>
+{% endif %}
 </form>
-</ul>
+</div>
 </div>
 <a href="#" id="repl">π</a>
+{%- if not this.args.nb %}
+<span id="pb"><span>powered by</span> <a href="{{ this.args.pb_url }}">copyparty {{ver}}</a></span>
+{%- endif %}
 <script>

-var lang="{{ lang }}",
+var SR = {{ r|tojson }},
+lang="{{ lang }}",
 dfavico="{{ favico }}";

 document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/splash.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
 </body>
 </html>
@@ -17,15 +17,16 @@ var Ls = {
 "l1": "logg inn:",
 "m1": "velkommen tilbake,",
 "n1": "404: filen finnes ikke ┐( ´ -`)┌",
-"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="/?h">gå hjem</a>',
+"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
 "p1": "403: tilgang nektet ~┻━┻",
-"q1": 'du må logge inn eller <a href="/?h">gå hjem</a>',
+"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
 "r1": "gå hjem",
 ".s1": "kartlegg",
 "t1": "handling",
 "u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
 "v1": "koble til",
-"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!"
+"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
+"w1": "bytt til https",
 },
 "eng": {
 "d2": "shows the state of all active threads",
@@ -8,14 +8,14 @@
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <meta name="theme-color" content="#333">
 {{ html_head }}
-<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
-<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
+<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
 </head>

 <body>
 <div id="wrap" class="w">
 <div class="cn">
-<p class="btns"><a href="/">browse files</a> // <a href="/?h">control panel</a></p>
+<p class="btns"><a href="/{{ rvp }}">browse files</a> // <a href="{{ r }}/?h">control panel</a></p>
 <p>or choose your OS for cooler alternatives:</p>
 <div class="ossel">
 <a id="swin" href="#">Windows</a>
@@ -28,7 +28,7 @@
 make this server appear on your computer as a regular HDD!<br />
 pick your favorite below (sorted by performance, best first) and lets 🎉<br />
 <br />
-placeholders:
+<span class="os win lin mac">placeholders:</span>
 <span class="os win">
 {% if accs %}<code><b>{{ pw }}</b></code>=password, {% endif %}<code><b>W:</b></code>=mountpoint
 </span>
@@ -47,27 +47,32 @@
 <p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
 <pre>
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
-rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ vp }} <b>W:</b>
+rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
 </pre>
 {% if s %}
 <p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
 {% endif %}

-<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
+<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
 <pre>
-net use <b>w:</b> http{{ s }}://{{ ep }}/{{ vp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
+net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
 </pre>
 </div>

 <div class="os lin">
 <pre>
 yum install davfs2
-{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ vp }} <b>mp</b>
+{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
+</pre>
+<p>make it automount on boot:</p>
+<pre>
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
 </pre>
 <p>or you can use rclone instead, which is much slower but doesn't require root:</p>
 <pre>
 rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
-rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ vp }} <b>mp</b>
+rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
 </pre>
 {% if s %}
 <p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
@@ -77,20 +82,20 @@
 <!-- gnome-bug: ignores vp -->
 <pre>
 {%- if accs %}
-echo <b>{{ pw }}</b> | gio mount dav{{ s }}://k@{{ ep }}/{{ vp }}
+echo <b>{{ pw }}</b> | gio mount dav{{ s }}://k@{{ ep }}/{{ rvp }}
 {%- else %}
-gio mount -a dav{{ s }}://{{ ep }}/{{ vp }}
+gio mount -a dav{{ s }}://{{ ep }}/{{ rvp }}
 {%- endif %}
 </pre>
 </div>

 <div class="os mac">
 <pre>
-osascript -e ' mount volume "http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ vp }}" '
+osascript -e ' mount volume "http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}" '
 </pre>
 <p>or you can open up a Finder, press command-K and paste this instead:</p>
 <pre>
-http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ vp }}
+http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
 </pre>

 {% if s %}
@@ -108,26 +113,26 @@
 <p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
 <pre>
 rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
-rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ vp }} <b>W:</b>
+rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b>
 </pre>
 <p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
 <pre>
-explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
+explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
 </pre>
 </div>

 <div class="os lin">
 <pre>
 rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
-rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ vp }} <b>mp</b>
+rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b>
 </pre>
 <p>emergency alternative (gnome/gui-only):</p>
 <!-- gnome-bug: ignores vp -->
 <pre>
 {%- if accs %}
-echo <b>{{ pw }}</b> | gio mount ftp{{ "" if args.ftp else "s" }}://k@{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
+echo <b>{{ pw }}</b> | gio mount ftp{{ "" if args.ftp else "s" }}://k@{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
 {%- else %}
-gio mount -a ftp{{ "" if args.ftp else "s" }}://{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
+gio mount -a ftp{{ "" if args.ftp else "s" }}://{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
 {%- endif %}
 </pre>
 </div>
@@ -135,7 +140,7 @@
 <div class="os mac">
 <p>note: FTP is read-only on macos; please use WebDAV instead</p>
 <pre>
-open {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}k:<b>{{ pw }}</b>@{% else %}anonymous:@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
+open {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}k:<b>{{ pw }}</b>@{% else %}anonymous:@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
 </pre>
 </div>
 {% endif %}
@@ -144,18 +149,18 @@

 <h1>partyfuse</h1>
 <p>
-<a href="/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
+<a href="{{ r }}/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
 <span class="os win">needs <a href="https://winfsp.dev/rel/">winfsp</a></span>
 <span class="os lin">doesn't need root</span>
 </p>
 <pre>
-partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ vp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
+partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
 </pre>
 {% if s %}
 <p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
 {% endif %}
 <p>
-you can use <a href="/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
+you can use <a href="{{ r }}/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
 </p>


@@ -188,13 +193,14 @@
 <a href="#" id="repl">π</a>
 <script>

-var lang="{{ lang }}",
+var SR = {{ r|tojson }},
+lang="{{ lang }}",
 dfavico="{{ favico }}";

 document.documentElement.className=localStorage.theme||"{{ args.theme }}";

 </script>
-<script src="/.cpr/util.js?_={{ ts }}"></script>
-<script src="/.cpr/svcs.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
 </body>
 </html>
@@ -39,4 +39,4 @@ function setos(os) {
 clmod(oa[a], 'g', oa[a].id.slice(1) == os);
 }

-setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : '');
+setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : 'idk');
@@ -1,7 +1,7 @@
 @font-face {
 font-family: 'scp';
 font-display: swap;
-src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
+src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
 }
 html {
 touch-action: manipulation;
@@ -42,6 +42,10 @@ html {
 text-shadow: 1px 1px 0 #000;
 color: #fff;
 }
+#toast.top {
+top: 2em;
+bottom: unset;
+}
 #toast a {
 color: inherit;
 text-shadow: inherit;
@@ -69,6 +73,7 @@ html {
 #toastb {
 max-height: 70vh;
 overflow-y: auto;
+padding: 1px;
 }
 #toast.scroll #toastb {
 overflow-y: scroll;
@@ -114,10 +114,10 @@ function up2k_flagbus() {
|
|||||||
do_take(now);
|
do_take(now);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (flag.owner && now - flag.owner[1] > 5000) {
|
if (flag.owner && now - flag.owner[1] > 12000) {
|
||||||
flag.owner = null;
|
flag.owner = null;
|
||||||
}
|
}
|
||||||
if (flag.wants && now - flag.wants[1] > 5000) {
|
if (flag.wants && now - flag.wants[1] > 12000) {
|
||||||
flag.wants = null;
|
flag.wants = null;
|
||||||
}
|
}
|
||||||
if (!flag.owner && !flag.wants) {
|
if (!flag.owner && !flag.wants) {
|
||||||
@@ -672,7 +672,7 @@ function Donut(uc, st) {
|
|||||||
favico.upd();
|
favico.upd();
|
||||||
wintitle();
|
wintitle();
|
||||||
if (document.visibilityState == 'hidden')
|
if (document.visibilityState == 'hidden')
|
||||||
tenstrobe = setTimeout(enstrobe, 500); //debounce
|
tenstrobe = setTimeout(r.enstrobe, 500); //debounce
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -709,7 +709,7 @@ function Donut(uc, st) {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
function enstrobe() {
|
r.enstrobe = function () {
|
||||||
strobes = ['████████████████', '________________', '████████████████'];
|
strobes = ['████████████████', '________________', '████████████████'];
|
||||||
tstrober = setInterval(strobe, 300);
|
tstrober = setInterval(strobe, 300);
|
||||||
|
|
||||||
@@ -772,6 +772,7 @@ function fsearch_explain(n) {
|
|||||||
|
|
||||||
function up2k_init(subtle) {
|
function up2k_init(subtle) {
|
||||||
var r = {
|
var r = {
|
||||||
|
"tact": Date.now(),
|
||||||
"init_deps": init_deps,
|
"init_deps": init_deps,
|
||||||
"set_fsearch": set_fsearch,
|
"set_fsearch": set_fsearch,
|
||||||
"gotallfiles": [gotallfiles] // hooks
|
"gotallfiles": [gotallfiles] // hooks
|
||||||
@@ -779,7 +780,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
if (window.WebAssembly && !hws.length)
|
if (window.WebAssembly && !hws.length)
|
||||||
fetch('/.cpr/w.hash.js' + CB);
|
fetch(SR + '/.cpr/w.hash.js' + CB);
|
||||||
}, 1000);
|
}, 1000);
|
||||||
|
|
||||||
function showmodal(msg) {
|
function showmodal(msg) {
|
||||||
@@ -809,7 +810,7 @@ function up2k_init(subtle) {
|
|||||||
m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||||
|
|
||||||
showmodal('<h1>loading ' + fn + '</h1>');
|
showmodal('<h1>loading ' + fn + '</h1>');
|
||||||
import_js('/.cpr/deps/' + fn, unmodal);
|
import_js(SR + '/.cpr/deps/' + fn, unmodal);
|
||||||
|
|
||||||
if (HTTPS) {
|
if (HTTPS) {
|
||||||
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
||||||
@@ -856,6 +857,7 @@ function up2k_init(subtle) {
|
|||||||
fdom_ctr = 0,
|
fdom_ctr = 0,
|
||||||
biggest_file = 0;
|
biggest_file = 0;
|
||||||
|
|
||||||
|
bcfg_bind(uc, 'rand', 'u2rand', false, null, false);
|
||||||
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||||
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
|
||||||
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||||
@@ -867,7 +869,7 @@ function up2k_init(subtle) {
|
|||||||
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
||||||
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||||
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
||||||
bcfg_bind(uc, 'upsfx', 'upsfx', false);
|
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
||||||
|
|
||||||
var st = {
|
var st = {
|
||||||
"files": [],
|
"files": [],
|
||||||
@@ -895,15 +897,25 @@ function up2k_init(subtle) {
|
|||||||
"finished": 0
|
"finished": 0
|
||||||
},
|
},
|
||||||
"time": {
|
"time": {
|
||||||
"hashing": 0,
|
"hashing": 0.01,
|
||||||
"uploading": 0,
|
"uploading": 0.01,
|
||||||
"busy": 0
|
"busy": 0.01
|
||||||
},
|
},
|
||||||
"eta": {
|
"eta": {
|
||||||
"h": "",
|
"h": "",
|
||||||
"u": "",
|
"u": "",
|
||||||
"t": ""
|
"t": ""
|
||||||
},
|
},
|
||||||
|
"etaw": {
|
||||||
|
"h": [['', 0, 0, 0]],
|
||||||
|
"u": [['', 0, 0, 0]],
|
||||||
|
"t": [['', 0, 0, 0]]
|
||||||
|
},
|
||||||
|
"etac": {
|
||||||
|
"h": 0,
|
||||||
|
"u": 0,
|
||||||
|
"t": 0
|
||||||
|
},
|
||||||
"car": 0,
|
"car": 0,
|
||||||
"slow_io": null,
|
"slow_io": null,
|
||||||
"oserr": false,
|
"oserr": false,
|
||||||
@@ -1312,7 +1324,7 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
if (window.WebAssembly && !hws.length) {
|
if (window.WebAssembly && !hws.length) {
|
||||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||||
hws.push(new Worker('/.cpr/w.hash.js' + CB));
|
hws.push(new Worker(SR + '/.cpr/w.hash.js' + CB));
|
||||||
|
|
||||||
console.log(hws.length + " hashers");
|
console.log(hws.length + " hashers");
|
||||||
}
|
}
|
||||||
@@ -1353,6 +1365,10 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
if (uc.fsearch)
|
if (uc.fsearch)
|
||||||
entry.srch = 1;
|
entry.srch = 1;
|
||||||
|
else if (uc.rand) {
|
||||||
|
entry.rand = true;
|
||||||
|
entry.name = 'a\n' + entry.name;
|
||||||
|
}
|
||||||
|
|
||||||
if (biggest_file < entry.size)
|
if (biggest_file < entry.size)
|
||||||
biggest_file = entry.size;
|
biggest_file = entry.size;
|
||||||
@@ -1388,7 +1404,7 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2tabw').className = 'ye';
|
ebi('u2tabw').className = 'ye';
|
||||||
|
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
if (!actx || actx.state != 'suspended' || toast.tag == L.u_unpt)
|
if (!actx || actx.state != 'suspended' || toast.visible)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
|
toast.warn(30, "<div onclick=\"start_actx();toast.inf(3,'thanks!')\">please click this text to<br />unlock full upload speed</div>");
|
||||||
@@ -1408,7 +1424,36 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
more_one_file();
|
more_one_file();
|
||||||
|
|
||||||
var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0;
|
function linklist() {
|
||||||
|
var ret = [],
|
||||||
|
base = document.location.origin.replace(/\/$/, '');
|
||||||
|
|
||||||
|
for (var a = 0; a < st.files.length; a++) {
|
||||||
|
var t = st.files[a],
|
||||||
|
url = t.purl + uricom_enc(t.name);
|
||||||
|
|
||||||
|
if (t.fk)
|
||||||
|
url += '?k=' + t.fk;
|
||||||
|
|
||||||
|
ret.push(base + url);
|
||||||
|
}
|
||||||
|
return ret.join('\r\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
ebi('luplinks').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
modal.alert(linklist());
|
||||||
|
};
|
||||||
|
|
||||||
|
ebi('cuplinks').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
var txt = linklist();
|
||||||
|
cliptxt(txt + '\n', function () {
|
||||||
|
toast.inf(5, txt.split('\n').length + ' links copied to clipboard');
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0, utw_card = 0;
|
||||||
function etafun() {
|
function etafun() {
|
||||||
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
|
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
|
||||||
nsend = st.busy.upload.length + st.todo.upload.length,
|
nsend = st.busy.upload.length + st.todo.upload.length,
|
||||||
@@ -1421,6 +1466,12 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
|
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
|
||||||
|
|
||||||
|
if (utw_card != pvis.act) {
|
||||||
|
utw_card = pvis.act;
|
||||||
|
utw_read = 9001;
|
||||||
|
ebi('u2tabw').style.minHeight = '0px';
|
||||||
|
}
|
||||||
|
|
||||||
if (++utw_read >= 20) {
|
if (++utw_read >= 20) {
|
||||||
utw_read = 0;
|
utw_read = 0;
|
||||||
utw_minh = parseInt(ebi('u2tabw').style.minHeight || '0');
|
utw_minh = parseInt(ebi('u2tabw').style.minHeight || '0');
|
||||||
@@ -1480,10 +1531,20 @@ function up2k_init(subtle) {
 }
 }
 for (var a = 0; a < t.length; a++) {
-var rem = st.bytes.total - t[a][2],
-bps = t[a][1] / t[a][3],
-hid = t[a][0],
+var hid = t[a][0],
 eid = hid.slice(-1),
+etaw = st.etaw[eid];
+
+if (st.etac[eid] > 100) { // num chunks
+st.etac[eid] = 0;
+etaw.push(jcp(t[a]));
+if (etaw.length > 5)
+etaw.shift();
+}
+
+var h = etaw[0],
+rem = st.bytes.total - t[a][2],
+bps = (t[a][1] - h[1]) / Math.max(0.1, t[a][3] - h[3]),
 eta = Math.floor(rem / bps);
 
 if (t[a][1] < 1024 || t[a][3] < 0.1) {
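In other words, the ETA now comes from a sliding window of recent progress snapshots instead of the lifetime average, so it reacts faster when the transfer speed changes. A rough standalone sketch of that idea (illustrative only; the patch above additionally refreshes its snapshots only every ~100 chunks):

```js
// keep the last few (elapsedSec, bytesDone) samples and estimate speed from
// the oldest vs. newest sample instead of from the start of the upload
var samples = [];

function etaSeconds(elapsedSec, bytesDone, bytesTotal) {
    samples.push([elapsedSec, bytesDone]);
    if (samples.length > 5)
        samples.shift();  // same window length as the patch above

    if (samples.length < 2)
        return -1;  // not enough data yet

    var h = samples[0],
        bps = (bytesDone - h[1]) / Math.max(0.1, elapsedSec - h[0]);

    return Math.floor((bytesTotal - bytesDone) / Math.max(1, bps));
}
```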
@@ -1535,11 +1596,11 @@ function up2k_init(subtle) {
 st.busy.handshake.length)
 return false;
 
-if (t.n - st.car > 8)
+if (t.n - st.car > Math.max(8, parallel_uploads))
 // prevent runahead from a stuck upload (slow server hdd)
 return false;
 
-if ((uc.multitask ? 1 : 0) <
+if ((uc.multitask ? parallel_uploads : 0) <
 st.todo.upload.length +
 st.busy.upload.length)
 return false;
@@ -1551,21 +1612,22 @@ function up2k_init(subtle) {
 if (!parallel_uploads)
 return false;
 
+var nhs = st.todo.handshake.length + st.busy.handshake.length,
+nup = st.todo.upload.length + st.busy.upload.length;
+
 if (uc.multitask) {
+if (nhs + nup < parallel_uploads)
+return true;
+
 if (!uc.az)
-return st.todo.handshake.length + st.busy.handshake.length < 2;
+return nhs < 2;
 
 var ahead = st.bytes.hashed - st.bytes.finished,
 nmax = ahead < biggest_file / 8 ? 32 : 16;
 
-return ahead < biggest_file &&
-st.todo.handshake.length + st.busy.handshake.length < nmax;
+return ahead < biggest_file && nhs < nmax;
 }
-return handshakes_permitted() && 0 ==
-st.todo.handshake.length +
-st.busy.handshake.length +
-st.todo.upload.length +
-st.busy.upload.length;
+return handshakes_permitted() && 0 == nhs + nup;
 }
 
 var tasker = (function () {
@@ -1586,8 +1648,14 @@ function up2k_init(subtle) {
 running = true;
 while (true) {
 var now = Date.now(),
+blocktime = now - r.tact,
 is_busy = st.car < st.files.length;
 
+if (blocktime > 2500)
+console.log('main thread blocked for ' + blocktime);
+
+r.tact = now;
+
 if (was_busy && !is_busy) {
 for (var a = 0; a < st.files.length; a++) {
 var t = st.files[a];
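The `blocktime` check is a small main-thread watchdog: the task loop remembers when it last ran, and if the gap between two passes grows past 2.5 s it assumes the page was frozen (heavy hashing, background-tab throttling, and so on) and logs it. A generic sketch of the same pattern outside of up2k, under the assumption of a 100 ms tick:

```js
// hedged sketch of the stall-detection idea above: compare the interval we
// asked for with the time that actually passed between two timer ticks
var lastTick = Date.now();

setInterval(function () {
    var now = Date.now(),
        blocked = now - lastTick - 100;  // 100 ms = the requested interval

    if (blocked > 2500)
        console.log('main thread blocked for ~' + blocked + ' ms');

    lastTick = now;
}, 100);
```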
@@ -1727,23 +1795,34 @@ function up2k_init(subtle) {
 })();
 
 function uptoast() {
+if (st.busy.handshake.length)
+return;
+
+for (var a = 0; a < st.files.length; a++) {
+var t = st.files[a];
+if (t.want_recheck && !t.rechecks)
+return;
+}
+
 var sr = uc.fsearch,
 ok = pvis.ctr.ok,
 ng = pvis.ctr.ng,
+spd = Math.floor(st.bytes.finished / st.time.busy),
+suf = '\n\n{0} @ {1}/s'.format(shumantime(st.time.busy), humansize(spd)),
 t = uc.ask_up ? 0 : 10;
 
 console.log('toast', ok, ng);
 
 if (ok && ng)
-toast.warn(t, uc.nagtxt = (sr ? L.ur_sm : L.ur_um).format(ok, ng));
+toast.warn(t, uc.nagtxt = (sr ? L.ur_sm : L.ur_um).format(ok, ng) + suf);
 else if (ok > 1)
-toast.ok(t, uc.nagtxt = (sr ? L.ur_aso : L.ur_auo).format(ok));
+toast.ok(t, uc.nagtxt = (sr ? L.ur_aso : L.ur_auo).format(ok) + suf);
 else if (ok)
-toast.ok(t, uc.nagtxt = sr ? L.ur_1so : L.ur_1uo);
+toast.ok(t, uc.nagtxt = (sr ? L.ur_1so : L.ur_1uo) + suf);
 else if (ng > 1)
-toast.err(t, uc.nagtxt = (sr ? L.ur_asn : L.ur_aun).format(ng));
+toast.err(t, uc.nagtxt = (sr ? L.ur_asn : L.ur_aun).format(ng) + suf);
 else if (ng)
-toast.err(t, uc.nagtxt = sr ? L.ur_1sn : L.ur_1un);
+toast.err(t, uc.nagtxt = (sr ? L.ur_1sn : L.ur_1un) + suf);
 
 timer.rm(etafun);
 timer.rm(donut.do);
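The new `suf` string appends a total-time / average-speed summary to the completion toast. A quick illustration of the kind of value it produces, with `shumantime` and `humansize` approximated by hand (the real helpers pick friendlier units):

```js
// hedged example: 5 GiB finished after 500 s of busy time
var finished = 5 * 1024 * 1024 * 1024,
    busy = 500,
    spd = Math.floor(finished / busy);  // bytes per second

console.log('\n\n' + busy + 's @ ' + (spd / 1048576).toFixed(1) + ' MiB/s');
// -> "500s @ 10.2 MiB/s"
```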
@@ -1854,6 +1933,7 @@ function up2k_init(subtle) {
 cdr = Math.min(chunksize + car, t.size);
 
 st.bytes.hashed += cdr - car;
+st.etac.h++;
 
 function orz(e) {
 bpend--;
@@ -1979,6 +2059,8 @@ function up2k_init(subtle) {
 nbusy++;
 reading++;
 nchunk++;
+if (Date.now() - up2k.tact > 1500)
+tasker();
 }
 
 function onmsg(d) {
@@ -2189,13 +2271,24 @@ function up2k_init(subtle) {
 
 t.sprs = response.sprs;
 
-var rsp_purl = url_enc(response.purl);
-if (rsp_purl !== t.purl || response.name !== t.name) {
-// server renamed us (file exists / path restrictions)
-console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
+var fk = response.fk,
+rsp_purl = url_enc(response.purl),
+rename = rsp_purl !== t.purl || response.name !== t.name;
+
+if (rename || fk) {
+if (rename)
+console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
+
 t.purl = rsp_purl;
 t.name = response.name;
-pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
+
+var url = t.purl + uricom_enc(t.name);
+if (fk) {
+t.fk = fk;
+url += '?k=' + fk;
+}
+
+pvis.seth(t.n, 0, linksplit(url).join(' '));
 }
 
 var chunksize = get_chunksize(t.size),
@@ -2298,15 +2391,17 @@ function up2k_init(subtle) {
 }
 
 var err_pend = rsp.indexOf('partial upload exists at a different') + 1,
+err_srcb = rsp.indexOf('source file busy; please try again') + 1,
+err_plug = rsp.indexOf('upload blocked by x') + 1,
 err_dupe = rsp.indexOf('upload rejected, file already exists') + 1;
 
-if (err_pend || err_dupe) {
+if (err_pend || err_srcb || err_plug || err_dupe) {
 err = rsp;
 ofs = err.indexOf('\n/');
 if (ofs !== -1) {
 err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
 }
-if (!t.rechecks && err_pend) {
+if (!t.rechecks && (err_pend || err_srcb)) {
 t.rechecks = 0;
 t.want_recheck = true;
 }
@@ -2343,6 +2438,8 @@ function up2k_init(subtle) {
 };
 if (t.srch)
 req.srch = 1;
+else if (t.rand)
+req.rand = true;
 
 xhr.open('POST', t.purl, true);
 xhr.responseType = 'text';
@@ -2357,8 +2454,17 @@ function up2k_init(subtle) {
 function can_upload_next() {
 var upt = st.todo.upload[0],
 upf = st.files[upt.nfile],
+nhs = st.busy.handshake.length,
+hs = nhs && st.busy.handshake[0],
 now = Date.now();
 
+if (nhs >= 16)
+return false;
+
+if (hs && hs.t_uploaded && Date.now() - hs.t_busied > 10000)
+// verification HS possibly held back by uploads
+return false;
+
 for (var a = 0, aa = st.busy.handshake.length; a < aa; a++) {
 var hs = st.busy.handshake[a];
 if (hs.n < upt.nfile && hs.t_busied > now - 10 * 1000 && !st.files[hs.n].bytes_uploaded)
@@ -2398,11 +2504,21 @@ function up2k_init(subtle) {
 
 function orz(xhr) {
 var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
+if (txt.indexOf('upload blocked by x') + 1) {
+apop(st.busy.upload, upt);
+apop(t.postlist, npart);
+pvis.seth(t.n, 1, "ERROR");
+pvis.seth(t.n, 2, txt.split(/\n/)[0]);
+pvis.move(t.n, 'ng');
+return;
+}
 if (xhr.status == 200) {
 pvis.prog(t, npart, cdr - car);
 st.bytes.finished += cdr - car;
 st.bytes.uploaded += cdr - car;
 t.bytes_uploaded += cdr - car;
+st.etac.u++;
+st.etac.t++;
 }
 else if (txt.indexOf('already got that') + 1 ||
 txt.indexOf('already being written') + 1) {
@@ -2530,9 +2646,15 @@ function up2k_init(subtle) {
 if (dir.target) {
 clmod(obj, 'err', 1);
 var v = Math.floor(parseInt(obj.value));
-if (v < 0 || v > 64 || v !== v)
+if (v < 0 || v !== v)
 return;
 
+if (v > 64) {
+var p = obj.selectionStart;
+v = obj.value = 64;
+obj.selectionStart = obj.selectionEnd = p;
+}
+
 parallel_uploads = v;
 swrite('nthread', v);
 clmod(obj, 'err');
@@ -2749,6 +2871,21 @@ function up2k_init(subtle) {
 
 if (en && Notification.permission == 'default')
 Notification.requestPermission().then(chknag, chknag);
+
+set_upsfx(en);
+}
+
+function set_upsfx(en) {
+if (!en)
+return;
+
+toast.inf(10, 'OK -- <a href="#" id="nagtest">test it!</a>')
+
+ebi('nagtest').onclick = function () {
+start_actx();
+uc.nagtxt = ':^)';
+setTimeout(donut.enstrobe, 200);
+};
 }
 
 if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
@@ -2806,7 +2943,7 @@ ebi('ico1').onclick = function () {
 if (QS('#op_up2k.act'))
 goto_up2k();
 
-apply_perms(perms);
+apply_perms({ "perms": perms, "frand": frand });
 
 
 (function () {
@@ -9,12 +9,15 @@ if (!window.console || !console.log)
 var wah = '',
 L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
 CB = '?_=' + Date.now(),
+R = SR.slice(1),
+RS = R ? "/" + R : "",
 HALFMAX = 8192 * 8192 * 8192 * 8192,
 HTTPS = (window.location + '').indexOf('https:') === 0,
 TOUCH = 'ontouchstart' in window,
 MOBILE = TOUCH,
 CHROME = !!window.chrome,
 VCHROME = CHROME ? 1 : 0,
+IE = /Trident\//.test(navigator.userAgent),
 FIREFOX = ('netscape' in window) && / rv:/.test(navigator.userAgent),
 IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
 LINUX = /Linux/.test(navigator.userAgent),
@@ -109,12 +112,13 @@ if ((document.location + '').indexOf(',rej,') + 1)
 
 try {
 console.hist = [];
+var CMAXHIST = 100;
 var hook = function (t) {
 var orig = console[t].bind(console),
 cfun = function () {
 console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
-if (console.hist.length > 100)
-console.hist = console.hist.slice(50);
+if (console.hist.length > CMAXHIST)
+console.hist = console.hist.slice(CMAXHIST / 2);
 
 orig.apply(console, arguments);
 };
@@ -193,8 +197,12 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
 var lsk = Object.keys(ls);
 lsk.sort();
 html.push('<p class="b">');
-for (var a = 0; a < lsk.length; a++)
+for (var a = 0; a < lsk.length; a++) {
+if (ls[lsk[a]].length > 9000)
+continue;
+
 html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
+}
 html.push('</p>');
 }
 catch (e) { }
@@ -325,6 +333,25 @@ if (!String.prototype.format)
 });
 };
 
+try {
+new URL('/a/', 'https://a.com/');
+}
+catch (ex) {
+console.log('ie11 shim URL()');
+window.URL = function (url, base) {
+if (url.indexOf('//') < 0)
+url = base + '/' + url.replace(/^\/?/, '');
+else if (url.indexOf('//') == 0)
+url = 'https:' + url;
+
+var x = url.split('?');
+return {
+"pathname": '/' + x[0].split('://')[1].replace(/[^/]+\//, ''),
+"search": x.length > 1 ? x[1] : ''
+};
+}
+}
+
 // https://stackoverflow.com/a/950146
 function import_js(url, cb) {
 var head = document.head || document.getElementsByTagName('head')[0];
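Roughly what the shim returns for a typical call, based on my reading of the code above (note that, unlike the native URL, `search` comes back without the leading `?`):

```js
// assuming the shim above is active (IE11):
var u = new URL('/foo/a b.txt?k=abc', 'https://x.example');
console.log(u.pathname);  // "/foo/a b.txt"
console.log(u.search);    // "k=abc"  (the native URL would give "?k=abc")
```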
@@ -686,7 +713,9 @@ function noq_href(el) {
 
 
 function get_pwd() {
-var pwd = ('; ' + document.cookie).split('; cppwd=');
+var k = HTTPS ? 's=' : 'd=',
+pwd = ('; ' + document.cookie).split('; cppw' + k);
+
 if (pwd.length < 2)
 return null;
 
@@ -970,6 +999,7 @@ function sethash(hv) {
 }
 }
 
+
 function dl_file(url) {
 console.log('DL [%s]', url);
 var o = mknod('a');
@@ -979,6 +1009,25 @@ function dl_file(url) {
 }
 
 
+function cliptxt(txt, ok) {
+var fb = function () {
+console.log('fb');
+var o = mknod('input');
+o.value = txt;
+document.body.appendChild(o);
+o.focus();
+o.select();
+document.execCommand("copy");
+document.body.removeChild(o);
+ok();
+};
+try {
+navigator.clipboard.writeText(txt).then(ok, fb);
+}
+catch (ex) { fb(); }
+}
+
+
 var timer = (function () {
 var r = {};
 r.q = [];
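`cliptxt` is the clipboard helper that the up2k `copy links` button relies on: it tries the async Clipboard API first and falls back to the old hidden-input + `execCommand('copy')` trick when that is unavailable or rejected. Typical usage, mirroring the caller added in the up2k changes above:

```js
// hedged usage example; toast.inf is the toast helper defined further down
cliptxt('https://example.org/inc/a.flac\n', function () {
    toast.inf(5, 'link copied to clipboard');
});
```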
@@ -1252,17 +1301,17 @@ var toast = (function () {
 r.tag = tag;
 };
 
-r.ok = function (sec, txt, tag) {
-r.show('ok', sec, txt, tag);
+r.ok = function (sec, txt, tag, cls) {
+r.show('ok ' + (cls || ''), sec, txt, tag);
 };
-r.inf = function (sec, txt, tag) {
-r.show('inf', sec, txt, tag);
+r.inf = function (sec, txt, tag, cls) {
+r.show('inf ' + (cls || ''), sec, txt, tag);
 };
-r.warn = function (sec, txt, tag) {
-r.show('warn', sec, txt, tag);
+r.warn = function (sec, txt, tag, cls) {
+r.show('warn ' + (cls || ''), sec, txt, tag);
 };
-r.err = function (sec, txt, tag) {
-r.show('err', sec, txt, tag);
+r.err = function (sec, txt, tag, cls) {
+r.show('err ' + (cls || ''), sec, txt, tag);
 };
 
 return r;
@@ -1526,25 +1575,33 @@ var md_plug_err = function (ex, js) {
 if (ex)
 console.log(ex, js);
 };
-function load_md_plug(md_text, plug_type) {
+function load_md_plug(md_text, plug_type, defer) {
+if (defer)
+md_plug[plug_type] = null;
+
 if (!have_emp)
 return md_text;
 
-var find = '\n```copyparty_' + plug_type + '\n';
-var ofs = md_text.indexOf(find);
-if (ofs === -1)
+var find = '\n```copyparty_' + plug_type + '\n',
+md = md_text.replace(/\r/g, ''),
+ofs = md.indexOf(find),
+ofs2 = md.indexOf('\n```', ofs + 1);
+
+if (ofs < 0 || ofs2 < 0)
 return md_text;
 
-var ofs2 = md_text.indexOf('\n```', ofs + 1);
-if (ofs2 == -1)
-return md_text;
+var js = md.slice(ofs + find.length, ofs2 + 1);
+md = md.slice(0, ofs + 1) + md.slice(ofs2 + 4);
+md = md.replace(/$/g, '\r');
 
-var js = md_text.slice(ofs + find.length, ofs2 + 1);
-var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
+if (defer) { // insert into sandbox
+md_plug[plug_type] = js;
+return md;
+}
 
 var old_plug = md_plug[plug_type];
 if (!old_plug || old_plug[1] != js) {
-js = 'const x = { ' + js + ' }; x;';
+js = 'const loc = new URL("' + location.href + '"), x = { ' + js + ' }; x;';
 try {
 var x = eval(js);
 if (x['ctor']) {
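For orientation, `load_md_plug` extracts a fenced block named `copyparty_<plug_type>` from a markdown document and either evals it immediately or, with the new `defer` flag, stashes it so the sandboxed viewer can pick it up later. A hedged illustration of the input it looks for (the `basic` type name here is made up for the example; the block body is object-literal members, which is why the loader can look up `x['ctor']`):

```js
// a markdown document containing an embedded plugin block; load_md_plug()
// would strip the fenced block out of the text and keep the JS between fences
var md_text = [
    '# hello',
    '',
    '```copyparty_basic',  // hypothetical plug_type "basic"
    'ctor: function () { console.log("plugin loaded"); },',
    '```',
    'regular markdown continues here'
].join('\n');
```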
@@ -1678,7 +1735,7 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
 
 qsr('#cf_frame');
 var fr = mknod('iframe', 'cf_frame');
-fr.src = '/?cf_challenge';
+fr.src = SR + '/?cf_challenge';
 document.body.appendChild(fr);
 }
@@ -19,7 +19,7 @@ catch (ex) {
 }
 function load_fb() {
 subtle = null;
-importScripts('/.cpr/deps/sha512.hw.js');
+importScripts('deps/sha512.hw.js');
 }
 
@@ -13,15 +13,21 @@
 
 # other stuff
 
+## [`example.conf`](example.conf)
+* example config file for `-c`
+
+## [`versus.md`](versus.md)
+* similar software / alternatives (with pros/cons)
+
 ## [`changelog.md`](changelog.md)
 * occasionally grabbed from github release notes
 
+## [`devnotes.md`](devnotes.md)
+* technical stuff
+
 ## [`rclone.md`](rclone.md)
 * notes on using rclone as a fuse client/server
 
-## [`example.conf`](example.conf)
-* example config file for `-c`
-
 
 
 # junk
@@ -1,3 +1,387 @@
|
|||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0305-2018 `v1.6.7` fix no-dedup + add up2k.exe
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* controlpanel-connect: add example for webdav automount
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* fix a race which, in worst case (but unlikely on linux), **could cause data loss**
|
||||||
|
* could only happen if `--no-dedup` or volflag `copydupes` was set (**not** default)
|
||||||
|
* if two identical files were uploaded at the same time, there was a small chance that one of the files would become empty
|
||||||
|
* check if you were affected by doing a search for zero-byte files using either of the following:
|
||||||
|
* https://127.0.0.1:3923/#q=size%20%3D%200
|
||||||
|
* `find -type f -size 0`
|
||||||
|
* let me know if you lost something important and had logging enabled!
|
||||||
|
* ftp: mkdir can do multiple levels at once (support filezilla)
|
||||||
|
* fix flickering toast on upload finish
|
||||||
|
* `[💤]` (upload-baton) could disengage if chrome decides to pause the background tab for 10sec (which it sometimes does)
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
## introducing [up2k.exe](https://github.com/9001/copyparty/releases/latest/download/up2k.exe)
|
||||||
|
|
||||||
|
the commandline up2k upload / filesearch client, now as a standalone windows exe
|
||||||
|
* based on python 3.7 so it runs on 32bit windows7 or anything newer
|
||||||
|
* *no https support* (saves space + the python3.7 openssl is getting old)
|
||||||
|
* built from b39ff92f34e3fca389c78109d20d5454af761f8e so it can do long filepaths and mojibake
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
⭐️ **you probably want [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) below;**
|
||||||
|
the exe is [not recommended](https://github.com/9001/copyparty#copypartyexe) for longterm use
|
||||||
|
and the zip and tar.gz files are source code
|
||||||
|
(python packages are available at [PyPI](https://pypi.org/project/copyparty/#files))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0226-2030 `v1.6.6` r 2 0 0
|
||||||
|
|
||||||
|
two hundred releases wow
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||||
|
* currently fighting a ground fault so the demo server will be unreliable for a while
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* more docker containers! now runs on x64, x32, aarch64, armhf, ppc64, s390x
|
||||||
|
* pls let me know if you actually run copyparty on an IBM mainframe 👍
|
||||||
|
* new [event hook](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) type `xiu` runs just once for all recent uploads
|
||||||
|
* example hook [xiu-sha.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/xiu-sha.py) generates sha512 checksum files
|
||||||
|
* new arg `--rsp-jtr` simulates connection jitter
|
||||||
|
* copyparty.exe integrity selftest
|
||||||
|
* ux:
|
||||||
|
* return to previous page after logging in
|
||||||
|
* show a warning on the login page if you're not using https
|
||||||
|
* freebsd: detect `fetch` and return the [colorful sortable plaintext](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) listing
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* permit replacing empty files only during a `--blank-wt` grace period
|
||||||
|
* lifetimes: keep upload-time when a size/mtime change triggers a reindex
|
||||||
|
* during cleanup after an unlink, never rmdir the entire volume
|
||||||
|
* rescan button in the controlpanel required volumes to be e2ds
|
||||||
|
* dupes could get indexed with the wrong mtime
|
||||||
|
* only affected the search index; the filesystem got the right one
|
||||||
|
* ux: search results could include the same hit twice in case of overlapping volumes
|
||||||
|
* ux: upload UI would remain expanded permanently after visiting a huge tab
|
||||||
|
* ftp: return proper error messages when client does something illegal
|
||||||
|
* ie11: support the back button
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) replaces copyparty64.exe -- now built for 64-bit windows 10
|
||||||
|
* **on win10 it just works** -- on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145) -- no win7 support
|
||||||
|
* has the latest security patches, but sfx.py is still better for long-term use
|
||||||
|
* has pillow and mutagen; can make thumbnails and parse/index media
|
||||||
|
* [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is the old win7-compatible, dangerously-insecure edition
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0212-1411 `v1.6.5` windows smb fix + win10.exe
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* **windows-only:** smb locations (network drives) could not be accessed
|
||||||
|
* appeared in [v1.6.4](https://github.com/9001/copyparty/releases/tag/v1.6.4) while adding support for long filepaths (260chars+)
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* removed tentative support for compressed chiptunes (xmgz, xmz, xmj, ...) since FFmpeg usually doesn't
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
# introducing [copyparty640.exe](https://github.com/9001/copyparty/releases/download/v1.6.5/copyparty640.exe)
|
||||||
|
* built for win10, comes with the latest python and deps (supports win8 with [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145))
|
||||||
|
* __*much* safer__ than the old win7-compatible `copyparty.exe` and `copyparty64.exe`
|
||||||
|
* but only `copyparty-sfx.py` takes advantage of the operating system security patches
|
||||||
|
* includes pillow for thumbnails and mutagen for media indexing
|
||||||
|
* around 10% slower (trying to figure out what's up with that)
|
||||||
|
|
||||||
|
starting from the next release,
|
||||||
|
* `copyparty.exe` (win7 x32) will become `copyparty32.exe`
|
||||||
|
* `copyparty640.exe` (win10) will be the new `copyparty.exe`
|
||||||
|
* `copyparty64.exe` (win7 x64) will graduate
|
||||||
|
|
||||||
|
so the [copyparty64.exe](https://github.com/9001/copyparty/releases/download/v1.6.5/copyparty64.exe) in this release will be the "final" version able to run inside a [64bit Win7-era winPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) (all regular 32/64-bit win7 editions can just use `copyparty32.exe` instead)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0211-1802 `v1.6.4` 🔧🎲🔗🐳🇦🎶
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* [1.6 theme song](https://a.ocv.me/pub/demo/music/.bonus/#af-134e597c) // [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md)
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* 🔧 new [config syntax](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) (#20)
|
||||||
|
* the new syntax is still kinda esoteric and funky but it's an improvement
|
||||||
|
* old config files are still supported
|
||||||
|
* `--vc` prints the autoconverted config which you can copy back into the config file to upgrade
|
||||||
|
* `--vc` will also [annotate and explain](https://user-images.githubusercontent.com/241032/217356028-eb3e141f-80a6-4bc6-8d04-d8d1d874c3e9.png) the config files
|
||||||
|
* new argument `--cgen` to generate config from commandline arguments
|
||||||
|
* kinda buggy, especially the `[global]` section, so give it a lookover before saving it
|
||||||
|
* 🎲 randomize filenames on upload
|
||||||
|
* either optionally, using the 🎲 button in the up2k ui
|
||||||
|
* or force-enabled; globally with `--rand` or per-volume with volflag `rand`
|
||||||
|
* specify filename length with `nrand` (globally or volflag), default 9
|
||||||
|
* 🔗 export a list of links to your recent uploads
|
||||||
|
* `copy links` in the up2k tab (🚀) will copy links to all uploads since last page refresh,
|
||||||
|
* `copy` in the unpost tab (🧯) will copy links to all your recent uploads (max 2000 files / 12 hours by default)
|
||||||
|
* filekeys are included if that's enabled and you have access to view those (permissions `G` or `r`)
|
||||||
|
* 🇦 [arch package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) -- added in #18, thx @icxes
|
||||||
|
* maybe in aur soon!
|
||||||
|
* 🐳 [docker containers](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) -- 5 editions,
|
||||||
|
* [min](https://hub.docker.com/r/copyparty/min) (57 MiB), just copyparty without thumbnails or audio transcoding
|
||||||
|
* [im](https://hub.docker.com/r/copyparty/im) (70 MiB), thumbnails of popular image formats + media tags with mutagen
|
||||||
|
* [ac (163 MiB)](https://hub.docker.com/r/copyparty/ac) 🥇 adds audio/video thumbnails + audio transcoding + better tags
|
||||||
|
* [iv](https://hub.docker.com/r/copyparty/iv) (211 MiB), makes heif/avic/jxl faster to thumbnail
|
||||||
|
* [dj](https://hub.docker.com/r/copyparty/dj) (309 MiB), adds optional detection of musical key / bpm
|
||||||
|
* 🎶 [chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)
|
||||||
|
* transcodes mod/xm/s3m/it/mo3/mptm/mt2/okt to opus
|
||||||
|
* uses FFmpeg (libopenmpt) so the accuracy is not perfect, but most files play OK enough
|
||||||
|
* not **yet** supported in the docker container since Alpine's FFmpeg was built without libopenmpt
|
||||||
|
* windows: support long filepaths (over 260 chars)
|
||||||
|
* uses the `//?/` winapi syntax to also support windows 7
|
||||||
|
* `--ver` shows the server version on the control panel
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* markdown files didn't scale properly in the document browser
|
||||||
|
* detect and refuse multiple volume definitions sharing the same filesystem path
|
||||||
|
* don't return incomplete transcodes if multiple clients try to play the same flac file
|
||||||
|
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh): more reliable chroot cleanup, sigusr1 for config reload
|
||||||
|
* pypi packaging: compress web resources, include webdav.bat
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0131-2103 `v1.6.3` sandbox k
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* and since [1.6.0](https://github.com/9001/copyparty/releases/tag/v1.6.2) only got 2 days of prime time,
|
||||||
|
* [1.6 theme song](https://a.ocv.me/pub/demo/music/.bonus/#af-134e597c) (hosted on the demo server)
|
||||||
|
* [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) / feature comparison
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* dotfiles are hidden from search results by default
|
||||||
|
* use `--dotsrch` or volflags `dotsrch` / `nodotsrch` to specify otherwise
|
||||||
|
* they were already being excluded from tar/zip-files if `-ed` is not set, so this makes more sense -- dotfiles *should* now be undiscoverable unless `-ed` or `--smb` is set, but please use [volumes](https://github.com/9001/copyparty#accounts-and-volumes) for isolation / access-control instead, much safer
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* lots of cosmetic fixes for the new readme/prologue/epilogue sandbox
|
||||||
|
* rushed it into the previous release when someone suggested it, bad idea
|
||||||
|
* still flickers a bit (especially prologues), and hotkeys are blocked while the sandboxed document has focus
|
||||||
|
* can be disabled with `--no-sb-md --no-sb-lg` (not recommended)
|
||||||
|
* support webdav uploads from davfs2 (fix LOCK response)
|
||||||
|
* always unlink files before overwriting them, in case they are hardlinks
|
||||||
|
* was primarily an issue with `--daw` and webdav clients
|
||||||
|
* on windows, replace characters in PUT filenames as necessary
|
||||||
|
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh): support opus transcoding on debian
|
||||||
|
* `rm -rf .hist/ac` to clear the transcode cache if the old version broke some songs
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* add `rel="nofollow"` to zip download links, basic-browser link
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0129-1842 `v1.6.2` cors k
|
||||||
|
|
||||||
|
[Ellie Goulding - Stay Awake (kors k Hardcore Bootleg).mp3](https://a.ocv.me/pub/demo/music/.bonus/#af-134e597c)
|
||||||
|
* 👆 the read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
|
||||||
|
## breaking changes
|
||||||
|
but nothing is affected (that i know of):
|
||||||
|
* all requests must pass [cors validation](https://github.com/9001/copyparty#cors)
|
||||||
|
* but they almost definitely did already
|
||||||
|
* sharex and others are OK since they don't supply an `Origin` header
|
||||||
|
* [API calls](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#http-api) `?delete` and `?move` are now POST instead of GET
|
||||||
|
* not aware of any clients using these
|
||||||
|
|
||||||
|
## known issues
|
||||||
|
* the document sandbox is a bit laggy and sometimes eats hotkeys
|
||||||
|
* disable it with `--no-sb-md --no-sb-lg` if you trust everyone who has write and/or move access
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- run programs on new [uploads](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), renames, deletes
|
||||||
|
* [configurable cors](https://github.com/9001/copyparty#cors) (cross-origin resource sharing) behavior; defaults are mostly same as before
|
||||||
|
* `--allow-csrf` disables all csrf protections and makes it intentionally trivial to send authenticated requests from other domains
|
||||||
|
* sandboxed readme.md / prologues / epilogues
|
||||||
|
* documents can still run scripts like before, but can no longer tamper with the web-ui / read the login session, so the old advice of `--no-readme` and `--no-logues` is mostly deprecated
|
||||||
|
* unfortunately disables hotkeys while the text has focus + blocks dragdropping files onto that area, oh well
|
||||||
|
* password can be provided through http header `PW:` (instead of cookie `cppwd` or or url-param `?pw`)
|
||||||
|
* detect network changes (new NICs, IPs) and reconfigure / reannoucne zeroconf
|
||||||
|
* fixes mdns when running as a systemd service and copyparty is started before networking is up
|
||||||
|
* add `--freebind` to start listening on IPs before the NIC is up yet (linux-only)
|
||||||
|
* per-volume deduplication-control with volflags `hardlink`, `neversymlink`, `copydupes`
|
||||||
|
* detect curl and return a [colorful, sortable plaintext](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) directory listing instead
|
||||||
|
* add optional [powered-by-copyparty](https://user-images.githubusercontent.com/241032/215322626-11d1f02b-25f4-45df-a3d9-f8c51354a8eb.png) footnode on the controlpanel
|
||||||
|
* can be disabled with `-nb` or redirected with `--pb-url`
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* change some API calls (`?delete`, `?move`) from `GET` to `POST`
|
||||||
|
* don't panic! this was safe against authenticated csrf thanks to [SameSite=Lax](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie/SameSite#lax)
|
||||||
|
* `--getmod` restores the GETs if you need the convenience and accept the risks
|
||||||
|
* [u2cli](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) (command-line uploader):
|
||||||
|
* recover from network hiccups
|
||||||
|
* add `-ns` for slow uefi TTYs
|
||||||
|
* separate login cookies for http / https
|
||||||
|
* avoids an https login from getting accidentally sent over plaintext
|
||||||
|
* sadly no longer possible to login with internet explorer 4.0 / windows 3.11
|
||||||
|
* tar/zip-download of hidden folders
|
||||||
|
* unpost filtering was buggy for non-ascii characters
|
||||||
|
* moving a deduplicated file on a volume where deduplication was since disabled
|
||||||
|
* improved the [linux 6.0.16](https://utcc.utoronto.ca/~cks/space/blog/linux/KernelBindBugIn6016) kernel bug [workaround](https://github.com/9001/copyparty/commit/9065226c3d634a9fc15b14a768116158bc1761ad) because there is similar funk in 5.x
|
||||||
|
* add custom text selection colors because chrome is currently broken on fedora
|
||||||
|
* blockdevs (`/dev/nvme0n1`) couldn't be downloaded as files
|
||||||
|
* misc fixes for location-based reverse-proxying
|
||||||
|
* macos dualstack thing
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* added a collection of [cursed usecases](https://github.com/9001/copyparty/tree/hovudstraum/docs/cursed-usecases)
|
||||||
|
* and [comparisons to similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) in case you ever wanna jump ship
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2023-0112-0515 `v1.5.6` many hands
|
||||||
|
|
||||||
|
hello from warsaw airport (goodbye japan ;_;)
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* multiple upload handshakes in parallel
|
||||||
|
* around **5x faster** when uploading small files
|
||||||
|
* or **50x faster** if the server is on the other side of the planet
|
||||||
|
* just crank up the `parallel uploads` like crazy (max is 64)
|
||||||
|
* upload ui: total time and average speed is shown on completion
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* browser ui didn't allow specifying number of threads for file search
|
||||||
|
* dont panic if a digit key is pressed while viewing an image
|
||||||
|
* workaround [linux kernel bug](https://utcc.utoronto.ca/~cks/space/blog/linux/KernelBindBugIn6016) causing log spam on dualstack
|
||||||
|
* ~~related issue (also mostly harmless) will be fixed next relese 010770684db95bece206943768621f2c7c27bace~~
|
||||||
|
* they fixed it in linux 6.1 so these workarounds will be gone too
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2022-1230-0754 `v1.5.5` made in japan
|
||||||
|
|
||||||
|
hello from tokyo
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* image viewer now supports heif, avif, apng, svg
|
||||||
|
* [partyfuse and up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin): option to read password from textfile
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* thumbnailing could fail if a primitive build of libvips is installed
|
||||||
|
* ssdp was wonky on dualstack ipv6
|
||||||
|
* mdns could crash on networks with invalid routes
|
||||||
|
* support fat32 timestamp precisions
|
||||||
|
* fixes spurious file reindexing in volumes located on SD cards on android tablets which lie about timestamps until the next device reboot or filesystem remount
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2022-1213-1956 `v1.5.3` folder-sync + turbo-rust
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* one-way folder sync (client to server) using [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/README.md#up2kpy) `-z --dr`
|
||||||
|
* great rsync alternative when combined with `-e2ds --hardlink` deduplication on the server
|
||||||
|
* **50x faster** when uploading small files to HDD, especially SMR
|
||||||
|
* by switching sqlite to WAL which carries a small chance of temporarily forgetting the ~200 most recent uploads if you have a power outage or your OS crashes; see `--help-dbd` if you have `-mtp` plugins which produces metadata you can't afford to lose
|
||||||
|
* location-based [reverse-proxying](https://github.com/9001/copyparty/#reverse-proxy) (but it's still recommended to use a dedicated domain/subdomain instead)
|
||||||
|
* IPv6 link-local automatically enabled for TCP and zeroconf on NICs without a routable IPv6
|
||||||
|
* zeroconf network filters now accept subnets too, for example `--z-on 192.168.0.0/16`
|
||||||
|
* `.hist` folders are hidden on windows
|
||||||
|
* ux:
|
||||||
|
* more accurate total ETA on upload
|
||||||
|
* sorting of batch-unpost links was unintuitive / dangerous
|
||||||
|
* hotkey `Y` turns files into download links if nothing's selected
|
||||||
|
* option to replace or disable the mediaplayer-toggle mouse cursor with `--mpmc`
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* WAL probably/hopefully fixes #10 (we'll know in 6 months roughly)
|
||||||
|
* repair db inconsistencies (which can happen if terminated during startup)
|
||||||
|
* [davfs2](https://wiki.archlinux.org/title/Davfs2) did not approve of the authentication prompt
|
||||||
|
* the `connect` button on the control-panel didn't work on phones
|
||||||
|
* couldn't specify windows NICs in arguments `--z-on` / `--z-off` and friends
|
||||||
|
* ssdp xml escaping for `--zsl` URL
|
||||||
|
* no longer possible to accidentally launch multiple copyparty instances on the same port on windows
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2022-1203-2048 `v1.5.1` babel
|
||||||
|
|
||||||
|
named after [that other thing](https://en.wikipedia.org/wiki/Tower_of_Babel), not [the song](https://soundcloud.com/kanaze/babel-dimension-0-remix)
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
|
||||||
|
## new features
|
||||||
|
* new protocols!
|
||||||
|
* native IPv6 support, no longer requiring a reverse-proxy for that
|
||||||
|
* [webdav server](https://github.com/9001/copyparty#webdav-server) -- read/write-access to copyparty straight from windows explorer, macos finder, kde/gnome
|
||||||
|
* [smb/cifs server](https://github.com/9001/copyparty#smb-server) -- extremely buggy and unsafe, for when there is no other choice
|
||||||
|
* [zeroconf](https://github.com/9001/copyparty#zeroconf) -- copyparty announces itself on the LAN, showing up in various file managers
|
||||||
|
* [mdns](https://github.com/9001/copyparty#mdns) -- macos/kde/gnome + makes copyparty available at http://hostname.local/
|
||||||
|
* [ssdp](https://github.com/9001/copyparty#ssdp) -- windows
|
||||||
|
* commands to mount copyparty as a local disk are in the web-UI at control-panel --> `connect`
|
||||||
|
* detect buggy / malicious clients spamming the server with idle connections
|
||||||
|
* first tries to be nice with `Connection: close` (enough to fix windows-webdav)
|
||||||
|
* eventually bans the IP for `--loris` minutes (default: 1 hour)
|
||||||
|
* new arg `--xlink` for cross-volume detection of duplicate files on upload
|
||||||
|
* new arg `--no-snap` to disable upload tracking on restart
|
||||||
|
* will not create `.hist` folders unless required for thumbnails or markdown backups
|
||||||
|
* [config includes](https://github.com/9001/copyparty/blob/hovudstraum/docs/example2.conf) -- split your config across multiple config files
|
||||||
|
* ux improvements
|
||||||
|
* hotkey `?` shows a summary of all the hotkeys
|
||||||
|
* hotkey `Y` to download selected files
|
||||||
|
* position indicator when hovering over the audio scrubber
|
||||||
|
* textlabel on the volume slider
|
||||||
|
* placeholder values in textboxes
|
||||||
|
* options to hide scrollbars, compact media player, follow playing song
|
||||||
|
* phone-specific
|
||||||
|
* buttons for prev/next folder
|
||||||
|
* much better ui for hiding folder columns
|
||||||
|
|
||||||
|
## bugfixes
|
||||||
|
* now possible to upload files larger than 697 GiB
|
||||||
|
* technically a [breaking change](https://github.com/9001/copyparty#breaking-changes) if you wrote your own up2k client
|
||||||
|
* please let me know if you did because that's awesome
|
||||||
|
* several macos issues due to hardcoded syscall numbers
|
||||||
|
* sfx: fix python 3.12 support (forbids nullbytes in source code)
|
||||||
|
* use ctypes to discover network config -- fixes grapheneos, non-english windows
|
||||||
|
* detect firefox showing stale markdown documents in the editor
|
||||||
|
* detect+ban password bruteforcing on ftp too
|
||||||
|
* http 206 failing on empty files
|
||||||
|
* incorrect header timestamps on non-english locales
|
||||||
|
* remind ftp clients that you cannot cd into an image file -- fixes kde dolphin
|
||||||
|
* ux fixes
|
||||||
|
* uploader survives running into inaccessible folders
|
||||||
|
* middleclick documents in the textviewer sidebar to open in a new tab
|
||||||
|
* playing really long audio files (1 week or more) would spinlock the browser
|
||||||
|
|
||||||
|
## other changes
|
||||||
|
* autodetect max number of clients based on OS limits
|
||||||
|
* `-nc` is probably no longer necessary when running behind a reverse-proxy
|
||||||
|
* allow/try playing mkv files in chrome
|
||||||
|
* markdown documents returned as plaintext unless `?v`
|
||||||
|
* only compress `-lo` logfiles if filename ends with `.xz`
|
||||||
|
* changed sfx compression from bz2 to gz
|
||||||
|
* startup is slightly faster
|
||||||
|
* better compatibility with embedded linux
|
||||||
|
* copyparty64.exe -- 64bit edition for [running inside WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png)
|
||||||
|
* which was an actual feature request, believe it or not!
|
||||||
|
* more attempts at avoiding the [firefox fd leak](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500)
|
||||||
|
* if you are uploading many small files and the browser keeps crashing, use chrome instead
|
||||||
|
* or the commandline client, which is now available for download straight from copyparty
|
||||||
|
* control-panel --> `connect` --> `up2k.py`
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
# 2022-1013-1937 `v1.4.6` wav2opus
|
# 2022-1013-1937 `v1.4.6` wav2opus
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
 # this file gets included twice from ../some.conf,
 # setting user permissions for a volume
-rw usr1
-r usr2
-% sibling.conf
+accs:
+rw: usr1
+r: usr2
+% sibling.conf
@@ -1,3 +1,3 @@
 # and this config file gets included from ./another.conf,
 # adding a final permission for each of the two volumes in ../some.conf
-m usr1 usr2
+m: usr1, usr2
@@ -1,22 +1,29 @@
+# not actually YAML but lets pretend:
+# -*- mode: yaml -*-
+# vim: ft=yaml:
+
 # lets make two volumes with the same accounts/permissions for both;
 # first declare the accounts just once:
-u usr1:passw0rd
-u usr2:letmein
+[accounts]
+usr1: passw0rd
+usr2: letmein
 
-# and listen on 127.0.0.1 only, port 2434
--i 127.0.0.1
--p 2434
+[global]
+i: 127.0.0.1  # listen on 127.0.0.1 only,
+p: 2434  # port 2434
+e2ds  # enable file indexing+scanning
+e2ts  # and multimedia indexing+scanning
+# (inline comments are OK if there is 2 spaces before the #)
 
 # share /usr/share/games from the server filesystem
-/usr/share/games
-/vidya
-# include config file with volume permissions
-% foo/another.conf
+[/vidya]
+/usr/share/games
+% foo/another.conf  # include config file with volume permissions
 
 # and share your ~/Music folder too
-~/Music
-/bangers
+[/bangers]
+~/Music
 % foo/another.conf
 
 # which should result in each of the volumes getting the following permissions:
 # usr1 read/write/move
docs/cursed-usecases/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
insane ways to use copyparty


## wireless keyboard

problem: you wanna control mpv or whatever software from the couch but you don't have a wireless keyboard

"solution": load some custom javascript which renders a virtual keyboard on the upload UI and each keystroke is actually an upload which gets picked up by a dummy metadata parser which forwards the keystrokes into xdotool

[no joke, this actually exists and it wasn't even my idea or handiwork (thx steen)](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js)


## appxsvc tarpit

problem: `svchost.exe` is using 100% of a cpu core, and upon further inspection (`procmon`) it is `wsappx` desperately trying to install something, repeatedly reading a file named `AppxManifest.xml` and messing with an sqlite3 database

"solution": create a virtual filesystem which is intentionally slow and trick windows into reading it from there instead

* create a file called `AppxManifest.xml` and put something dumb in it
* serve the file from a copyparty instance with `--rsp-slp=1` so every request will hang for 1 sec
* `net use m: http://127.0.0.1:3993/` (mount copyparty using the windows-native webdav client)
* `mklink /d c:\windows\systemapps\microsoftwindows.client.cbs_cw5n1h2txyewy\AppxManifest.xml m:\AppxManifest.xml`
@@ -3,7 +3,13 @@
 * top
 * [future plans](#future-plans) - some improvement ideas
 * [design](#design)
+  * [up2k](#up2k) - quick outline of the up2k protocol
 * [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
+* [http api](#http-api)
+  * [read](#read)
+  * [write](#write)
+  * [admin](#admin)
+  * [general](#general)
 * [assumptions](#assumptions)
 * [mdns](#mdns)
 * [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
@@ -19,13 +25,15 @@
 
 some improvement ideas
 
-* the JS is a mess -- a preact rewrite would be nice
+* the JS is a mess -- a ~~preact~~ rewrite would be nice
   * preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
   * good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
+  * maybe preact / vdom isn't the best choice, could just wait for the Next Big Thing
 * the UX is a mess -- a proper design would be nice
   * very organic (much like the python/js), everything was an afterthought
   * true for both the layout and the visual flair
   * something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
+  * would preferably keep the information density, just more organized yet [not too boring](https://blog.rachelbinx.com/2023/02/unbearable-sameness/)
 * some of the python files are way too big
   * `up2k.py` ended up doing all the file indexing / db management
   * `httpcli.py` should be separated into modules in general
@@ -35,7 +43,7 @@ some improvement ideas
 
 ## up2k
 
-quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
+quick outline of the up2k protocol, see [uploading](https://github.com/9001/copyparty#uploading) for the web-client
 * the up2k client splits a file into an "optimal" number of chunks
   * 1 MiB each, unless that becomes more than 256 chunks
   * tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
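A minimal sketch of that chunk-size rule in JS (illustrative; the real `get_chunksize` in the up2k client may differ in detail):

```js
// grow the chunksize until <= 256 chunks or the 32 MiB cap is reached,
// stepping through 1, 1.5, 2, 3, 4, 6, 8, 12, 16, 24, 32 MiB
function guess_chunksize(filesize) {
    var chunksize = 1024 * 1024,  // start at 1 MiB
        stepsize = 512 * 1024,
        nsteps = 0;

    while (filesize / chunksize > 256 && chunksize < 32 * 1024 * 1024) {
        chunksize += stepsize;
        if (++nsteps % 2 == 0)
            stepsize *= 2;
    }
    return chunksize;
}
```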
@@ -121,7 +129,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
 
 | method | params | result |
 |--|--|--|
-| GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
+| POST | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
 
 | method | params | body | result |
 |--|--|--|--|
@@ -131,7 +139,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
|||||||
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
|
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
|
||||||
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
|
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
|
||||||
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
|
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
|
||||||
| GET | `?delete` | | delete URL recursively |
|
| POST | `?delete` | | delete URL recursively |
|
||||||
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
|
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
|
||||||
| uPOST | | `msg=foo` | send message `foo` into server log |
|
| uPOST | | `msg=foo` | send message `foo` into server log |
|
||||||
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
|
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
|
||||||
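a few hedged curl sketches of the write api above; host, port, folder names and the `pw` value are placeholders, and `mPOST` is assumed to mean a multipart form POST:

```sh
# upload some.bin into the folder /inc and ask for a json reply (mPOST, act=bput)
curl -F act=bput -F f=@some.bin "http://127.0.0.1:3923/inc/?j&pw=foo"

# create the directory "stuff" inside /inc (mPOST, act=mkdir)
curl -F act=mkdir -F name=stuff "http://127.0.0.1:3923/inc/?pw=foo"

# move /inc/some.bin to /foo/bar (now a POST rather than a GET)
curl -X POST "http://127.0.0.1:3923/inc/some.bin?move=/foo/bar&pw=foo"
```
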
````diff
@@ -223,7 +231,12 @@ rm -rf copyparty/web/deps
 curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
 python3 x.py --version
 rm x.py
-mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
+cp -R /tmp/pe-copyparty.$(id -u)/copyparty/web/deps copyparty/web/
+```
+
+or you could build the web-dependencies from source instead (NB: does not include prismjs, need to grab that manually):
+```sh
+make -C scripts/deps-docker
 ```
 
 then build the sfx using any of the following examples:
````

```diff
@@ -1,59 +1,69 @@
+# not actually YAML but lets pretend:
+# -*- mode: yaml -*-
+# vim: ft=yaml:
+
 # append some arguments to the commandline;
-# the first space in a line counts as a separator,
-# any additional spaces are part of the value
--e2dsa
--e2ts
--i 127.0.0.1
+# accepts anything listed in --help (leading dashes are optional)
+# and inline comments are OK if there is 2 spaces before the '#'
+[global]
+  p: 8086, 3939  # listen on ports 8086 and 3939
+  e2dsa  # enable file indexing and filesystem scanning
+  e2ts   # and enable multimedia indexing
+  z, qr  # and zeroconf and qrcode (you can comma-separate arguments)
 
 # create users:
-# u username:password
-u ed:123
-u k:k
+[accounts]
+  ed: 123  # username: password
+  k: k
 
-# leave a blank line between volumes
-# (and also between users and volumes)
+# create volumes:
+[/]  # create a volume at "/" (the webroot), which will
+  .  # share the contents of "." (the current directory)
+  accs:
+    r: *    # everyone gets read-access, but
+    rw: ed  # the user "ed" gets read-write
 
-# create a volume:
-# share "." (the current directory)
-# as "/" (the webroot) for the following users:
-# "r" grants read-access for anyone
-# "rw ed" grants read-write to ed
-.
-/
-r
-rw ed
-
-# custom permissions for the "priv" folder:
-# user "k" can only see/read the contents
-# user "ed" gets read-write access
-./priv
-/priv
-r k
-rw ed
-
-# this does the same thing,
-# and will cause an error on startup since /priv is already taken:
-./priv
-/priv
-r ed k
-w ed
+# let's specify different permissions for the "priv" subfolder
+# by creating another volume at that location:
+[/priv]
+  ./priv
+  accs:
+    r: k    # the user "k" can see the contents,
+    rw: ed  # while "ed" gets read-write
 
 # share /home/ed/Music/ as /music and let anyone read it
 # (this will replace any folder called "music" in the webroot)
-/home/ed/Music
-/music
-r
+[/music]
+  /home/ed/Music
+  accs:
+    r: *
+
+# and a folder where anyone can upload, but nobody can see the contents
+[/dump]
+  /home/ed/inc
+  accs:
+    w: *
+  flags:
+    e2d     # the e2d volflag enables the uploads database
+    nodupe  # the nodupe volflag rejects duplicate uploads
+    # (see --help-flags for all available volflags to use)
 
 # and a folder where anyone can upload
-# but nobody can see the contents
-# and set the e2d flag to enable the uploads database
-# and set the nodupe flag to reject duplicate uploads
-/home/ed/inc
-/dump
-w
-c e2d
-c nodupe
+# and anyone can access their own uploads, but nothing else
+[/sharex]
+  /home/ed/inc/sharex
+  accs:
+    wG: *        # wG = write-upget = see your own uploads only
+    rwmd: ed, k  # read-write-modify-delete for users "ed" and "k"
+  flags:
+    e2d, d2t, fk: 4
+    # volflag "e2d" enables the uploads database,
+    # "d2t" disables multimedia parsers (in case the uploads are malicious),
+    # "dthumb" disables thumbnails (same reason),
+    # "fk" enables filekeys (necessary for upget permission) (4 chars long)
+    # -- note that its fine to combine all the volflags on
+    # one line because only the last volflag has an argument
 
 # this entire config file can be replaced with these arguments:
-# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
+# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe -v /home/ed/inc/sharex:sharex:wG:c,e2d,d2t,fk=4
 # but note that the config file always wins in case of conflicts
```

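a config file like the one above is loaded with `-c`, same as the docker images do; a minimal sketch (the filename is arbitrary):

```sh
# save the config as copyparty.conf, then:
python3 copyparty-sfx.py -c copyparty.conf
```
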
**docs/versus.md** (new file, 589 lines)

# alternatives to copyparty

copyparty compared against all similar software i've bumped into

there is probably some unintentional bias so please submit corrections

currently up to date with [awesome-selfhosted](https://github.com/awesome-selfhosted/awesome-selfhosted) but that probably won't last


## symbol legends

### ...in feature matrices:
* `█` = absolutely
* `╱` = partially
* `•` = maybe?
* ` ` = nope

### ...in reviews:
* ✅ = advantages over copyparty
* 🔵 = similarities
* ⚠️ = disadvantages (something copyparty does "better")


## toc

* top
* [recommendations](#recommendations)
* [feature comparisons](#feature-comparisons)
    * [general](#general)
    * [file transfer](#file-transfer)
    * [protocols and client support](#protocols-and-client-support)
    * [server configuration](#server-configuration)
    * [server capabilities](#server-capabilities)
    * [client features](#client-features)
    * [integration](#integration)
    * [another matrix](#another-matrix)
* [reviews](#reviews)
    * [copyparty](#copyparty)
    * [hfs2](#hfs2)
    * [hfs3](#hfs3)
    * [nextcloud](#nextcloud)
    * [seafile](#seafile)
    * [rclone](#rclone)
    * [dufs](#dufs)
    * [chibisafe](#chibisafe)
    * [kodbox](#kodbox)
    * [filebrowser](#filebrowser)
    * [filegator](#filegator)
    * [updog](#updog)
    * [goshs](#goshs)
    * [gimme-that](#gimme-that)
    * [ass](#ass)
    * [linx](#linx)
    * [h5ai](#h5ai)
    * [autoindex](#autoindex)
* [briefly considered](#briefly-considered)


# recommendations

* [kodbox](https://github.com/kalcaddle/kodbox) ([review](#kodbox)) appears to be a fantastic alternative if you're not worried about running chinese software, with several advantages over copyparty
    * but anything you want to share must be moved into the kodbox filesystem
* [seafile](https://github.com/haiwen/seafile) ([review](#seafile)) and [nextcloud](https://github.com/nextcloud/server) ([review](#nextcloud)) could be decent alternatives if you need something heavier than copyparty
    * but their [license](https://snyk.io/learn/agpl-license/) is [problematic](https://opensource.google/documentation/reference/using/agpl-policy)
    * and copyparty is way better at uploads in particular (resumable, accelerated)
    * and anything you want to share must be moved into the respective filesystems
* [filebrowser](https://github.com/filebrowser/filebrowser) ([review](#filebrowser)) and [dufs](https://github.com/sigoden/dufs) ([review](#dufs)) are simpler copyparties but with a settings gui
    * has some of the same strengths of copyparty, being portable and able to work with an existing folder structure
    * ...but copyparty is better at uploads + some other things


# feature comparisons

```
<&Kethsar> copyparty is very much bloat ed, so yeah
```

the table headers in the matrixes below are the different softwares, with a quick review of each software in the next section

the softwares,
* `a` = [copyparty](https://github.com/9001/copyparty)
* `b` = [hfs2](https://rejetto.com/hfs/)
* `c` = [hfs3](https://github.com/rejetto/hfs)
* `d` = [nextcloud](https://github.com/nextcloud/server)
* `e` = [seafile](https://github.com/haiwen/seafile)
* `f` = [rclone](https://github.com/rclone/rclone), specifically `rclone serve webdav .`
* `g` = [dufs](https://github.com/sigoden/dufs)
* `h` = [chibisafe](https://github.com/chibisafe/chibisafe)
* `i` = [kodbox](https://github.com/kalcaddle/kodbox)
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
* `k` = [filegator](https://github.com/filegator/filegator)

some softwares not in the matrixes,
* [updog](#updog)
* [goshs](#goshs)
* [gimme-that](#gimmethat)
* [ass](#ass)
* [linx](#linx)

symbol legend,
* `█` = absolutely
* `╱` = partially
* `•` = maybe?
* ` ` = nope


## general

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ |
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | |
| good documentation | | | | █ | █ | █ | █ | | | █ | █ |
| runs on iOS | ╱ | | | | | ╱ | | | | | |
| runs on Android | █ | | | | | █ | | | | | |
| runs on WinXP | █ | █ | | | | █ | | | | | |
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ |
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | |
| portable binary | █ | █ | █ | | | █ | █ | | | █ | |
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | |
| android app | ╱ | | | █ | █ | | | | | | |
| iOS app | | | | █ | █ | | | | | | |

* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
* `a`/copyparty remarks:
    * no gui for server settings; only for client-side stuff
    * can theoretically run on iOS / iPads using [iSH](https://ish.app/), but only the iPad will offer sufficient multitasking i think
    * [android app](https://f-droid.org/en/packages/me.ocv.partyup/) is for uploading only
* `b`/hfs2 runs on linux through wine
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
* `h`/chibisafe has undocumented windows support


## file transfer

*the thing that copyparty is actually kinda good at*

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
| download folder as tar | █ | | | | | | | | | █ | |
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ |
| resumable uploads | █ | | | | | | | | █ | | █ |
| upload segmenting | █ | | | | | | | █ | █ | | █ |
| upload acceleration | █ | | | | | | | | █ | | █ |
| upload verification | █ | | | █ | █ | | | | █ | | |
| upload deduplication | █ | | | | █ | | | | █ | | |
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ |
| keep last-modified time | █ | | | █ | █ | █ | | | | | |
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ |
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | |
| ┗ max filesize | █ | | | | | | | █ | | | █ |
| ┗ max items in folder | █ | | | | | | | | | | |
| ┗ max file age | █ | | | | | | | | █ | | |
| ┗ max uploads over time | █ | | | | | | | | | | |
| ┗ compress before write | █ | | | | | | | | | | |
| ┗ randomize filename | █ | | | | | | | █ | █ | | |
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | |
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | |
| checksums provided | | | | █ | █ | | | | █ | ╱ | |
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ |

* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example

* `upload acceleration` = each file can be uploaded using several TCP connections, which can offer a huge speed boost over huge distances / on flaky connections -- like the good old [download accelerators](https://en.wikipedia.org/wiki/GetRight) except in reverse

* `upload verification` = uploads are checksummed or otherwise confirmed to have been transferred correctly

* `checksums provided` = when downloading a file from the server, the file's checksum is provided for verification client-side

* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge

* `a`/copyparty can reject uploaded files (based on complex conditions), for example [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
* `j`/filebrowser remarks:
    * can provide checksums for single files on request
    * can probably do extension/mimetype rejection similar to copyparty
* `k`/filegator download-as-zip is not streaming; it creates the full zipfile before download can start


## protocols and client support

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | |
| serve ftp | █ | | | | | █ | | | | | |
| serve ftps | █ | | | | | █ | | | | | |
| serve sftp | | | | | | █ | | | | | |
| serve smb/cifs | ╱ | | | | | █ | | | | | |
| serve dlna | | | | | | █ | | | | | |
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ |
| zeroconf | █ | | | | | | | | | | |
| supports netscape 4 | ╱ | | | | | █ | | | | | • |
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • |
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | |
| undecodable filenames | █ | | | • | • | █ | | • | • | | |

* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
* `mojibake filenames` = filenames decoded with the wrong codec and then reencoded (usually to utf-8), so `宇多田ヒカル` might look like `ëFæ╜ôcâqâJâï`
* `undecodable filenames` = pure binary garbage which cannot be parsed as utf-8
    * you can successfully play `$'\355\221'` with mpv through mounting a remote copyparty server with rclone, pog
* `a`/copyparty remarks:
    * extremely minimal samba/cifs server
    * netscape 4 / ie6 support is mostly listed as a joke altho some people have actually found it useful ([ie4 tho](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png))


## server configuration

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| config from cmd args | █ | | | | | █ | █ | | | █ | |
| config files | █ | █ | █ | ╱ | ╱ | █ | | █ | | █ | • |
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ |
| same-port http / https | █ | | | | | | | | | | |
| listen multiple ports | █ | | | | | | | | | | |
| virtual file system | █ | █ | █ | | | | █ | | | | |
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • |
| folder-rproxy ok | █ | | | | █ | █ | | • | • | • | • |

* `folder-rproxy` = reverse-proxying without dedicating an entire (sub)domain, using a subfolder instead


## server capabilities

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| single-sign-on | | | | █ | █ | | | | • | | |
| token auth | | | | █ | █ | | | █ | | | |
| per-volume permissions | █ | █ | █ | █ | █ | █ | █ | | █ | █ | ╱ |
| per-folder permissions | ╱ | | | █ | █ | | █ | | █ | █ | ╱ |
| per-file permissions | | | | █ | █ | | █ | | █ | | |
| per-file passwords | █ | | | █ | █ | | █ | | █ | | |
| unmap subfolders | █ | | | | | | █ | | | █ | ╱ |
| index.html blocks list | | | | | | | █ | | | • | |
| write-only folders | █ | | | | | | | | | | █ |
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ |
| file versioning | | | | █ | █ | | | | | | |
| file encryption | | | | █ | █ | █ | | | | | |
| file indexing | █ | | █ | █ | █ | | | █ | █ | █ | |
| ┗ per-volume db | █ | | • | • | • | | | • | • | | |
| ┗ db stored in folder | █ | | | | | | | • | • | █ | |
| ┗ db stored out-of-tree | █ | | █ | █ | █ | | | • | • | █ | |
| ┗ existing file tree | █ | | █ | | | | | | | █ | |
| file action event hooks | █ | | | | | | | | | █ | |
| one-way folder sync | █ | | | █ | █ | █ | | | | | |
| full sync | | | | █ | █ | | | | | | |
| speed throttle | | █ | █ | | | █ | | | █ | | |
| anti-bruteforce | █ | █ | █ | █ | █ | | | | • | | |
| dyndns updater | | █ | | | | | | | | | |
| self-updater | | | █ | | | | | | | | |
| log rotation | █ | | █ | █ | █ | | | • | █ | | |
| upload tracking / log | █ | █ | • | █ | █ | | | █ | █ | | |
| curl-friendly ls | █ | | | | | | | | | | |
| curl-friendly upload | █ | | | | | █ | █ | • | | | |

* `unmap subfolders` = "shadowing"; mounting a local folder in the middle of an existing filesystem tree in order to disable access below that path
* `files stored as-is` = uploaded files are trivially readable from the server HDD, not sliced into chunks or in weird folder structures or anything like that
* `db stored in folder` = filesystem index can be written to a database file inside the folder itself
* `db stored out-of-tree` = filesystem index can be stored some place else, not necessarily inside the shared folders
* `existing file tree` = will index any existing files it finds
* `file action event hooks` = run script before/after upload, move, rename, ...
* `one-way folder sync` = like rsync, optionally deleting unexpected files at target
* `full sync` = stateful, dropbox-like sync
* `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled
* `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../`
* `a`/copyparty remarks:
    * one-way folder sync from local to server can be done efficiently with [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py), or with webdav and conventional rsync
    * can hot-reload config files (with just a few exceptions)
    * can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead
    * [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative
    * upload history can be visualized using [partyjournal](https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py)
* `k`/filegator remarks:
    * `per-* permissions` -- can limit a user to one folder and its subfolders
    * `unmap subfolders` -- can globally filter a list of paths


## client features

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - |
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ |
| themes | █ | █ | | █ | | | | | █ | | |
| directory tree nav | █ | ╱ | | | █ | | | | █ | | ╱ |
| multi-column sorting | █ | | | | | | | | | | |
| thumbnails | █ | | | ╱ | ╱ | | | █ | █ | ╱ | |
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | |
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | |
| ┗ audio spectrograms | █ | | | | | | | | | | |
| audio player | █ | | | █ | █ | | | | █ | ╱ | |
| ┗ gapless playback | █ | | | | | | | | • | | |
| ┗ audio equalizer | █ | | | | | | | | | | |
| ┗ waveform seekbar | █ | | | | | | | | | | |
| ┗ OS integration | █ | | | | | | | | | | |
| ┗ transcode to lossy | █ | | | | | | | | | | |
| video player | █ | | | █ | █ | | | | █ | █ | |
| ┗ video transcoding | | | | | | | | | █ | | |
| audio BPM detector | █ | | | | | | | | | | |
| audio key detector | █ | | | | | | | | | | |
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
| search by date / size | █ | | | | █ | | | █ | █ | | |
| search by bpm / key | █ | | | | | | | | | | |
| search by custom tags | | | | | | | | █ | █ | | |
| search in file contents | | | | █ | █ | | | | █ | | |
| search by custom parser | █ | | | | | | | | | | |
| find local file | █ | | | | | | | | | | |
| undo recent uploads | █ | | | | | | | | | | |
| create directories | █ | | | █ | █ | ╱ | █ | █ | █ | █ | █ |
| image viewer | █ | | | █ | █ | | | | █ | █ | █ |
| markdown viewer | █ | | | | █ | | | | █ | ╱ | ╱ |
| markdown editor | █ | | | | █ | | | | █ | ╱ | ╱ |
| readme.md in listing | █ | | | █ | | | | | | | |
| rename files | █ | █ | █ | █ | █ | ╱ | █ | | █ | █ | █ |
| batch rename | █ | | | | | | | | █ | | |
| cut / paste files | █ | █ | | █ | █ | | | | █ | | |
| move files | █ | █ | | █ | █ | | █ | | █ | █ | █ |
| delete files | █ | █ | | █ | █ | ╱ | █ | █ | █ | █ | █ |
| copy files | | | | | █ | | | | █ | █ | █ |

* `single-page app` = multitasking; possible to continue navigating while uploading
* `audio player » os-integration` = use the [lockscreen](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) or [media hotkeys](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) to play/pause, prev/next song
* `search by custom tags` = ability to tag files through the UI and search by those
* `find local file` = drop a file into the browser to see if it exists on the server
* `undo recent uploads` = accounts without delete permissions have a time window where they can undo their own uploads
* `a`/copyparty has teeny-tiny skips playing gapless albums depending on audio codec (opus best)
* `b`/hfs2 has a very basic directory tree view, not showing sibling folders
* `f`/rclone can do some file management (mkdir, rename, delete) when hosting through webdav
* `j`/filebrowser has a plaintext viewer/editor
* `k`/filegator directory tree is a modal window


## integration

| feature / software | a | b | c | d | e | f | g | h | i | j | k |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
| OS alert on upload | █ | | | | | | | | | ╱ | |
| discord | █ | | | | | | | | | ╱ | |
| ┗ announce uploads | █ | | | | | | | | | | |
| ┗ custom embeds | | | | | | | | | | | |
| sharex | █ | | | █ | | █ | ╱ | █ | | | |
| flameshot | | | | | | █ | | | | | |

* sharex `╱` = yes, but does not provide example sharex config
* `a`/copyparty remarks:
    * `OS alert on upload` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
    * `discord » announce uploads` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* `j`/filebrowser can probably pull those off with command runners similar to copyparty


## another matrix

| software / feature | lang | lic | size |
| ------------------ | ------ | ------ | ------ |
| copyparty | python | █ mit | 0.6 MB |
| hfs2 | delphi | ░ gpl3 | 2 MB |
| hfs3 | ts | ░ gpl3 | 36 MB |
| nextcloud | php | ‼ agpl | • |
| seafile | c | ‼ agpl | • |
| rclone | c | █ mit | 45 MB |
| dufs | rust | █ apl2 | 2.5 MB |
| chibisafe | ts | █ mit | • |
| kodbox | php | ░ gpl3 | 92 MB |
| filebrowser | go | █ apl2 | 20 MB |
| filegator | php | █ mit | • |
| updog | python | █ mit | 17 MB |
| goshs | go | █ mit | 11 MB |
| gimme-that | python | █ mit | 4.8 MB |
| ass | ts | █ isc | • |
| linx | go | ░ gpl3 | 20 MB |

* `size` = binary (if available) or installed size of program and its dependencies
    * copyparty size is for the [standalone python](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) file; the [windows exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is **6 MiB**


# reviews

* ✅ are advantages over copyparty
* 🔵 are similarities
* ⚠️ are disadvantages (something copyparty does "better")

## [copyparty](https://github.com/9001/copyparty)
* resumable uploads which are verified server-side
* upload segmenting allows for potentially much faster uploads on some connections, and terabyte-sized files even on cloudflare
    * both of the above are surprisingly uncommon features
* very cross-platform (python, no dependencies)

## [hfs2](https://rejetto.com/hfs/)
* the OG, the legend
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ windows-only
* ✅ config GUI
* vfs with gui config, per-volume permissions
* starting to show its age, hence the rewrite:

## [hfs3](https://github.com/rejetto/hfs)
* nodejs; cross-platform
* vfs with gui config, per-volume permissions
* still early development, let's revisit later

## [nextcloud](https://github.com/nextcloud/server)
* php, mariadb
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
    * not that bad, can probably be remedied with bindmounts or maybe symlinks
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no write-only / upload-only folders
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
    * copyparty: android upload-only app
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents
* ✅ webauthn passwordless authentication

## [seafile](https://github.com/haiwen/seafile)
* c, mariadb
* ⚠️ [isolated on-disk file hierarchy](https://manual.seafile.com/maintain/seafile_fsck/), incompatible with other software
    * *much worse than nextcloud* in that regard
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no write-only / upload-only folders
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
    * copyparty: android upload-only app
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents

## [rclone](https://github.com/rclone/rclone)
* nice standalone c program
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no web-ui, just a server / downloader / uploader utility
* ✅ works with almost any protocol, cloud provider
* ⚠️ copyparty's webdav server is slightly faster

## [dufs](https://github.com/sigoden/dufs)
* rust; cross-platform (windows, linux, macos)
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ doesn't support crazy filenames
* ✅ per-url access control (copyparty is per-volume)
* 🔵 basic but really snappy ui
* 🔵 upload, rename, delete, ... see feature matrix

## [chibisafe](https://github.com/chibisafe/chibisafe)
* nodejs; recommends docker
* 🔵 *it has upload segmenting!*
    * ⚠️ but uploads are still not resumable / accelerated / integrity-checked
* ⚠️ not portable
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
* ⚠️ http/webdav only; no ftp or zeroconf
* ✅ pretty ui
* ✅ control panel for server settings and user management
* ✅ user registration
* ✅ searchable image tags; delete by tag
* ✅ browser extension to upload files to the server
* ✅ reject uploads by file extension
    * copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ token auth (api keys)

## [kodbox](https://github.com/kalcaddle/kodbox)
* this thing is insane
* php; [docker](https://hub.docker.com/r/kodcloud/kodbox)
* 🔵 *upload segmenting, acceleration, and integrity checking!*
    * ⚠️ but uploads are not resumable(?)
* ⚠️ not portable
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
* ⚠️ http/webdav only; no ftp or zeroconf
* ⚠️ some parts of the GUI are in chinese
* ✅ fantastic ui/ux
* ✅ control panel for server settings and user management
* ✅ file tags; file discussions!?
* ✅ video transcoding
* ✅ unzip uploaded archives
* ✅ IDE with syntax highlighting
* ✅ wysiwyg editor for openoffice files

## [filebrowser](https://github.com/filebrowser/filebrowser)
* go; cross-platform (windows, linux, mac)
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ http only; no webdav / ftp / zeroconf
* ⚠️ doesn't support crazy filenames
* ⚠️ no directory tree nav
* ⚠️ limited file search
* ✅ settings gui
* ✅ good ui/ux
    * ⚠️ but no directory tree for navigation
* ✅ user signup
* ✅ command runner / remote shell
* 🔵 supposed to have write-only folders but couldn't get it to work

## [filegator](https://github.com/filegator/filegator)
* php; cross-platform (windows, linux, mac)
* 🔵 *it has upload segmenting and acceleration*
    * ⚠️ but uploads are still not integrity-checked
* ⚠️ http only; no webdav / ftp / zeroconf
* ⚠️ does not support symlinks
* ⚠️ expensive download-as-zip feature
* ⚠️ doesn't support crazy filenames
* ⚠️ limited file search

## [updog](https://github.com/sc0tfree/updog)
* python; cross-platform
* basic directory listing with upload feature
* ⚠️ less portable
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no vfs; single folder, single account

## [goshs](https://github.com/patrickhener/goshs)
* go; cross-platform (windows, linux, mac)
* ⚠️ no vfs; single folder, single account
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ✅ cool clipboard widget
    * copyparty: the markdown editor is an ok substitute
* 🔵 read-only and upload-only modes (same as copyparty's write-only)
* 🔵 https, webdav, but no ftp

## [gimme-that](https://github.com/nejdetckenobi/gimme-that)
* python, but with c dependencies
* ⚠️ no vfs; single folder, multiple accounts
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ weird folder structure for uploads
* ✅ clamav antivirus check on upload! neat
* 🔵 optional max-filesize, os-notification on uploads
    * copyparty: os-notification available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)

## [ass](https://github.com/tycrek/ass)
* nodejs; recommends docker
* ⚠️ not portable
* ⚠️ upload only; no browser
* ⚠️ upload through sharex only; no web-ui
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* ✅ token auth
* ✅ gps metadata stripping
    * copyparty: possible with [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py)
* ✅ discord integration (custom embeds, upload webhook)
    * copyparty: [upload webhook plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* ✅ reject uploads by mimetype
    * copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that
* ✅ custom 404 pages

## [linx](https://github.com/ZizzyDizzyMC/linx-server/)
* originally [andreimarcu/linx-server](https://github.com/andreimarcu/linx-server) but development has ended
* ⚠️ uploads not resumable / accelerated / integrity-checked
    * ⚠️ on cloudflare: max upload size 100 MiB
* 🔵 some of its unique features have been added to copyparty as former linx users have migrated
    * file expiration timers, filename randomization
* ✅ password-protected files
    * copyparty: password-protected folders + filekeys to skip the folder password seem to cover most usecases
* ✅ file deletion keys
* ✅ download files as torrents
* ✅ remote uploads (send a link to the server and it downloads it)
    * copyparty: available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that

## [h5ai](https://larsjung.de/h5ai/)
* ⚠️ read only; no upload/move/delete
* ⚠️ search hits the filesystem directly; not indexed/cached
* ✅ slick ui
* ✅ in-browser qr generator to share URLs
* 🔵 directory tree, image viewer, thumbnails, download-as-tar

## [autoindex](https://github.com/nielsAD/autoindex)
* ⚠️ read only; no upload/move/delete
* ✅ directory cache for faster browsing of cloud storage
    * copyparty: local index/cache for recursive search (names/attrs/tags), but not for browsing


# briefly considered
* [pydio](https://github.com/pydio/cells): python/agpl3, looks great, fantastic ux -- but needs mariadb, systemwide install
* [gossa](https://github.com/pldubouilh/gossa): go/mit, minimalistic, basic file upload, text editor, mkdir and rename (no delete/move)

```diff
@@ -3,9 +3,9 @@ FROM alpine:3.16
 WORKDIR /z
 ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
 ver_hashwasm=4.9.0 \
-ver_marked=4.2.3 \
+ver_marked=4.2.5 \
 ver_mde=2.18.0 \
-ver_codemirror=5.65.10 \
+ver_codemirror=5.65.11 \
 ver_fontawesome=5.13.0 \
 ver_zopfli=1.0.3
```

```diff
@@ -9,7 +9,7 @@ font-family: 'fa';
 font-style: normal;
 font-weight: 400;
 font-display: block;
-src: url("/.cpr/deps/mini-fa.woff") format("woff");
+src: url("mini-fa.woff") format("woff");
 }
 
 .fa,
```

**scripts/docker/Dockerfile.ac** (new file, 20 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-ac" \
    org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"

RUN apk --no-cache add \
        wget \
        py3-pillow \
        ffmpeg \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
```

**scripts/docker/Dockerfile.dj** (new file, 37 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-dj" \
    org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"

COPY i/bin/mtag/install-deps.sh ./
COPY i/bin/mtag/audio-bpm.py /mtag/
COPY i/bin/mtag/audio-key.py /mtag/
RUN apk add -U \
        wget \
        py3-pillow py3-pip py3-cffi \
        ffmpeg \
        vips-jxl vips-heif vips-poppler vips-magick \
        py3-numpy fftw libsndfile \
        vamp-sdk vamp-sdk-libs \
    && python3 -m pip install pyvips \
    && apk --no-cache add -t .bd \
        bash wget gcc g++ make cmake patchelf \
        python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
        py3-wheel py3-numpy-dev \
        vamp-sdk-dev \
    && bash install-deps.sh \
    && apk del py3-pip .bd \
    && rm -rf /var/cache/apk/* \
    && chmod 777 /root \
    && ln -s /root/vamp /root/.local / \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
```

**scripts/docker/Dockerfile.im** (new file, 19 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-im" \
    org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"

RUN apk --no-cache add \
        wget \
        py3-pillow py3-mutagen \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
```

**scripts/docker/Dockerfile.iv** (new file, 23 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-iv" \
    org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"

RUN apk --no-cache add \
        wget \
        py3-pillow py3-pip py3-cffi \
        ffmpeg \
        vips-jxl vips-heif vips-poppler vips-magick \
    && python3 -m pip install pyvips \
    && apk del py3-pip \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
```

**scripts/docker/Dockerfile.min** (new file, 18 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-min" \
    org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"

RUN apk --no-cache add \
        python3 \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
```

**scripts/docker/Dockerfile.min.pip** (new file, 18 lines)

```dockerfile
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
    org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
    org.opencontainers.image.licenses="MIT" \
    org.opencontainers.image.title="copyparty-min-pip" \
    org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"

RUN apk --no-cache add python3 py3-pip \
    && python3 -m pip install copyparty \
    && apk del py3-pip \
    && mkdir /cfg /w \
    && chmod 777 /cfg /w \
    && echo % /cfg > initcfg

WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "-m", "copyparty", "-c", "/z/initcfg"]
```

**scripts/docker/Makefile** (new file, 65 lines)

```makefile
self := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))

all:
	-service docker start
	-systemctl start docker

	rm -rf i
	mkdir i
	tar -cC../.. dist/copyparty-sfx.py bin/mtag | tar -xvCi

	docker build -t copyparty/min:latest -f Dockerfile.min .
	echo 'scale=1;'`docker save copyparty/min:latest | pigz -c | wc -c`/1024/1024 | bc

	# docker build -t copyparty/min-pip:latest -f Dockerfile.min.pip .
	# echo 'scale=1;'`docker save copyparty/min-pip:latest | pigz -c | wc -c`/1024/1024 | bc

	docker build -t copyparty/im:latest -f Dockerfile.im .
	echo 'scale=1;'`docker save copyparty/im:latest | pigz -c | wc -c`/1024/1024 | bc

	docker build -t copyparty/iv:latest -f Dockerfile.iv .
	echo 'scale=1;'`docker save copyparty/iv:latest | pigz -c | wc -c`/1024/1024 | bc

	docker build -t copyparty/ac:latest -f Dockerfile.ac .
	echo 'scale=1;'`docker save copyparty/ac:latest | pigz -c | wc -c`/1024/1024 | bc

	docker build -t copyparty/dj:latest -f Dockerfile.dj .
	echo 'scale=1;'`docker save copyparty/dj:latest | pigz -c | wc -c`/1024/1024 | bc

	docker image ls

push:
	docker push copyparty/min
	docker push copyparty/im
	docker push copyparty/iv
	docker push copyparty/ac
	docker push copyparty/dj
	docker image tag copyparty/min:latest ghcr.io/9001/copyparty-min:latest
	docker image tag copyparty/im:latest ghcr.io/9001/copyparty-im:latest
	docker image tag copyparty/iv:latest ghcr.io/9001/copyparty-iv:latest
	docker image tag copyparty/ac:latest ghcr.io/9001/copyparty-ac:latest
	docker image tag copyparty/dj:latest ghcr.io/9001/copyparty-dj:latest
	docker push ghcr.io/9001/copyparty-min:latest
	docker push ghcr.io/9001/copyparty-im:latest
	docker push ghcr.io/9001/copyparty-iv:latest
	docker push ghcr.io/9001/copyparty-ac:latest
	docker push ghcr.io/9001/copyparty-dj:latest

clean:
	-docker kill `docker ps -q`
	-docker rm `docker ps -qa`
	-docker rmi -f `docker images -a | awk '/<none>/{print$$3}'`

hclean:
	-docker kill `docker ps -q`
	-docker rm `docker ps -qa`
	-docker rmi `docker images -a | awk '!/^alpine/&&NR>1{print$$3}'`

purge:
	-docker kill `docker ps -q`
	-docker rm `docker ps -qa`
	-docker rmi `docker images -qa`

sh:
	@printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
	docker run --rm -it --entrypoint /bin/ash `docker images -aq | head -n 1`
```

**scripts/docker/README.md** (new file, 83 lines)

copyparty is available in these repos:
* https://hub.docker.com/u/copyparty
* https://github.com/9001?tab=packages&repo_name=copyparty


# getting started

run this command to grab the latest copyparty image and start it:
```bash
docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copyparty/ac
```

* `/w` is the path inside the container that gets shared by default, so mount one or more folders to share below there
* `/cfg` is an optional folder with zero or more config files (*.conf) to load
* `copyparty/ac` is the recommended [image edition](#editions)
* you can download the image from github instead by replacing `copyparty/ac` with `ghcr.io/9001/copyparty-ac`

i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
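for what it's worth, the `docker run` command above would translate to roughly the following compose file; this is an untested sketch, and the service name and host paths are placeholders:

```yaml
# untested sketch of the docker-run example above as a compose file
services:
  copyparty:
    image: copyparty/ac
    user: "1000"
    ports:
      - "3923:3923"
    volumes:
      - /mnt/nas:/w        # folder(s) to share
      - ./cfgdir:/cfg      # optional config files
    restart: unless-stopped
```
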
## configuration

the container has the same default config as the sfx and the pypi module, meaning it will listen on port 3923 and share the "current folder" (`/w` inside the container) as read-write for anyone

the recommended way to configure copyparty inside a container is to mount a folder which has one or more [config files](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) inside; `-v /your/config/folder:/cfg`

* but you can also provide arguments to the docker command if you prefer that
* config files must be named `something.conf` to get picked up


## editions

with image size after installation and when gzipped

* [`min`](https://hub.docker.com/r/copyparty/min) (57 MiB, 20 gz) is just copyparty itself
* [`im`](https://hub.docker.com/r/copyparty/im) (70 MiB, 25 gz) can thumbnail images with pillow, parse media files with mutagen
* [`ac` (163 MiB, 56 gz)](https://hub.docker.com/r/copyparty/ac) is `im` plus ffmpeg for video/audio thumbs + audio transcoding + better tags
* [`iv`](https://hub.docker.com/r/copyparty/iv) (211 MiB, 73 gz) is `ac` plus vips for faster heif / avif / jxl thumbnails
* [`dj`](https://hub.docker.com/r/copyparty/dj) (309 MiB, 104 gz) is `iv` plus beatroot/keyfinder to detect musical keys and bpm

[`ac` is recommended](https://hub.docker.com/r/copyparty/ac) since the additional features available in `iv` and `dj` are rarely useful

most editions support `x86`, `x86_64`, `armhf`, `aarch64`, `ppc64le`, `s390x`
* `dj` doesn't run on `ppc64le`, `s390x`, `armhf`
* `iv` doesn't run on `ppc64le`, `s390x`


## detecting bpm and musical key

the `dj` edition comes with `keyfinder` and `beatroot` which can be used to detect music bpm and musical keys

enable them globally in a config file:
```yaml
[global]
  e2dsa, e2ts  # enable filesystem indexing and multimedia indexing
  mtp: .bpm=f,t30,/mtag/audio-bpm.py   # should take ~10sec
  mtp: key=f,t190,/mtag/audio-key.py   # should take ~50sec
```

or enable them for just one volume,
```yaml
[/music]   # share name / URL
  music    # filesystem path inside the docker volume `/w`
  flags:
    e2dsa, e2ts
    mtp: .bpm=f,t30,/mtag/audio-bpm.py
    mtp: key=f,t190,/mtag/audio-key.py
```

or using commandline arguments,
```
-e2dsa -e2ts -mtp .bpm=f,t30,/mtag/audio-bpm.py -mtp key=f,t190,/mtag/audio-key.py
```


# build the images yourself

basically `./make.sh hclean pull img push` but see [devnotes.md](./devnotes.md)


# notes

* currently unable to play [tracker music](https://en.wikipedia.org/wiki/Module_file) (mod/s3m/xm/it/...) -- will be fixed in june 2023 (Alpine 3.18)

19
scripts/docker/devnotes.md
Normal file
19
scripts/docker/devnotes.md
Normal file
@@ -0,0 +1,19 @@

# building the images yourself

```bash
./make.sh hclean pull img push
```
will download the latest copyparty-sfx.py from github unless you have [built it from scratch](../../docs/devnotes.md#just-the-sfx) and then build all the images based on that
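in other words, if you want the images to contain your own local build, just make sure the sfx exists at `../../dist/copyparty-sfx.py` before running the script (that is the path `make.sh` checks), for example:

```bash
# build the sfx first (see docs/devnotes.md#just-the-sfx), then:
ls ../../dist/copyparty-sfx.py   # make.sh only downloads from github if this file is missing
./make.sh hclean pull img push
```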

deprecated alternative: run `make` to use the makefile instead, however that uses docker instead of podman and only builds x86_64

`make.sh` is necessarily(?) overengineered because:
* podman keeps burning dockerhub pulls by not using the cached images (`--pull=never` does not apply to manifests)
* podman cannot build from a local manifest, only local images or remote manifests

but I don't really know what i'm doing here 💩

* auth for pushing images to repos;
  `podman login docker.io`
  `podman login ghcr.io -u 9001`
  [about ghcr](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) (takes a classic token as password)
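if you'd rather not type the tokens interactively, podman can read them from stdin; a sketch assuming the tokens are in environment variables (`youruser` is a placeholder):

```bash
echo "$DOCKERHUB_TOKEN" | podman login docker.io -u youruser --password-stdin
echo "$GHCR_TOKEN"      | podman login ghcr.io   -u 9001     --password-stdin
```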

scripts/docker/make.sh (new executable file, 152 lines):

#!/bin/bash
set -e

# refuse to run as root
[ $(id -u) -eq 0 ] && {
    echo "don't run this as root"
    exit 1
}

# architectures to pull the alpine base image for
sarchs="386 amd64 arm/v7 arm64/v8 ppc64le s390x"

# the same architectures as reported by podman inspect (used for the local alpine-* tags)
archs="amd64 arm s390x 386 arm64 ppc64le"

# image editions to build, and the order to push them in
imgs="dj iv min im ac"
dhub_order="iv dj min im ac"
ghcr_order="ac im min dj iv"

# edition/arch combinations that are known not to work
ngs=(
    iv-{ppc64le,s390x}
    dj-{ppc64le,s390x,arm}
)

# parse the list of commands given on the commandline
for v in "$@"; do
    [ "$v" = clean  ] && clean=1
    [ "$v" = hclean ] && hclean=1
    [ "$v" = purge  ] && purge=1
    [ "$v" = pull   ] && pull=1
    [ "$v" = img    ] && img=1
    [ "$v" = push   ] && push=1
    [ "$v" = sh     ] && sh=1
done

[ $# -gt 0 ] || {
    echo "need list of commands, for example: hclean pull img push"
    exit 1
}

[ $sh ] && {
    printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
    podman run --rm -it --entrypoint /bin/ash $(podman images -aq | head -n 1)
    exit $?
}

# build an awk filter selecting which images to delete
filt=
[ $clean  ] && filt='/<none>/{print$3}'
[ $hclean ] && filt='/localhost\/copyparty-|^<none>.*localhost\/alpine-/{print$3}'
[ $purge  ] && filt='NR>1{print$3}'
[ $filt ] && {
    [ $purge ] && {
        # stop and remove all containers before purging the images
        podman kill $(podman ps -q)  || true
        podman rm   $(podman ps -qa) || true
    }
    podman rmi -f $(podman images -a --history | awk "$filt") || true
    podman rmi $(podman images -a --history | awk '/^<none>.*<none>.*-tmp:/{print$3}') || true
}

# pull the alpine base image for each architecture and give each one a local alpine-* tag
[ $pull ] && {
    for a in $sarchs; do  # arm/v6
        podman pull --arch=$a alpine:latest
    done

    podman images --format "{{.ID}} {{.History}}" |
    awk '/library\/alpine/{print$1}' |
    while read id; do
        tag=alpine-$(podman inspect $id | jq -r '.[]|.Architecture' | tr / -)
        [ -e .tag-$tag ] && continue
        touch .tag-$tag
        echo tagging $tag
        podman untag $id
        podman tag $id $tag
    done
    rm .tag-*
}

# build every edition for every architecture in parallel, then collect the results into manifests
[ $img ] && {
    fp=../../dist/copyparty-sfx.py
    [ -e $fp ] || {
        echo downloading copyparty-sfx.py ...
        mkdir -p ../../dist
        wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O $fp
    }

    # kill abandoned builders
    ps aux | awk '/bin\/qemu-[^-]+-static/{print$2}' | xargs -r kill -9

    # grab deps
    rm -rf i err
    mkdir i
    tar -cC../.. dist/copyparty-sfx.py bin/mtag | tar -xvCi

    for i in $imgs; do
        podman rm copyparty-$i || true  # old manifest
        for a in $archs; do
            [[ " ${ngs[*]} " =~ " $i-$a " ]] && continue  # known incompat

            # wait for a free slot
            while true; do
                touch .blk
                [ $(jobs -p | wc -l) -lt $(nproc) ] && break
                while [ -e .blk ]; do sleep 0.2; done
            done
            aa="$(printf '%7s' $a)"

            # arm takes forever so make it top priority
            [ ${a::3} == arm ] && nice= || nice=nice

            # --pull=never does nothing at all btw
            (set -x
            $nice podman build \
                --pull=never \
                --from localhost/alpine-$a \
                -t copyparty-$i-$a \
                -f Dockerfile.$i . ||
                    (echo $? $i-$a >> err)
            rm -f .blk
            ) 2> >(tee $a.err | sed "s/^/$aa:/" >&2) > >(tee $a.out | sed "s/^/$aa:/") &
        done
        [ -e err ] && {
            echo something died,
            cat err
            pkill -P $$
            exit 1
        }
        for a in $archs; do
            rm -f $a.{out,err}
        done
    done
    wait
    [ -e err ] && {
        echo something died,
        cat err
        pkill -P $$
        exit 1
    }

    # avoid podman race-condition by creating manifest manually --
    # Error: creating image to hold manifest list: image name "localhost/copyparty-dj:latest" is already associated with image "[0-9a-f]{64}": that name is already in use
    for i in $imgs; do
        variants=
        for a in $archs; do
            [[ " ${ngs[*]} " =~ " $i-$a " ]] && continue
            variants="$variants containers-storage:localhost/copyparty-$i-$a"
        done
        podman manifest create copyparty-$i $variants
    done
}

# push the manifests (and their images) to docker hub and ghcr
[ $push ] && {
    for i in $dhub_order; do
        podman manifest push --all copyparty-$i copyparty/$i:latest
    done
    for i in $ghcr_order; do
        podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest
    done
}

echo ok

Some files were not shown because too many files have changed in this diff.