Compare commits
219 Commits

.github/ISSUE_TEMPLATE/bug_report.md (vendored): 3 changes

@@ -8,6 +8,7 @@ assignees: '9001'
 ---

 NOTE:
+**please use english, or include an english translation.** aside from that,
 all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md


@@ -33,7 +34,7 @@ remove the ones that are not relevant:
 ### Server details (if you're NOT using docker/podman)
 remove the ones that are not relevant:
 * **server OS / version:**
-* **what copyparty did you grab:** (sfx/exe/pip/aur/...)
+* **what copyparty did you grab:** (sfx/exe/pip/arch/...)
 * **how you're running it:** (in a terminal, as a systemd-service, ...)
 * run copyparty with `--version` and grab the last 3 lines (they start with `copyparty`, `CPython`, `sqlite`) and paste them below this line:
 * **copyparty arguments and/or config-file:**

.github/ISSUE_TEMPLATE/feature_request.md (vendored): 2 changes

@@ -7,6 +7,8 @@ assignees: '9001'

 ---

+NOTE:
+**please use english, or include an english translation.** aside from that,
 all of the below are optional, consider them as inspiration, delete and rewrite at will

 **is your feature request related to a problem? Please describe.**

@@ -1,8 +1,21 @@
-* do something cool
+* **found a bug?** [create an issue!](https://github.com/9001/copyparty/issues) or let me know in the [discord](https://discord.gg/25J8CdTT6G) :>
+* **fixed a bug?** create a PR or post a patch! big thx in advance :>
+* **have a cool idea?** let's discuss it! anywhere's fine, you choose.

-really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍
-
-but to be more specific,
+but please:
+
+
+
+# do not use AI / LMM when writing code
+
+copyparty is 100% organic, free-range, human-written software!
+
+> ⚠ you are now entering a no-copilot zone
+
+the *only* place where LMM/AI *may* be accepted is for [localization](https://github.com/9001/copyparty/tree/hovudstraum/docs/rice#translations) if you are fluent and have confirmed that the translation is accurate.
+
+sorry for the harsh tone, but this is important to me 🙏
+


 # contribution ideas

README.md: 207 changes

@@ -8,11 +8,13 @@ turn almost any device into a file server with resumable uploads/downloads using
 * 🔌 protocols: [http](#the-browser) // [webdav](#webdav-server) // [ftp](#ftp-server) // [tftp](#tftp-server) // [smb/cifs](#smb-server)
 * 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)

-👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
+👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running on a nuc in my basement

 📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)

-🎬 **videos:** [upload](https://a.ocv.me/pub/demo/pics-vids/up2k.webm) // [cli-upload](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm) // [race-the-beam](https://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm)
+🎬 **videos:** [upload](https://a.ocv.me/pub/demo/pics-vids/up2k.webm) // [cli-upload](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm) // [race-the-beam](https://a.ocv.me/pub/g/nerd-stuff/cpp/2024-0418-race-the-beam.webm) // 👉 **[feature-showcase](https://a.ocv.me/pub/demo/showcase-hq.webm)** ([youtube](https://www.youtube.com/watch?v=15_-hgsX2V0))

+made in Norway 🇳🇴
+

 ## readme toc

@@ -50,8 +52,11 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
 * [recent uploads](#recent-uploads) - list all recent uploads
 * [media player](#media-player) - plays almost every audio format there is
+* [playlists](#playlists) - create and play [m3u8](https://en.wikipedia.org/wiki/M3U) playlists
+* [creating a playlist](#creating-a-playlist) - with a standalone mediaplayer or copyparty
 * [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
 * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
+* [textfile viewer](#textfile-viewer) - with realtime streaming of logfiles and such ([demo](https://a.ocv.me/pub/demo/logtail/))
 * [markdown viewer](#markdown-viewer) - and there are *two* editors
 * [markdown vars](#markdown-vars) - dynamic docs with serverside variable expansion
 * [other tricks](#other-tricks)

@@ -94,12 +99,15 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
 * [real-ip](#real-ip) - teaching copyparty how to see client IPs
 * [reverse-proxy performance](#reverse-proxy-performance)
+* [permanent cloudflare tunnel](#permanent-cloudflare-tunnel) - if you have a domain and want to get your copyparty online real quick
 * [prometheus](#prometheus) - metrics/stats can be enabled
 * [other extremely specific features](#other-extremely-specific-features) - you'll never find a use for these
 * [custom mimetypes](#custom-mimetypes) - change the association of a file extension
+* [GDPR compliance](#GDPR-compliance) - imagine using copyparty professionally...
 * [feature chickenbits](#feature-chickenbits) - buggy feature? rip it out
+* [feature beefybits](#feature-beefybits) - force-enable features with known issues on your OS/env
 * [packages](#packages) - the party might be closer than you think
-* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
+* [arch package](#arch-package) - `pacman -S copyparty` (in [arch linux extra](https://archlinux.org/packages/extra/any/copyparty/))
 * [fedora package](#fedora-package) - does not exist yet
 * [nix package](#nix-package) - `nix profile install github:9001/copyparty`
 * [nixos module](#nixos-module)

@@ -144,6 +152,7 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
 * or if you are on android, [install copyparty in termux](#install-on-android)
 * or maybe you have a [synology nas / dsm](./docs/synology-dsm.md)
 * or if your computer is messed up and nothing else works, [try the pyz](#zipapp)
+* or if your OS is dead, give the [bootable flashdrive / cd-rom](https://a.ocv.me/pub/stuff/edcd001/enterprise-edition/) a spin
 * or if you don't trust copyparty yet and want to isolate it a little, then...
 * ...maybe [prisonparty](./bin/prisonparty.sh) to create a tiny [chroot](https://wiki.archlinux.org/title/Chroot) (very portable),
 * ...or [bubbleparty](./bin/bubbleparty.sh) to wrap it in [bubblewrap](https://github.com/containers/bubblewrap) (much better)

@@ -159,8 +168,8 @@ enable thumbnails (images/audio/video), media indexing, and audio transcoding by
 * **MacOS:** `port install py-Pillow ffmpeg`
 * **MacOS** (alternative): `brew install pillow ffmpeg`
 * **Windows:** `python -m pip install --user -U Pillow`
-* install python and ffmpeg manually; do not use `winget` or `Microsoft Store` (it breaks $PATH)
-* copyparty.exe comes with `Pillow` and only needs `ffmpeg`
+* install [python](https://www.python.org/downloads/windows/) and [ffmpeg](#optional-dependencies) manually; do not use `winget` or `Microsoft Store` (it breaks $PATH)
+* copyparty.exe comes with `Pillow` and only needs [ffmpeg](#optional-dependencies) for mediatags/videothumbs
 * see [optional dependencies](#optional-dependencies) to enable even more features

 running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)

@@ -183,6 +192,8 @@ first download [cloudflared](https://developers.cloudflare.com/cloudflare-one/co

 as the tunnel starts, it will show a URL which you can share to let anyone browse your stash or upload files to you

+but if you have a domain, then you probably want to skip the random autogenerated URL and instead make a [permanent cloudflare tunnel](#permanent-cloudflare-tunnel)
+
 since people will be connecting through cloudflare, run copyparty with `--xff-hdr cf-connecting-ip` to detect client IPs correctly


@@ -224,6 +235,7 @@ also see [comparison to similar software](./docs/versus.md)
 * ☑ [upnp / zeroconf / mdns / ssdp](#zeroconf)
 * ☑ [event hooks](#event-hooks) / script runner
 * ☑ [reverse-proxy support](https://github.com/9001/copyparty#reverse-proxy)
+* ☑ cross-platform (Windows, Linux, Macos, Android, FreeBSD, arm32/arm64, ppc64le, s390x, risc-v/riscv64)
 * upload
 * ☑ basic: plain multipart, ie6 support
 * ☑ [up2k](#uploading): js, resumable, multithreaded

@@ -244,8 +256,10 @@ also see [comparison to similar software](./docs/versus.md)
 * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
 * ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus/mp3 transcoding)
 * ☑ play video files as audio (converted on server)
+* ☑ create and play [m3u8 playlists](#playlists)
 * ☑ image gallery with webm player
-* ☑ textfile browser with syntax hilighting
+* ☑ [textfile browser](#textfile-viewer) with syntax hilighting
+* ☑ realtime streaming of growing files (logfiles and such)
 * ☑ [thumbnails](#thumbnails)
 * ☑ ...of images using Pillow, pyvips, or FFmpeg
 * ☑ ...of videos using FFmpeg

@@ -275,6 +289,8 @@ small collection of user feedback

 `good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`, `wow this is better than nextcloud`

+* UI просто ужасно. Если буду описывать детально не смогу удержаться в рамках приличий
+

 # motivations

@@ -294,6 +310,8 @@ project goals / philosophy
 * adaptable, malleable, hackable
 * no build steps; modify the js/python without needing node.js or anything like that

+becoming rich is specifically *not* a motivation, but if you wanna donate then see my [github profile](https://github.com/9001) regarding donations for my FOSS stuff in general (also THANKS!)
+

 ## notes

@@ -323,7 +341,8 @@ roughly sorted by chance of encounter
 * `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux)
 * `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions
 * if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
-* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
+* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db and thumbnails on a local disk instead
+* or, if you only want to move the db (and not the thumbnails), then use `--dbpath` or the `dbpath` volflag
 * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
 * probably more, pls let me know

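As a rough illustration of the two options touched by this hunk (not part of the diff itself), the `hist` volflag and `--dbpath` could look like this on a command line; the `~/music::r` volume spec mirrors an example used later in this README, and the target paths are made up:

```bash
# keep the whole .hist (db + thumbnails) for ~/music on a fast local disk
python3 copyparty-sfx.py -v ~/music::r:c,hist=/tmp/foo

# or move only the database, leaving thumbnails in the default location
python3 copyparty-sfx.py --dbpath /tmp/copyparty-db -v ~/music::r
```
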
@@ -376,7 +395,8 @@ same order here too
 * this is an msys2 bug, the regular windows edition of python is fine

 * VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
-* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
+* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db and thumbnails inside the vm instead
+* or, if you only want to move the db (and not the thumbnails), then use `--dbpath` or the `dbpath` volflag
 * also happens on mergerfs, so put the db elsewhere

 * Ubuntu: dragging files from certain folders into firefox or chrome is impossible

@@ -401,6 +421,12 @@ upgrade notes

 "frequently" asked questions

+* CopyParty?
+* nope! the name is either copyparty (all-lowercase) or Copyparty -- it's [one word](https://en.wiktionary.org/wiki/copyparty) after all :>
+
+* can I change the 🌲 spinning pine-tree loading animation?
+* [yeah...](https://github.com/9001/copyparty/tree/hovudstraum/docs/rice#boring-loader-spinner) :-(
+
 * is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
 * yes, using the [`g` permission](#accounts-and-volumes), see the examples there
 * you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty

@@ -423,6 +449,14 @@ upgrade notes
 * copyparty seems to think I am using http, even though the URL is https
 * your reverse-proxy is not sending the `X-Forwarded-Proto: https` header; this could be because your reverse-proxy itself is confused. Ensure that none of the intermediates (such as cloudflare) are terminating https before the traffic hits your entrypoint

+* thumbnails are broken (you get a colorful square which says the filetype instead)
+* you need to install `FFmpeg` or `Pillow`; see [thumbnails](#thumbnails)
+
+* thumbnails are broken (some images appear, but other files just get a blank box, and/or the broken-image placeholder)
+* probably due to a reverse-proxy messing with the request URLs and stripping the query parameters (`?th=w`), so check your URL rewrite rules
+* could also be due to incorrect caching settings in reverse-proxies and/or CDNs, so make sure that nothing is set to ignore the query string
+* could also be due to misbehaving privacy-related browser extensions, so try to disable those
+
 * i want to learn python and/or programming and am considering looking at the copyparty source code in that occasion
 * ```bash
 _| _ __ _ _|_

@@ -529,6 +563,8 @@ a client can request to see dotfiles in directory listings if global option `-ed

 dotfiles do not appear in search results unless one of the above is true, **and** the global option / volflag `dotsrch` is set

+> even if user has permission to see dotfiles, they are default-hidden unless `--see-dots` is set, and/or user has enabled the `dotfiles` option in the settings tab
+
 config file example, where the same permission to see dotfiles is given in two different ways just for reference:

 ```yaml

@@ -653,6 +689,7 @@ press `g` or `田` to toggle grid-view instead of the file listing and `t` togg
 it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
 * pyvips is 3x faster than Pillow, Pillow is 3x faster than FFmpeg
 * disable thumbnails for specific volumes with volflag `dthumb` for all, or `dvthumb` / `dathumb` / `dithumb` for video/audio/images only
+* for installing FFmpeg on windows, see [optional dependencies](#optional-dependencies)

 audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)

@@ -664,7 +701,10 @@ enabling `multiselect` lets you click files to select them, and then shift-click
 * `multiselect` is mostly intended for phones/tablets, but the `sel` option in the `[⚙️] settings` tab is better suited for desktop use, allowing selection by CTRL-clicking and range-selection with SHIFT-click, all without affecting regular clicking
 * the `sel` option can be made default globally with `--gsel` or per-volume with volflag `gsel`

-to show `/icons/exe.png` as the thumbnail for all .exe files, `--ext-th=exe=/icons/exe.png` (optionally as a volflag)
+to show `/icons/exe.png` and `/icons/elf.gif` as the thumbnail for all `.exe` and `.elf` files respectively, do this: `--ext-th=exe=/icons/exe.png --ext-th=elf=/icons/elf.gif`
+* optionally as separate volflags for each mapping; see config file example below
+* the supported image formats are [jpg, png, gif, webp, ico](https://developer.mozilla.org/en-US/docs/Web/Media/Guides/Formats/Image_types)
+* be careful with svg; chrome will crash if you have too many unique svg files showing on the same page (the limit is 250 or so) -- showing the same handful of svg files thousands of times is ok however

 config file example:

@@ -681,6 +721,7 @@ config file example:
 dthumb # disable ALL thumbnails and audio transcoding
 dvthumb # only disable video thumbnails
 ext-th: exe=/ico/exe.png # /ico/exe.png is the thumbnail of *.exe
+ext-th: elf=/ico/elf.gif # ...and /ico/elf.gif is used for *.elf
 th-covers: folder.png,folder.jpg,cover.png,cover.jpg # the default
 ```

@@ -708,6 +749,7 @@ select which type of archive you want in the `[⚙️] config` tab:
 * `up2k.db` and `dir.txt` is always excluded
 * bsdtar supports streaming unzipping: `curl foo?zip | bsdtar -xv`
 * good, because copyparty's zip is faster than tar on small files
+* but `?tar` is better for large files, especially if the total exceeds 4 GiB
 * `zip_crc` will take longer to download since the server has to read each file twice
 * this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
 * how are you accessing copyparty actually

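A small usage sketch (not from the diff): streaming a folder download straight into extraction, reusing the `foo` placeholder and the `?zip` pipeline from the hunk above next to the newly-mentioned `?tar` variant:

```bash
# unpack a zip-stream on the fly with bsdtar
curl foo?zip | bsdtar -xv

# for large folders, or when the total exceeds 4 GiB, stream an uncompressed tar instead
curl foo?tar | tar -xvf -
```
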
@@ -764,8 +806,11 @@ the up2k UI is the epitome of polished intuitive experiences:
 * "parallel uploads" specifies how many chunks to upload at the same time
 * `[🏃]` analysis of other files should continue while one is uploading
 * `[🥔]` shows a simpler UI for faster uploads from slow devices
+* `[🛡️]` decides when to overwrite existing files on the server
+* `🛡️` = never (generate a new filename instead)
+* `🕒` = overwrite if the server-file is older
+* `♻️` = always overwrite if the files are different
 * `[🎲]` generate random filenames during upload
-* `[📅]` preserve last-modified timestamps; server times will match yours
 * `[🔎]` switch between upload and [file-search](#file-search) mode
 * ignore `[🔎]` if you add files by dragging them into the browser

@@ -783,6 +828,8 @@ if you are resuming a massive upload and want to skip hashing the files which al

 if the server is behind a proxy which imposes a request-size limit, you can configure up2k to sneak below the limit with server-option `--u2sz` (the default is 96 MiB to support Cloudflare)

+if you want to replace existing files on the server with new uploads by default, run with `--u2ow 2` (only works if users have the delete-permission, and can still be disabled with `🛡️` in the UI)
+

 ### file-search

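As a hedged sketch of the server-side default described in the added line above, assuming the usual sfx invocation:

```bash
# new uploads replace existing server files by default;
# users still need the delete-permission, and the 🛡️ toggle in the UI can override this
python3 copyparty-sfx.py --u2ow 2
```
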
@@ -881,6 +928,7 @@ semi-intentional limitations:

 * cleanup of expired shares only works when global option `e2d` is set, and/or at least one volume on the server has volflag `e2d`
 * only folders from the same volume are shared; if you are sharing a folder which contains other volumes, then the contents of those volumes will not be available
+* if you change [password hashing](#password-hashing) settings after creating a password-protected share, then that share will stop working
 * related to [IdP volumes being forgotten on shutdown](https://github.com/9001/copyparty/blob/hovudstraum/docs/idp.md#idp-volumes-are-forgotten-on-shutdown), any shares pointing into a user's IdP volume will be unavailable until that user makes their first request after a restart
 * no option to "delete after first access" because tricky
 * when linking something to discord (for example) it'll get accessed by their scraper and that would count as a hit

@@ -1007,11 +1055,13 @@ click the `play` link next to an audio file, or copy the link target to [share i

 open the `[🎺]` media-player-settings tab to configure it,
 * "switches":
+* `[🔁]` repeats one single song forever
 * `[🔀]` shuffles the files inside each folder
 * `[preload]` starts loading the next track when it's about to end, reduces the silence between songs
 * `[full]` does a full preload by downloading the entire next file; good for unreliable connections, bad for slow connections
 * `[~s]` toggles the seekbar waveform display
 * `[/np]` enables buttons to copy the now-playing info as an irc message
+* `[📻]` enables buttons to create an [m3u playlist](#playlists) with the selected songs
 * `[os-ctl]` makes it possible to control audio playback from the lockscreen of your device (enables [mediasession](https://developer.mozilla.org/en-US/docs/Web/API/MediaSession))
 * `[seek]` allows seeking with lockscreen controls (buggy on some devices)
 * `[art]` shows album art on the lockscreen

@@ -1030,11 +1080,39 @@ open the `[🎺]` media-player-settings tab to configure it,
 * "transcode to":
 * `[opus]` produces an `opus` whenever transcoding is necessary (the best choice on Android and PCs)
 * `[awo]` is `opus` in a `weba` file, good for iPhones (iOS 17.5 and newer) but Apple is still fixing some state-confusion bugs as of iOS 18.2.1
-* `[caf]` is `opus` in a `caf` file, good for iPhones (iOS 11 through 17), technically unsupported by Apple but works for the mos tpart
+* `[caf]` is `opus` in a `caf` file, good for iPhones (iOS 11 through 17), technically unsupported by Apple but works for the most part
 * `[mp3]` -- the myth, the legend, the undying master of mediocre sound quality that definitely works everywhere
 * "tint" reduces the contrast of the playback bar


+### playlists
+
+create and play [m3u8](https://en.wikipedia.org/wiki/M3U) playlists -- see example [text](https://a.ocv.me/pub/demo/music/?doc=example-playlist.m3u) and [player](https://a.ocv.me/pub/demo/music/#m3u=example-playlist.m3u)
+
+click a file with the extension `m3u` or `m3u8` (for example `mixtape.m3u` or `touhou.m3u8` ) and you get two choices: Play / Edit
+
+playlists can include songs across folders anywhere on the server, but filekeys/dirkeys are NOT supported, so the listener must have read-access or get-access to the files
+
+
+### creating a playlist
+
+with a standalone mediaplayer or copyparty
+
+you can use foobar2000, deadbeef, just about any standalone player should work -- but you might need to edit the filepaths in the playlist so they fit with the server-URLs
+
+alternatively, you can create the playlist using copyparty itself:
+
+* open the `[🎺]` media-player-settings tab and enable the `[📻]` create-playlist feature -- this adds two new buttons in the bottom-right tray, `[📻add]` and `[📻copy]` which appear when you listen to music, or when you select a few audiofiles
+
+* click the `📻add` button while a song is playing (or when you've selected some songs) and they'll be added to "the list" (you can't see it yet)
+
+* at any time, click `📻copy` to send the playlist to your clipboard
+* you can then continue adding more songs if you'd like
+* if you want to wipe the playlist and start from scratch, just refresh the page
+
+* create a new textfile, name it `something.m3u` and paste the playlist there
+
+
 ### audio equalizer

 and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)

@@ -1051,6 +1129,18 @@ not available on iPhones / iPads because AudioContext currently breaks backgroun
 due to phone / app settings, android phones may randomly stop playing music when the power saver kicks in, especially at the end of an album -- you can fix it by [disabling power saving](https://user-images.githubusercontent.com/241032/235262123-c328cca9-3930-4948-bd18-3949b9fd3fcf.png) in the [app settings](https://user-images.githubusercontent.com/241032/235262121-2ffc51ae-7821-4310-a322-c3b7a507890c.png) of the browser you use for music streaming (preferably a dedicated one)


+## textfile viewer
+
+with realtime streaming of logfiles and such ([demo](https://a.ocv.me/pub/demo/logtail/)) , and terminal colors work too
+
+click `-txt-` next to a textfile to open the viewer, which has the following toolbar buttons:
+
+* `✏️ edit` opens the textfile editor
+* `📡 follow` starts monitoring the file for changes, streaming new lines in realtime
+* similar to `tail -f`
+* [link directly](https://a.ocv.me/pub/demo/logtail/?doc=lipsum.txt&tail) to a file with tailing enabled by adding `&tail` to the textviewer URL
+
+
 ## markdown viewer

 and there are *two* editors

@@ -1349,12 +1439,17 @@ if you enable deduplication with `--dedup` then it'll create a symlink instead o
 **warning:** when enabling dedup, you should also:
 * enable indexing with `-e2dsa` or volflag `e2dsa` (see [file indexing](#file-indexing) section below); strongly recommended
 * ...and/or `--hardlink-only` to use hardlink-based deduplication instead of symlinks; see explanation below
+* ...and/or `--reflink` to use CoW/reflink-based dedup (much safer than hardlink, but OS/FS-dependent)

 it will not be safe to rename/delete files if you only enable dedup and none of the above; if you enable indexing then it is not *necessary* to also do hardlinks (but you may still want to)

 by default, deduplication is done based on symlinks (symbolic links); these are tiny files which are pointers to the nearest full copy of the file

-you can choose to use hardlinks instead of softlinks, globally with `--hardlink-only` or volflag `hardlinkonly`;
+you can choose to use hardlinks instead of softlinks, globally with `--hardlink-only` or volflag `hardlinkonly`, and you can choose to use reflinks with `--reflink` or volflag `reflink`
+
+advantages of using reflinks (CoW, copy-on-write):
+* entirely safe (when your filesystem supports it correctly); either file can be edited or deleted without affecting other copies
+* only linux 5.3 or newer, only python 3.14 or newer, only some filesystems (btrfs probably ok, maybe xfs too, but zfs had bugs)

 advantages of using hardlinks:
 * hardlinks are more compatible with other software; they behave entirely like regular files

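A minimal sketch (not part of the diff) combining the dedup-related flags discussed in this hunk; pick exactly one of the link strategies:

```bash
# symlink-based dedup with indexing enabled (strongly recommended)
python3 copyparty-sfx.py -e2dsa --dedup

# hardlink-based dedup instead of symlinks
python3 copyparty-sfx.py -e2dsa --dedup --hardlink-only

# CoW/reflink-based dedup (needs a recent linux/python and a supporting filesystem)
python3 copyparty-sfx.py -e2dsa --dedup --reflink
```
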
@@ -1411,7 +1506,6 @@ the same arguments can be set as volflags, in addition to `d2d`, `d2ds`, `d2t`,
 note:
 * upload-times can be displayed in the file listing by enabling the `.up_at` metadata key, either globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at` (will have a ~17% performance impact on directory listings)
 * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
-* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

 config file example (these options are recommended btw):

@@ -1516,7 +1610,7 @@ config file example:
 w: * # anyone can upload here
 rw: ed # only user "ed" can read-write
 flags:
-e2ds: # filesystem indexing is required for many of these:
+e2ds # filesystem indexing is required for many of these:
 sz: 1k-3m # accept upload only if filesize in this range
 df: 4g # free disk space cannot go lower than this
 vmaxb: 1g # volume can never exceed 1 GiB

@@ -1571,6 +1665,10 @@ copyparty creates a subfolder named `.hist` inside each volume where it stores t
 this can instead be kept in a single place using the `--hist` argument, or the `hist=` volflag, or a mix of both:
 * `--hist ~/.cache/copyparty -v ~/music::r:c,hist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)

+by default, the per-volume `up2k.db` sqlite3-database for `-e2d` and `-e2t` is stored next to the thumbnails according to the `--hist` option, but the global-option `--dbpath` and/or volflag `dbpath` can be used to put the database somewhere else
+
+if your storage backend is unreliable (NFS or bad HDDs), you can specify one or more "landmarks" to look for before doing anything database-related. A landmark is a file which is always expected to exist inside the volume. This avoids spurious filesystem rescans in the event of an outage. One line per landmark (see example below)
+
 note:
 * putting the hist-folders on an SSD is strongly recommended for performance
 * markdown edits are always stored in a local `.hist` subdirectory

@@ -1588,6 +1686,8 @@ config file example:
 flags:
 hist: - # restore the default (/mnt/nas/pics/.hist/)
 hist: /mnt/nas/cache/pics/ # can be absolute path
+landmark: me.jpg # /mnt/nas/pics/me.jpg must be readable to enable db
+landmark: info/a.txt^=ok # and this textfile must start with "ok"
 ```

@@ -1816,7 +1916,7 @@ tell search engines you don't wanna be indexed, either using the good old [robo
 * volflag `[...]:c,norobots` does the same thing for that single volume
 * volflag `[...]:c,robots` ALLOWS search-engine crawling for that volume, even if `--no-robots` is set globally

-also, `--force-js` disables the plain HTML folder listing, making things harder to parse for search engines
+also, `--force-js` disables the plain HTML folder listing, making things harder to parse for *some* search engines -- note that crawlers which understand javascript (such as google) will not be affected


 ## themes

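For illustration only (assuming the standard sfx invocation), the two crawler-related options mentioned in this hunk combine like so:

```bash
# ask crawlers to stay away, and hide the plain-HTML listing from scrapers that can't run javascript
python3 copyparty-sfx.py --no-robots --force-js
```
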
@@ -1927,7 +2027,7 @@ some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatical
 * **warning:** nginx-QUIC (HTTP/3) is still experimental and can make uploads much slower, so HTTP/1.1 is recommended for now
 * depending on server/client, HTTP/1.1 can also be 5x faster than HTTP/2

-for improved security (and a 10% performance boost) consider listening on a unix-socket with `-i unix:770:www:/tmp/party.sock` (permission `770` means only members of group `www` can access it)
+for improved security (and a 10% performance boost) consider listening on a unix-socket with `-i unix:770:www:/dev/shm/party.sock` (permission `770` means only members of group `www` can access it)

 example webserver / reverse-proxy configs:

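A sketch under the same assumptions as the changed line above: listening on the suggested unix-socket looks like this, and the reverse-proxy then has to be pointed at the same socket path:

```bash
# only members of group "www" can reach the socket (mode 770) in /dev/shm
python3 copyparty-sfx.py -i unix:770:www:/dev/shm/party.sock
```
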
@@ -1982,6 +2082,26 @@ in summary, `haproxy > caddy > traefik > nginx > apache > lighttpd`, and use uds
 * if these results are bullshit because my config exampels are bad, please submit corrections!


+## permanent cloudflare tunnel
+
+if you have a domain and want to get your copyparty online real quick, either from your home-PC behind a CGNAT or from a server without an existing [reverse-proxy](#reverse-proxy) setup, one approach is to create a [Cloudflare Tunnel](https://developers.cloudflare.com/cloudflare-one/connections/connect-networks/get-started/) (formerly "Argo Tunnel")
+
+I'd recommend making a `Locally-managed tunnel` for more control, but if you prefer to make a `Remotely-managed tunnel` then this is currently how:
+
+* `cloudflare dashboard` » `zero trust` » `networks` » `tunnels` » `create a tunnel` » `cloudflared` » choose a cool `subdomain` and leave the `path` blank, and use `service type` = `http` and `URL` = `127.0.0.1:3923`
+
+* and if you want to just run the tunnel without installing it, skip the `cloudflared service install BASE64` step and instead do `cloudflared --no-autoupdate tunnel run --token BASE64`
+
+NOTE: since people will be connecting through cloudflare, as mentioned in [real-ip](#real-ip) you should run copyparty with `--xff-hdr cf-connecting-ip` to detect client IPs correctly
+
+config file example:
+
+```yaml
+[global]
+xff-hdr: cf-connecting-ip
+```
+
+
 ## prometheus

 metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)

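A quick sketch of the remotely-managed variant described in the added section, assuming copyparty runs as the usual sfx; `BASE64` stands for the token copied from the cloudflare dashboard:

```bash
# run the tunnel without installing it as a service
cloudflared --no-autoupdate tunnel run --token BASE64

# run copyparty so it trusts cloudflare's client-IP header
python3 copyparty-sfx.py --xff-hdr cf-connecting-ip
```
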
@@ -2068,13 +2188,27 @@ in a config file, this is the same as:
 run copyparty with `--mimes` to list all the default mappings


+### GDPR compliance
+
+imagine using copyparty professionally... **TINLA/IANAL; EU laws are hella confusing**
+
+* remember to disable logging, or configure logrotation to an acceptable timeframe with `-lo cpp-%Y-%m%d.txt.xz` or similar
+
+* if running with the database enabled (recommended), then have it forget uploader-IPs after some time using `--forget-ip 43200`
+* don't set it too low; [unposting](#unpost) a file is no longer possible after this takes effect
+
+* if you actually *are* a lawyer then I'm open for feedback, would be fun
+
+
 ### feature chickenbits

 buggy feature? rip it out by setting any of the following environment variables to disable its associated bell or whistle,

 | env-var | what it does |
 | -------------------- | ------------ |
+| `PRTY_NO_DB_LOCK` | do not lock session/shares-databases for exclusive access |
 | `PRTY_NO_IFADDR` | disable ip/nic discovery by poking into your OS with ctypes |
+| `PRTY_NO_IMPRESO` | do not try to load js/css files using `importlib.resources` |
 | `PRTY_NO_IPV6` | disable some ipv6 support (should not be necessary since windows 2000) |
 | `PRTY_NO_LZMA` | disable streaming xz compression of incoming uploads |
 | `PRTY_NO_MP` | disable all use of the python `multiprocessing` module (actual multithreading, cpu-count for parsers/thumbnailers) |

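Putting the two GDPR-related suggestions from the added section on one command line, as a non-authoritative sketch (the logfile pattern and the 43200 value are the ones given above):

```bash
# xz-compressed daily logfiles, and uploader-IPs forgotten after a while
python3 copyparty-sfx.py -lo cpp-%Y-%m%d.txt.xz --forget-ip 43200
```
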
@@ -2085,6 +2219,15 @@ buggy feature? rip it out by setting any of the following environment variables
example: `PRTY_NO_IFADDR=1 python3 copyparty-sfx.py`


### feature beefybits

force-enable features with known issues on your OS/env by setting any of the following environment variables, also affectionately known as `fuckitbits` or `hail-mary-bits`

| env-var | what it does |
| ------------------------ | ------------ |
| `PRTY_FORCE_MP` | force-enable multiprocessing (real multithreading) on MacOS and other broken platforms |
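these are used the same way as the chickenbits above, so presumably `PRTY_FORCE_MP=1 python3 copyparty-sfx.py` to force-enable multiprocessing on MacOS (same env-var mechanism; untested assumption)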
# packages

the party might be closer than you think
@@ -2094,14 +2237,18 @@ if your distro/OS is not mentioned below, there might be some hints in the [«on
## arch package

now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
`pacman -S copyparty` (in [arch linux extra](https://archlinux.org/packages/extra/any/copyparty/))

it comes with a [systemd service](./contrib/package/arch/copyparty.service) and expects to find one or more [config files](./docs/example.conf) in `/etc/copyparty.d/`

after installing it, you may want to `cp /usr/lib/systemd/system/copyparty.service /etc/systemd/system/` and then `vim /etc/systemd/system/copyparty.service` to change what user/group it is running as (you only need to do this once)

NOTE: there used to be an aur package; this evaporated when copyparty was adopted by the official archlinux repos. If you're still using the aur package, please move
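as a sketch, that once-off user/group change boils down to the following (the User= / Group= values are whatever you prefer):

```sh
cp /usr/lib/systemd/system/copyparty.service /etc/systemd/system/
vim /etc/systemd/system/copyparty.service   # adjust User= and Group=
systemctl daemon-reload && systemctl restart copyparty
```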
## fedora package

does not exist yet; using the [copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/) builds is **NOT recommended** because updates can be delayed by [several months](https://github.com/fedora-copr/copr/issues/3056)
does not exist yet; there are rumours that it is being packaged! keep an eye on this space...


## nix package
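as a rough sketch of what the NixOS module (shown further down in this diff) looks like in use -- option names are taken from that module, but treat the snippet as unverified:

```nix
services.copyparty = {
  enable = true;
  settings = {
    i = "0.0.0.0";
    no-reload = true;
  };
  volumes."/" = {
    path = "/srv/copyparty";   # hypothetical share location
    access = { r = "*"; };
  };
};
```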
@@ -2226,8 +2373,10 @@ TLDR: yes
| send message | yep | yep | yep | yep | yep | yep | yep | yep |
| set sort order | - | yep | yep | yep | yep | yep | yep | yep |
| zip selection | - | yep | yep | yep | yep | yep | yep | yep |
| file search | - | yep | yep | yep | yep | yep | yep | yep |
| file rename | - | yep | yep | yep | yep | yep | yep | yep |
| file cut/paste | - | yep | yep | yep | yep | yep | yep | yep |
| unpost uploads | - | - | yep | yep | yep | yep | yep | yep |
| navpane | - | yep | yep | yep | yep | yep | yep | yep |
| image viewer | - | yep | yep | yep | yep | yep | yep | yep |
| video player | - | yep | yep | yep | yep | yep | yep | yep |
@@ -2255,6 +2404,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
| **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
| **sony psp** 5.50 | can browse, upload/mkdir/msg (thx dwarf) [screenshot](https://github.com/user-attachments/assets/9d21f020-1110-4652-abeb-6fc09c533d4f) |
| **nintendo 3ds** | can browse, upload, view thumbnails (thx bnjmn) |

<p align="center"><img src="https://github.com/user-attachments/assets/88deab3d-6cad-4017-8841-2f041472b853" /></p>
@@ -2299,6 +2449,9 @@ interact with copyparty using non-browser clients
* and for screenshots on macos, see [./contrib/ishare.iscu](./contrib/#ishareiscu)
* and for screenshots on linux, see [./contrib/flameshot.sh](./contrib/flameshot.sh)

* [Custom Uploader](https://f-droid.org/en/packages/com.nyx.custom_uploader/) (an Android app) as an alternative to copyparty's own [PartyUP!](#android-app)
  * works if you set UploadURL to `https://your.com/foo/?want=url&pw=hunter2` and FormDataName `f`

* contextlet (web browser integration); see [contrib contextlet](contrib/#send-to-cppcontextletjson)

* [igloo irc](https://iglooirc.com/): Method: `post` Host: `https://you.com/up/?want=url&pw=hunter2` Multipart: `yes` File parameter: `f`
@@ -2310,6 +2463,8 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)

> for basic-authentication, all of the following are accepted: `password` / `whatever:password` / `password:whatever` (the username is ignored)

NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
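two hedged curl sketches of the above (server, folder and password are placeholders; adjust to your setup):

```sh
# PUT-upload, keeping the original filename; password in the PW header
# (trailing slash matters, as noted above)
curl -T some.bin -H 'PW: hunter2' https://your.server/inc/

# multipart-POST the same file, asking for the resulting URL back
curl -F f=@some.bin 'https://your.server/inc/?want=url&pw=hunter2'
```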
@@ -2382,6 +2537,8 @@ below are some tweaks roughly ordered by usefulness:
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* if your volumes are on a network-disk such as NFS / SMB / s3, specifying larger values for `--iobuf` and/or `--s-rd-sz` and/or `--s-wr-sz` may help; try setting all of them to `524288` or `1048576` or `4194304`
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
* when running on AlpineLinux or other musl-based distro, try mimalloc for higher performance (and twice as much RAM usage); `apk add mimalloc2` and run copyparty with env-var `LD_PRELOAD=/usr/lib/libmimalloc-secure.so.2`
  * note that mimalloc requires special care when combined with prisonparty and/or bubbleparty/bubblewrap; you must give it access to `/proc` and `/sys` otherwise you'll encounter issues with FFmpeg (audio transcoding, thumbnails)
* `-j0` enables multiprocessing (actual multithreading), can reduce latency to `20+80/numCores` percent and generally improve performance in cpu-intensive workloads, for example:
  * lots of connections (many users or heavy clients)
  * simultaneous downloads and uploads saturating a 20gbps connection
@@ -2396,6 +2553,11 @@ below are some tweaks roughly ordered by usefulness:
when uploading files,

* when uploading from very fast storage (NVMe SSD) with chrome/firefox, enable `[wasm]` in the `[⚙️] settings` tab to more effectively use all CPU-cores for hashing
  * don't do this on Safari (runs faster without)
  * don't do this on older browsers; likely to provoke browser-bugs (browser eats all RAM and crashes)
  * can be made default-enabled serverside with `--nosubtle 137` (chrome v137+) or `--nosubtle 2` (chrome+firefox); see the config sketch below

* chrome is recommended (unfortunately), at least compared to firefox:
  * up to 90% faster when hashing, especially on SSDs
  * up to 40% faster when uploading over extremely fast internets
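for reference, that last flag as a config-file sketch (pick `137` instead of `2` if you only want to affect chrome v137 and newer):

```yaml
[global]
nosubtle: 2
```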
@@ -2576,7 +2738,7 @@ enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` and/or `pyvips` and/or `ffmpeg` (requires py2.7 or py3.5+)
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyvips` or `ffmpeg` or `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin` or pillow v11.3+
* **JPEG XL pictures:** `pyvips` or `ffmpeg`

enable sending [zeromq messages](#zeromq) from event-hooks: `pyzmq`
@@ -2585,6 +2747,8 @@ enable [smb](#smb-server) support (**not** recommended): `impacket==0.12.0`
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`

to install FFmpeg on Windows, grab [a recent build](https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z) -- you need `ffmpeg.exe` and `ffprobe.exe` from inside the `bin` folder; copy them into `C:\Windows\System32` or any other folder that's in your `%PATH%`


### dependency chickenbits
@@ -2601,10 +2765,11 @@ set any of the following environment variables to disable its associated optiona
| `PRTY_NO_CFSSL` | never attempt to generate self-signed certificates using [cfssl](https://github.com/cloudflare/cfssl) |
| `PRTY_NO_FFMPEG` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips |
| `PRTY_NO_FFPROBE` | **audio transcoding** goes byebye, **thumbnailing** must be handled by Pillow/libvips, **metadata-scanning** must be handled by mutagen |
| `PRTY_NO_MAGIC` | do not use [magic](https://pypi.org/project/python-magic/) for filetype detection |
| `PRTY_NO_MUTAGEN` | do not use [mutagen](https://pypi.org/project/mutagen/) for reading metadata from media files; will fallback to ffprobe |
| `PRTY_NO_PIL` | disable all [Pillow](https://pypi.org/project/pillow/)-based thumbnail support; will fallback to libvips or ffmpeg |
| `PRTY_NO_PILF` | disable Pillow `ImageFont` text rendering, used for folder thumbnails |
| `PRTY_NO_PIL_AVIF` | disable Pillow avif support (internal and/or [plugin](https://pypi.org/project/pillow-avif-plugin/)) |
| `PRTY_NO_PIL_HEIF` | disable 3rd-party Pillow plugin for [HEIF support](https://pypi.org/project/pyheif-pillow-opener/) |
| `PRTY_NO_PIL_WEBP` | disable use of native webp support in Pillow |
| `PRTY_NO_PSUTIL` | do not use [psutil](https://pypi.org/project/psutil/) for reaping stuck hooks and plugins on Windows |
@@ -2701,5 +2866,7 @@ if there's a wall of base64 in the log (thread stacks) then please include that,
for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)

specifically you may want to [build the sfx](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#just-the-sfx) or [build from scratch](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#build-from-scratch)

see [./docs/TODO.md](./docs/TODO.md) for planned features / fixes / changes
@@ -21,6 +21,7 @@ each plugin must define a `main()` which takes 3 arguments;
## on404

* [redirect.py](redirect.py) sends an HTTP 301 or 302, redirecting the client to another page/file
* [randpic.py](randpic.py) redirects `/foo/bar/randpic.jpg` to a random pic in `/foo/bar/`
* [sorry.py](answer.py) replies with a custom message instead of the usual 404
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
35
bin/handlers/randpic.py
Normal file
@@ -0,0 +1,35 @@
import os
import random
from urllib.parse import quote


# assuming /foo/bar/ is a valid URL but /foo/bar/randpic.png does not exist,
# hijack the 404 with a redirect to a random pic in that folder
#
# thx to lia & kipu for the idea


def main(cli, vn, rem):
    req_fn = rem.split("/")[-1]
    if not cli.can_read or not req_fn.startswith("randpic"):
        return

    req_abspath = vn.canonical(rem)
    req_ap_dir = os.path.dirname(req_abspath)
    files_in_dir = os.listdir(req_ap_dir)

    if "." in req_fn:
        file_ext = "." + req_fn.split(".")[-1]
        files_in_dir = [x for x in files_in_dir if x.lower().endswith(file_ext)]

    if not files_in_dir:
        return

    selected_file = random.choice(files_in_dir)

    req_url = "/".join([vn.vpath, rem]).strip("/")
    req_dir = req_url.rsplit("/", 1)[0]
    new_url = "/".join([req_dir, quote(selected_file)]).strip("/")

    cli.reply(b"redirecting...", 302, headers={"Location": "/" + new_url})
    return "true"
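usage sketch (the exact wiring is an assumption; check the top of this README and `--help` for the authoritative syntax): attach it to a volume as an on404 handler, e.g. `-v srv/pics:pics:r:c,on404=bin/handlers/randpic.py`, and a request for `/pics/randpic.jpg` should then 302-redirect to a random `.jpg` in that folder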
@@ -14,6 +14,8 @@ run copyparty with `--help-hooks` for usage details / hook type explanations (xm
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
* [podcast-normalizer.py](podcast-normalizer.py) creates a second file with dynamic-range-compression whenever an audio file is uploaded
  * good example of the `idx` [hook effect](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#hook-effects) to tell copyparty about additional files to scan/index


# upload batches
@@ -25,6 +27,7 @@ these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every sin
# before upload

* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension
  * good example of the `reloc` [hook effect](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#hook-effects); a sketch of how to enable it follows right after this list
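as a sketch (not verified against this exact version), reloc-by-ext.py would be wired up the same way the podcast-normalizer example below wires up its hook, just as a before-upload hook instead: `--xbu j,c1,bin/hooks/reloc-by-ext.py` -- where `j` passes the upload-info as json and `c1` lets copyparty read the hook's stdout so the `reloc` effect can take place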
# on message
121
bin/hooks/podcast-normalizer.py
Executable file
@@ -0,0 +1,121 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import subprocess as sp
|
||||||
|
|
||||||
|
|
||||||
|
_ = r"""
|
||||||
|
sends all uploaded audio files through an aggressive
|
||||||
|
dynamic-range-compressor to even out the volume levels
|
||||||
|
|
||||||
|
dependencies:
|
||||||
|
ffmpeg
|
||||||
|
|
||||||
|
being an xau hook, this gets eXecuted After Upload completion
|
||||||
|
but before copyparty has started hashing/indexing the file, so
|
||||||
|
we'll create a second normalized copy in a subfolder and tell
|
||||||
|
copyparty to hash/index that additional file as well
|
||||||
|
|
||||||
|
example usage as global config:
|
||||||
|
-e2d -e2t --xau j,c1,bin/hooks/podcast-normalizer.py
|
||||||
|
|
||||||
|
parameters explained,
|
||||||
|
e2d/e2t = enable database and metadata indexing
|
||||||
|
xau = execute after upload
|
||||||
|
j = this hook needs upload information as json (not just the filename)
|
||||||
|
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||||
|
|
||||||
|
example usage as a volflag (per-volume config):
|
||||||
|
-v srv/inc/pods:inc/pods:r:rw,ed:c,xau=j,c1,bin/hooks/podcast-normalizer.py
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
(share fs-path srv/inc/pods at URL /inc/pods,
|
||||||
|
readable by all, read-write for user ed,
|
||||||
|
running this xau (exec-after-upload) plugin for all uploaded files)
|
||||||
|
|
||||||
|
example usage as a volflag in a copyparty config file:
|
||||||
|
[/inc/pods]
|
||||||
|
srv/inc/pods
|
||||||
|
accs:
|
||||||
|
r: *
|
||||||
|
rw: ed
|
||||||
|
flags:
|
||||||
|
e2d # enables file indexing
|
||||||
|
e2t # metadata tags too
|
||||||
|
xau: j,c1,bin/hooks/podcast-normalizer.py
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
########################################################################
|
||||||
|
### CONFIG
|
||||||
|
|
||||||
|
# filetypes to process; ignores everything else
|
||||||
|
EXTS = "mp3 flac ogg oga opus m4a aac wav wma"
|
||||||
|
|
||||||
|
# the name of the subdir to put the normalized files in
|
||||||
|
SUBDIR = "normalized"
|
||||||
|
|
||||||
|
########################################################################
|
||||||
|
|
||||||
|
|
||||||
|
# try to enable support for crazy filenames
|
||||||
|
try:
|
||||||
|
from copyparty.util import fsenc
|
||||||
|
except:
|
||||||
|
|
||||||
|
def fsenc(p):
|
||||||
|
return p.encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
# read info from copyparty
|
||||||
|
inf = json.loads(sys.argv[1])
|
||||||
|
vpath = inf["vp"]
|
||||||
|
abspath = inf["ap"]
|
||||||
|
|
||||||
|
# check if the file-extension is on the to-be-processed list
|
||||||
|
ext = abspath.lower().split(".")[-1]
|
||||||
|
if ext not in EXTS.split():
|
||||||
|
return
|
||||||
|
|
||||||
|
# jump into the folder where the file was uploaded
|
||||||
|
# and create the subfolder to place the normalized copy inside
|
||||||
|
dirpath, filename = os.path.split(abspath)
|
||||||
|
os.chdir(fsenc(dirpath))
|
||||||
|
os.makedirs(SUBDIR, exist_ok=True)
|
||||||
|
|
||||||
|
# the input and output filenames to give ffmpeg
|
||||||
|
fname_in = fsenc(f"./{filename}")
|
||||||
|
fname_out = fsenc(f"{SUBDIR}/{filename}.opus")
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
# create and run the ffmpeg command
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-i", fname_in,
|
||||||
|
b"-af", b"dynaudnorm=f=100:g=9", # the normalizer config
|
||||||
|
b"-c:a", b"libopus",
|
||||||
|
b"-b:a", b"128k",
|
||||||
|
fname_out,
|
||||||
|
]
|
||||||
|
# fmt: on
|
||||||
|
sp.check_output(cmd)
|
||||||
|
|
||||||
|
# and finally, tell copyparty about the new file
|
||||||
|
# so it appears in the database and rss-feed:
|
||||||
|
vpath = f"{SUBDIR}/{filename}.opus"
|
||||||
|
print(json.dumps({"idx": {"vp": [vpath]}}))
|
||||||
|
|
||||||
|
# (it's fine to give it a relative path like that; it gets
|
||||||
|
# resolved relative to the folder the file was uploaded into)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
try:
|
||||||
|
main()
|
||||||
|
except Exception as ex:
|
||||||
|
print("podcast-normalizer failed; %r" % (ex,))
|
||||||
@@ -71,6 +71,9 @@ def main():
|
|||||||
## selecting it inside the print at the end:
|
## selecting it inside the print at the end:
|
||||||
##
|
##
|
||||||
|
|
||||||
|
# move all uploads to one specific folder
|
||||||
|
into_junk = {"vp": "/junk"}
|
||||||
|
|
||||||
# create a subfolder named after the filetype and move it into there
|
# create a subfolder named after the filetype and move it into there
|
||||||
into_subfolder = {"vp": ext}
|
into_subfolder = {"vp": ext}
|
||||||
|
|
||||||
@@ -92,8 +95,8 @@ def main():
|
|||||||
by_category = {} # no action
|
by_category = {} # no action
|
||||||
|
|
||||||
# now choose the default effect to apply; can be any of these:
|
# now choose the default effect to apply; can be any of these:
|
||||||
# into_subfolder into_toplevel into_sibling by_category
|
# into_junk into_subfolder into_toplevel into_sibling by_category
|
||||||
effect = {"vp": "/junk"}
|
effect = into_sibling
|
||||||
|
|
||||||
##
|
##
|
||||||
## but we can keep going, adding more specific rules
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
// see usb-eject.py for usage
|
// see usb-eject.py for usage
|
||||||
|
|
||||||
function usbclick() {
|
function usbclick() {
|
||||||
QS('#treeul a[href="/usb/"]').click();
|
var o = QS('#treeul a[dst="/usb/"]') || QS('#treepar a[dst="/usb/"]');
|
||||||
|
if (o)
|
||||||
|
o.click();
|
||||||
}
|
}
|
||||||
|
|
||||||
function eject_cb() {
|
function eject_cb() {
|
||||||
var t = this.responseText;
|
var t = ('' + this.responseText).trim();
|
||||||
if (t.indexOf('can be safely unplugged') < 0 && t.indexOf('Device can be removed') < 0)
|
if (t.indexOf('can be safely unplugged') < 0 && t.indexOf('Device can be removed') < 0)
|
||||||
return toast.err(30, 'usb eject failed:\n\n' + t);
|
return toast.err(30, 'usb eject failed:\n\n' + t);
|
||||||
|
|
||||||
@@ -19,11 +21,14 @@ function add_eject_2(a) {
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
var v = aw[2],
|
var v = aw[2],
|
||||||
k = 'umount_' + v,
|
k = 'umount_' + v;
|
||||||
o = ebi(k);
|
|
||||||
|
|
||||||
if (o)
|
for (var b = 0; b < 9; b++) {
|
||||||
|
var o = ebi(k);
|
||||||
|
if (!o)
|
||||||
|
break;
|
||||||
o.parentNode.removeChild(o);
|
o.parentNode.removeChild(o);
|
||||||
|
}
|
||||||
|
|
||||||
a.appendChild(mknod('span', k, '⏏'), a);
|
a.appendChild(mknod('span', k, '⏏'), a);
|
||||||
o = ebi(k);
|
o = ebi(k);
|
||||||
@@ -40,7 +45,7 @@ function add_eject_2(a) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
function add_eject() {
|
function add_eject() {
|
||||||
var o = QSA('#treeul a[href^="/usb/"]');
|
var o = QSA('#treeul a[href^="/usb/"]') || QSA('#treepar a[href^="/usb/"]');
|
||||||
for (var a = o.length - 1; a > 0; a--)
|
for (var a = o.length - 1; a > 0; a--)
|
||||||
add_eject_2(o[a]);
|
add_eject_2(o[a]);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import os
|
|||||||
import stat
|
import stat
|
||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
import sys
|
import sys
|
||||||
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@@ -28,14 +29,17 @@ which does the following respectively,
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
MOUNT_BASE = b"/run/media/egon/"
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
try:
|
try:
|
||||||
label = sys.argv[1].split(":usb-eject:")[1].split(":")[0]
|
label = sys.argv[1].split(":usb-eject:")[1].split(":")[0]
|
||||||
mp = "/run/media/egon/" + label
|
mp = MOUNT_BASE + unquote(label)
|
||||||
# print("ejecting [%s]... " % (mp,), end="")
|
# print("ejecting [%s]... " % (mp,), end="")
|
||||||
mp = os.path.abspath(os.path.realpath(mp.encode("utf-8")))
|
mp = os.path.abspath(os.path.realpath(mp))
|
||||||
st = os.lstat(mp)
|
st = os.lstat(mp)
|
||||||
if not stat.S_ISDIR(st.st_mode):
|
if not stat.S_ISDIR(st.st_mode) or not mp.startswith(MOUNT_BASE):
|
||||||
raise Exception("not a regular directory")
|
raise Exception("not a regular directory")
|
||||||
|
|
||||||
# if you're running copyparty as root (thx for the faith)
|
# if you're running copyparty as root (thx for the faith)
|
||||||
|
|||||||
@@ -2,11 +2,15 @@
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
import json
|
import json
|
||||||
import zlib
|
|
||||||
import struct
|
import struct
|
||||||
import base64
|
import base64
|
||||||
import hashlib
|
import hashlib
|
||||||
|
|
||||||
|
try:
|
||||||
|
from zlib_ng import zlib_ng as zlib
|
||||||
|
except:
|
||||||
|
import zlib
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from copyparty.util import fsenc
|
from copyparty.util import fsenc
|
||||||
except:
|
except:
|
||||||
|
|||||||
@@ -22,6 +22,8 @@ set -e
|
|||||||
# modifies the keyfinder python lib to load the .so in ~/pe
|
# modifies the keyfinder python lib to load the .so in ~/pe
|
||||||
|
|
||||||
|
|
||||||
|
export FORCE_COLOR=1
|
||||||
|
|
||||||
linux=1
|
linux=1
|
||||||
|
|
||||||
win=
|
win=
|
||||||
@@ -186,12 +188,15 @@ install_keyfinder() {
|
|||||||
echo "so not found at $sop"
|
echo "so not found at $sop"
|
||||||
exit 1
|
exit 1
|
||||||
}
|
}
|
||||||
|
|
||||||
|
x=${-//[^x]/}; set -x; cat /etc/alpine-release
|
||||||
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
|
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
|
||||||
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \
|
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \
|
||||||
|
CXXFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \
|
||||||
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
|
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
|
||||||
PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
|
PKG_CONFIG_PATH="/c/msys64/mingw64/lib/pkgconfig:$h/pe/keyfinder/lib/pkgconfig" \
|
||||||
$pybin -m pip install --user keyfinder
|
$pybin -m pip install --user keyfinder
|
||||||
|
[ "$x" ] || set +x
|
||||||
|
|
||||||
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
|
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
|
||||||
for pyso in "${pypath%/*}"/*.so; do
|
for pyso in "${pypath%/*}"/*.so; do
|
||||||
|
|||||||
11
bin/u2c.py
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
S_VERSION = "2.9"
|
S_VERSION = "2.11"
|
||||||
S_BUILD_DT = "2025-01-27"
|
S_BUILD_DT = "2025-05-18"
|
||||||
|
|
||||||
"""
|
"""
|
||||||
u2c.py: upload to copyparty
|
u2c.py: upload to copyparty
|
||||||
@@ -807,7 +807,9 @@ def handshake(ar, file, search):
|
|||||||
else:
|
else:
|
||||||
if ar.touch:
|
if ar.touch:
|
||||||
req["umod"] = True
|
req["umod"] = True
|
||||||
if ar.ow:
|
if ar.owo:
|
||||||
|
req["replace"] = "mt"
|
||||||
|
elif ar.ow:
|
||||||
req["replace"] = True
|
req["replace"] = True
|
||||||
|
|
||||||
file.recheck = False
|
file.recheck = False
|
||||||
@@ -1287,7 +1289,7 @@ class Ctl(object):
|
|||||||
if self.ar.jw:
|
if self.ar.jw:
|
||||||
print("%s %s" % (wark, vp))
|
print("%s %s" % (wark, vp))
|
||||||
else:
|
else:
|
||||||
zd = datetime.datetime.fromtimestamp(file.lmod, UTC)
|
zd = datetime.datetime.fromtimestamp(max(0, file.lmod), UTC)
|
||||||
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
|
||||||
zd.year,
|
zd.year,
|
||||||
zd.month,
|
zd.month,
|
||||||
@@ -1538,6 +1540,7 @@ source file/folder selection uses rsync syntax, meaning that:
|
|||||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||||
ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
|
ap.add_argument("--touch", action="store_true", help="if last-modified timestamps differ, push local to server (need write+delete perms)")
|
||||||
ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
|
ap.add_argument("--ow", action="store_true", help="overwrite existing files instead of autorenaming")
|
||||||
|
ap.add_argument("--owo", action="store_true", help="overwrite existing files if server-file is older")
|
||||||
ap.add_argument("--spd", action="store_true", help="print speeds for each file")
|
ap.add_argument("--spd", action="store_true", help="print speeds for each file")
|
||||||
ap.add_argument("--version", action="store_true", help="show version and exit")
|
ap.add_argument("--version", action="store_true", help="show version and exit")
|
||||||
|
|
||||||
|
|||||||
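usage sketch for the new `--owo` flag added above (host and paths are placeholders):

```sh
# upload a folder, overwriting server-side copies only if the local file is newer
python3 u2c.py --owo https://your.server/inc/ ./podcasts/
```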
@@ -50,6 +50,9 @@
* give a 3rd argument to install it to your copyparty config
* systemd service at [`systemd/cfssl.service`](systemd/cfssl.service)

### [`zfs-tune.py`](zfs-tune.py)
* tunes databases for better performance when they are stored on a zfs filesystem; also see the [openzfs docs](https://openzfs.github.io/openzfs-docs/Performance%20and%20Tuning/Workload%20Tuning.html#database-workloads) and specifically the SQLite subsection

# OS integration
init-scripts to start copyparty as a service
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
@@ -2,19 +2,38 @@
|
|||||||
# not accept more consecutive clients than what copyparty is able to;
|
# not accept more consecutive clients than what copyparty is able to;
|
||||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||||
#
|
#
|
||||||
|
# ======================================================================
|
||||||
|
#
|
||||||
|
# to reverse-proxy a specific path/subpath/location below a domain
|
||||||
|
# (rather than a complete subdomain), for example "/qw/er", you must
|
||||||
|
# run copyparty with --rp-loc /qw/as and also change the following:
|
||||||
|
# location / {
|
||||||
|
# proxy_pass http://cpp_tcp;
|
||||||
|
# to this:
|
||||||
|
# location /qw/er/ {
|
||||||
|
# proxy_pass http://cpp_tcp/qw/er/;
|
||||||
|
#
|
||||||
|
# ======================================================================
|
||||||
|
#
|
||||||
# rarely, in some extreme usecases, it can be good to add -j0
|
# rarely, in some extreme usecases, it can be good to add -j0
|
||||||
# (40'000 requests per second, or 20gbps upload/download in parallel)
|
# (40'000 requests per second, or 20gbps upload/download in parallel)
|
||||||
# but this is usually counterproductive and slightly buggy
|
# but this is usually counterproductive and slightly buggy
|
||||||
#
|
#
|
||||||
|
# ======================================================================
|
||||||
|
#
|
||||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||||
#
|
#
|
||||||
# if you are behind cloudflare (or another protection service),
|
# ======================================================================
|
||||||
|
#
|
||||||
|
# if you are behind cloudflare (or another CDN/WAF/protection service),
|
||||||
# remember to reject all connections which are not coming from your
|
# remember to reject all connections which are not coming from your
|
||||||
# protection service -- for cloudflare in particular, you can
|
# protection service -- for cloudflare in particular, you can
|
||||||
# generate the list of permitted IP ranges like so:
|
# generate the list of permitted IP ranges like so:
|
||||||
# (curl -s https://www.cloudflare.com/ips-v{4,6} | sed 's/^/allow /; s/$/;/'; echo; echo "deny all;") > /etc/nginx/cloudflare-only.conf
|
# (curl -s https://www.cloudflare.com/ips-v{4,6} | sed 's/^/allow /; s/$/;/'; echo; echo "deny all;") > /etc/nginx/cloudflare-only.conf
|
||||||
#
|
#
|
||||||
# and then enable it below by uncommenting the cloudflare-only.conf line
|
||||||
|
#
|
||||||
|
# ======================================================================
|
||||||
|
|
||||||
|
|
||||||
upstream cpp_tcp {
|
upstream cpp_tcp {
|
||||||
@@ -66,13 +85,13 @@ server {
|
|||||||
proxy_buffer_size 16k;
|
proxy_buffer_size 16k;
|
||||||
proxy_busy_buffers_size 24k;
|
proxy_busy_buffers_size 24k;
|
||||||
|
|
||||||
|
proxy_set_header Connection "Keep-Alive";
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
proxy_set_header X-Real-IP $remote_addr;
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
# NOTE: with cloudflare you want this instead:
|
|
||||||
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header Connection "Keep-Alive";
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
# NOTE: with cloudflare you want this X-Forwarded-For instead:
|
||||||
|
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,29 +1,31 @@
|
|||||||
{ config, pkgs, lib, ... }:
|
{
|
||||||
|
config,
|
||||||
with lib;
|
pkgs,
|
||||||
|
lib,
|
||||||
let
|
...
|
||||||
|
}:
|
||||||
|
with lib; let
|
||||||
mkKeyValue = key: value:
|
mkKeyValue = key: value:
|
||||||
if value == true then
|
if value == true
|
||||||
# sets with a true boolean value are coerced to just the key name
|
then
|
||||||
|
# sets with a true boolean value are coerced to just the key name
|
||||||
key
|
key
|
||||||
else if value == false then
|
else if value == false
|
||||||
# or omitted completely when false
|
then
|
||||||
|
# or omitted completely when false
|
||||||
""
|
""
|
||||||
else
|
else (generators.mkKeyValueDefault {inherit mkValueString;} ": " key value);
|
||||||
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
|
|
||||||
|
|
||||||
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
|
mkAttrsString = value: (generators.toKeyValue {inherit mkKeyValue;} value);
|
||||||
|
|
||||||
mkValueString = value:
|
mkValueString = value:
|
||||||
if isList value then
|
if isList value
|
||||||
(concatStringsSep ", " (map mkValueString value))
|
then (concatStringsSep ", " (map mkValueString value))
|
||||||
else if isAttrs value then
|
else if isAttrs value
|
||||||
"\n" + (mkAttrsString value)
|
then "\n" + (mkAttrsString value)
|
||||||
else
|
else (generators.mkValueStringDefault {} value);
|
||||||
(generators.mkValueStringDefault { } value);
|
|
||||||
|
|
||||||
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
|
mkSectionName = value: "[" + (escape ["[" "]"] value) + "]";
|
||||||
|
|
||||||
mkSection = name: attrs: ''
|
mkSection = name: attrs: ''
|
||||||
${mkSectionName name}
|
${mkSectionName name}
|
||||||
@@ -49,12 +51,12 @@ let
|
|||||||
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
name = "copyparty";
|
|
||||||
cfg = config.services.copyparty;
|
cfg = config.services.copyparty;
|
||||||
configFile = pkgs.writeText "${name}.conf" configStr;
|
configFile = pkgs.writeText "copyparty.conf" configStr;
|
||||||
runtimeConfigPath = "/run/${name}/${name}.conf";
|
runtimeConfigPath = "/run/copyparty/copyparty.conf";
|
||||||
home = "/var/lib/${name}";
|
externalCacheDir = "/var/cache/copyparty";
|
||||||
defaultShareDir = "${home}/data";
|
externalStateDir = "/var/lib/copyparty";
|
||||||
|
defaultShareDir = "${externalStateDir}/data";
|
||||||
in {
|
in {
|
||||||
options.services.copyparty = {
|
options.services.copyparty = {
|
||||||
enable = mkEnableOption "web-based file manager";
|
enable = mkEnableOption "web-based file manager";
|
||||||
@@ -68,6 +70,35 @@ in {
|
|||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
|
mkHashWrapper = mkOption {
|
||||||
|
type = types.bool;
|
||||||
|
default = true;
|
||||||
|
description = ''
|
||||||
|
Make a shell script wrapper called 'copyparty-hash' with all options set here,
|
||||||
|
that launches the hashing cli.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
user = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "copyparty";
|
||||||
|
description = ''
|
||||||
|
The user that copyparty will run under.
|
||||||
|
|
||||||
|
If changed from default, you are responsible for making sure the user exists.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
|
group = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "copyparty";
|
||||||
|
description = ''
|
||||||
|
The group that copyparty will run under.
|
||||||
|
|
||||||
|
If changed from default, you are responsible for making sure the user exists.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
|
||||||
openFilesLimit = mkOption {
|
openFilesLimit = mkOption {
|
||||||
default = 4096;
|
default = 4096;
|
||||||
type = types.either types.int types.str;
|
type = types.either types.int types.str;
|
||||||
@@ -79,22 +110,25 @@ in {
|
|||||||
description = ''
|
description = ''
|
||||||
Global settings to apply.
|
Global settings to apply.
|
||||||
Directly maps to values in the [global] section of the copyparty config.
|
Directly maps to values in the [global] section of the copyparty config.
|
||||||
|
Cannot set "c" or "hist", those are set by this module.
|
||||||
See `${getExe cfg.package} --help` for more details.
|
See `${getExe cfg.package} --help` for more details.
|
||||||
'';
|
'';
|
||||||
default = {
|
default = {
|
||||||
i = "127.0.0.1";
|
i = "127.0.0.1";
|
||||||
no-reload = true;
|
no-reload = true;
|
||||||
|
hist = externalCacheDir;
|
||||||
};
|
};
|
||||||
example = literalExpression ''
|
example = literalExpression ''
|
||||||
{
|
{
|
||||||
i = "0.0.0.0";
|
i = "0.0.0.0";
|
||||||
no-reload = true;
|
no-reload = true;
|
||||||
|
hist = ${externalCacheDir};
|
||||||
}
|
}
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
accounts = mkOption {
|
accounts = mkOption {
|
||||||
type = types.attrsOf (types.submodule ({ ... }: {
|
type = types.attrsOf (types.submodule ({...}: {
|
||||||
options = {
|
options = {
|
||||||
passwordFile = mkOption {
|
passwordFile = mkOption {
|
||||||
type = types.str;
|
type = types.str;
|
||||||
@@ -109,7 +143,7 @@ in {
|
|||||||
description = ''
|
description = ''
|
||||||
A set of copyparty accounts to create.
|
A set of copyparty accounts to create.
|
||||||
'';
|
'';
|
||||||
default = { };
|
default = {};
|
||||||
example = literalExpression ''
|
example = literalExpression ''
|
||||||
{
|
{
|
||||||
ed.passwordFile = "/run/keys/copyparty/ed";
|
ed.passwordFile = "/run/keys/copyparty/ed";
|
||||||
@@ -118,10 +152,10 @@ in {
|
|||||||
};
|
};
|
||||||
|
|
||||||
volumes = mkOption {
|
volumes = mkOption {
|
||||||
type = types.attrsOf (types.submodule ({ ... }: {
|
type = types.attrsOf (types.submodule ({...}: {
|
||||||
options = {
|
options = {
|
||||||
path = mkOption {
|
path = mkOption {
|
||||||
type = types.str;
|
type = types.path;
|
||||||
description = ''
|
description = ''
|
||||||
Path of a directory to share.
|
Path of a directory to share.
|
||||||
'';
|
'';
|
||||||
@@ -177,7 +211,7 @@ in {
|
|||||||
nohash = "\.iso$";
|
nohash = "\.iso$";
|
||||||
};
|
};
|
||||||
'';
|
'';
|
||||||
default = { };
|
default = {};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
}));
|
}));
|
||||||
@@ -185,7 +219,7 @@ in {
|
|||||||
default = {
|
default = {
|
||||||
"/" = {
|
"/" = {
|
||||||
path = defaultShareDir;
|
path = defaultShareDir;
|
||||||
access = { r = "*"; };
|
access = {r = "*";};
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
example = literalExpression ''
|
example = literalExpression ''
|
||||||
@@ -204,52 +238,66 @@ in {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
config = mkIf cfg.enable {
|
config = mkIf cfg.enable (let
|
||||||
|
command = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
||||||
|
in {
|
||||||
systemd.services.copyparty = {
|
systemd.services.copyparty = {
|
||||||
description = "http file sharing hub";
|
description = "http file sharing hub";
|
||||||
wantedBy = [ "multi-user.target" ];
|
wantedBy = ["multi-user.target"];
|
||||||
|
|
||||||
environment = {
|
environment = {
|
||||||
PYTHONUNBUFFERED = "true";
|
PYTHONUNBUFFERED = "true";
|
||||||
XDG_CONFIG_HOME = "${home}/.config";
|
XDG_CONFIG_HOME = externalStateDir;
|
||||||
};
|
};
|
||||||
|
|
||||||
preStart = let
|
preStart = let
|
||||||
replaceSecretCommand = name: attrs:
|
replaceSecretCommand = name: attrs: "${getExe pkgs.replace-secret} '${
|
||||||
"${getExe pkgs.replace-secret} '${
|
passwordPlaceholder name
|
||||||
passwordPlaceholder name
|
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
||||||
}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
|
||||||
in ''
|
in ''
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
install -m 600 ${configFile} ${runtimeConfigPath}
|
install -m 600 ${configFile} ${runtimeConfigPath}
|
||||||
${concatStringsSep "\n"
|
${concatStringsSep "\n"
|
||||||
(mapAttrsToList replaceSecretCommand cfg.accounts)}
|
(mapAttrsToList replaceSecretCommand cfg.accounts)}
|
||||||
'';
|
'';
|
||||||
|
|
||||||
serviceConfig = {
|
serviceConfig = {
|
||||||
Type = "simple";
|
Type = "simple";
|
||||||
ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
ExecStart = command;
|
||||||
|
|
||||||
# Hardening options
|
# Hardening options
|
||||||
User = "copyparty";
|
User = cfg.user;
|
||||||
Group = "copyparty";
|
Group = cfg.group;
|
||||||
RuntimeDirectory = name;
|
RuntimeDirectory = ["copyparty"];
|
||||||
RuntimeDirectoryMode = "0700";
|
RuntimeDirectoryMode = "0700";
|
||||||
StateDirectory = [ name "${name}/data" "${name}/.config" ];
|
StateDirectory = ["copyparty"];
|
||||||
StateDirectoryMode = "0700";
|
StateDirectoryMode = "0700";
|
||||||
WorkingDirectory = home;
|
CacheDirectory = lib.mkIf (cfg.settings ? hist) ["copyparty"];
|
||||||
|
CacheDirectoryMode = lib.mkIf (cfg.settings ? hist) "0700";
|
||||||
|
WorkingDirectory = externalStateDir;
|
||||||
|
BindReadOnlyPaths =
|
||||||
|
[
|
||||||
|
"/nix/store"
|
||||||
|
"-/etc/resolv.conf"
|
||||||
|
"-/etc/nsswitch.conf"
|
||||||
|
"-/etc/group"
|
||||||
|
"-/etc/hosts"
|
||||||
|
"-/etc/localtime"
|
||||||
|
]
|
||||||
|
++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
||||||
|
BindPaths =
|
||||||
|
(
|
||||||
|
if cfg.settings ? hist
|
||||||
|
then [cfg.settings.hist]
|
||||||
|
else []
|
||||||
|
)
|
||||||
|
++ [externalStateDir]
|
||||||
|
++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
||||||
|
# ProtectSystem = "strict";
|
||||||
|
# Note that unlike what 'ro' implies,
|
||||||
|
# this actually makes it impossible to read anything in the root FS,
|
||||||
|
# except for things explicitly mounted via `RuntimeDirectory`, `StateDirectory`, `CacheDirectory`, and `BindReadOnlyPaths`.
|
||||||
|
# This is because TemporaryFileSystem creates a *new* *empty* filesystem for the process, so only bindmounts are visible.
|
||||||
TemporaryFileSystem = "/:ro";
|
TemporaryFileSystem = "/:ro";
|
||||||
BindReadOnlyPaths = [
|
|
||||||
"/nix/store"
|
|
||||||
"-/etc/resolv.conf"
|
|
||||||
"-/etc/nsswitch.conf"
|
|
||||||
"-/etc/hosts"
|
|
||||||
"-/etc/localtime"
|
|
||||||
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
|
||||||
BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
|
||||||
# Would re-mount paths ignored by temporary root
|
|
||||||
#ProtectSystem = "strict";
|
|
||||||
ProtectHome = true;
|
|
||||||
PrivateTmp = true;
|
PrivateTmp = true;
|
||||||
PrivateDevices = true;
|
PrivateDevices = true;
|
||||||
ProtectKernelTunables = true;
|
ProtectKernelTunables = true;
|
||||||
@@ -269,15 +317,48 @@ in {
|
|||||||
NoNewPrivileges = true;
|
NoNewPrivileges = true;
|
||||||
LockPersonality = true;
|
LockPersonality = true;
|
||||||
RestrictRealtime = true;
|
RestrictRealtime = true;
|
||||||
|
MemoryDenyWriteExecute = true;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
users.groups.copyparty = { };
|
# ensure volumes exist:
|
||||||
users.users.copyparty = {
|
systemd.tmpfiles.settings."copyparty" = (
|
||||||
|
lib.attrsets.mapAttrs' (
|
||||||
|
name: value:
|
||||||
|
lib.attrsets.nameValuePair (value.path) {
|
||||||
|
d = {
|
||||||
|
#: in front of things means it wont change it if the directory already exists.
|
||||||
|
group = ":${cfg.group}";
|
||||||
|
user = ":${cfg.user}";
|
||||||
|
mode = ":755";
|
||||||
|
};
|
||||||
|
}
|
||||||
|
)
|
||||||
|
cfg.volumes
|
||||||
|
);
|
||||||
|
|
||||||
|
users.groups.copyparty = lib.mkIf (cfg.user == "copyparty" && cfg.group == "copyparty") {};
|
||||||
|
users.users.copyparty = lib.mkIf (cfg.user == "copyparty" && cfg.group == "copyparty") {
|
||||||
description = "Service user for copyparty";
|
description = "Service user for copyparty";
|
||||||
group = "copyparty";
|
group = "copyparty";
|
||||||
home = home;
|
home = externalStateDir;
|
||||||
isSystemUser = true;
|
isSystemUser = true;
|
||||||
};
|
};
|
||||||
};
|
environment.systemPackages = lib.mkIf cfg.mkHashWrapper [
|
||||||
|
(pkgs.writeShellScriptBin
|
||||||
|
"copyparty-hash"
|
||||||
|
''
|
||||||
|
set -a # automatically export variables
|
||||||
|
# set same environment variables as the systemd service
|
||||||
|
${lib.pipe config.systemd.services.copyparty.environment [
|
||||||
|
(lib.filterAttrs (n: v: v != null && n != "PATH"))
|
||||||
|
(lib.mapAttrs (_: v: "${v}"))
|
||||||
|
(lib.toShellVars)
|
||||||
|
]}
|
||||||
|
PATH=${config.systemd.services.copyparty.environment.PATH}:$PATH
|
||||||
|
|
||||||
|
exec ${command} --ah-cli
|
||||||
|
'')
|
||||||
|
];
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Maintainer: icxes <dev.null@need.moe>
|
# Maintainer: icxes <dev.null@need.moe>
|
||||||
pkgname=copyparty
|
pkgname=copyparty
|
||||||
pkgver="1.16.12"
|
pkgver="1.18.5"
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||||
arch=("any")
|
arch=("any")
|
||||||
@@ -22,7 +22,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
|
|||||||
)
|
)
|
||||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||||
backup=("etc/${pkgname}.d/init" )
|
backup=("etc/${pkgname}.d/init" )
|
||||||
sha256sums=("b5b65103198a3dd8a3f9b15c3d6aff6c21147bf87627ceacc64205493c248997")
|
sha256sums=("30dd1bbb479187a44f3e44c8322856873c0022485237d457fadfeb5a6af51f7a")
|
||||||
|
|
||||||
build() {
|
build() {
|
||||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, pyzmq, ffmpeg, mutagen,
|
{ lib, stdenv, makeWrapper, fetchurl, util-linux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, pyzmq, ffmpeg, mutagen,
|
||||||
|
|
||||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||||
withHashedPasswords ? true,
|
withHashedPasswords ? true,
|
||||||
@@ -61,7 +61,8 @@ in stdenv.mkDerivation {
|
|||||||
installPhase = ''
|
installPhase = ''
|
||||||
install -Dm755 $src $out/share/copyparty-sfx.py
|
install -Dm755 $src $out/share/copyparty-sfx.py
|
||||||
makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
|
makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \
|
||||||
--set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
--set PATH '${lib.makeBinPath ([ util-linux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \
|
||||||
--add-flags "$out/share/copyparty-sfx.py"
|
--add-flags "$out/share/copyparty-sfx.py"
|
||||||
'';
|
'';
|
||||||
|
meta.mainProgram = "copyparty";
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"url": "https://github.com/9001/copyparty/releases/download/v1.16.12/copyparty-sfx.py",
|
"url": "https://github.com/9001/copyparty/releases/download/v1.18.5/copyparty-sfx.py",
|
||||||
"version": "1.16.12",
|
"version": "1.18.5",
|
||||||
"hash": "sha256-gZZqd88/8PEseVtWspocqrWV7Ck8YQAhcsa4ED3F4JU="
|
"hash": "sha256-rEYjxJwzTzN+upo5UQ8hdYonQiNK1c+SfduS6M/QXw0="
|
||||||
}
|
}
|
||||||
@@ -12,6 +12,23 @@ almost the same as minimal-up2k.html except this one...:
-- looks slightly better


========================
== USAGE INSTRUCTIONS ==

1. create a volume which anyone can read from (if you haven't already)
2. copy this file into that volume, so anyone can download it
3. enable the plugin by telling the webbrowser to load this file;
   assuming the URL to the public volume is /res/, and
   assuming you're using config-files, then add this to your config:

     [global]
       js-browser: /res/minimal-up2k.js

   alternatively, if you're not using config-files, then
   add the following commandline argument instead:
     --js-browser=/res/minimal-up2k.js

*/

var u2min = `
|
|||||||
107
contrib/zfs-tune.py
Executable file
@@ -0,0 +1,107 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sqlite3
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
|
||||||
|
"""
|
||||||
|
when the up2k-database is stored on a zfs volume, this may give
|
||||||
|
slightly higher performance (actual gains not measured yet)
|
||||||
|
|
||||||
|
NOTE: must be applied in combination with the related advice in the openzfs documentation;
|
||||||
|
https://openzfs.github.io/openzfs-docs/Performance%20and%20Tuning/Workload%20Tuning.html#database-workloads
|
||||||
|
and see specifically the SQLite subsection
|
||||||
|
|
||||||
|
it is assumed that all databases are stored in a single location,
|
||||||
|
for example with `--hist /var/store/hists`
|
||||||
|
|
||||||
|
three alternatives for running this script:
|
||||||
|
|
||||||
|
1. copy it into /var/store/hists and run "python3 zfs-tune.py s"
|
||||||
|
(s = modify all databases below folder containing script)
|
||||||
|
|
||||||
|
2. cd into /var/store/hists and run "python3 ~/zfs-tune.py w"
|
||||||
|
(w = modify all databases below current working directory)
|
||||||
|
|
||||||
|
3. python3 ~/zfs-tune.py /var/store/hists
|
||||||
|
|
||||||
|
if you use docker, run copyparty with `--hist /cfg/hists`, copy this script into /cfg, and run this:
|
||||||
|
podman run --rm -it --entrypoint /usr/bin/python3 ghcr.io/9001/copyparty-ac /cfg/zfs-tune.py s
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
PAGESIZE = 65536
|
||||||
|
|
||||||
|
|
||||||
|
# borrowed from copyparty; short efficient stacktrace for errors
|
||||||
|
def min_ex(max_lines: int = 8, reverse: bool = False) -> str:
|
||||||
|
et, ev, tb = sys.exc_info()
|
||||||
|
stb = traceback.extract_tb(tb) if tb else traceback.extract_stack()[:-1]
|
||||||
|
fmt = "%s:%d <%s>: %s"
|
||||||
|
ex = [fmt % (fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in stb]
|
||||||
|
if et or ev or tb:
|
||||||
|
ex.append("[%s] %s" % (et.__name__ if et else "(anonymous)", ev))
|
||||||
|
return "\n".join(ex[-max_lines:][:: -1 if reverse else 1])
|
||||||
|
|
||||||
|
|
||||||
|
def set_pagesize(db_path):
|
||||||
|
try:
|
||||||
|
# check current page_size
|
||||||
|
with sqlite3.connect(db_path) as db:
|
||||||
|
v = db.execute("pragma page_size").fetchone()[0]
|
||||||
|
if v == PAGESIZE:
|
||||||
|
print(" `-- OK")
|
||||||
|
return
|
||||||
|
|
||||||
|
# https://www.sqlite.org/pragma.html#pragma_page_size
|
||||||
|
# `- disable wal; set pagesize; vacuum
|
||||||
|
# (copyparty will reenable wal if necessary)
|
||||||
|
|
||||||
|
with sqlite3.connect(db_path) as db:
|
||||||
|
db.execute("pragma journal_mode=delete")
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
with sqlite3.connect(db_path) as db:
|
||||||
|
db.execute(f"pragma page_size = {PAGESIZE}")
|
||||||
|
db.execute("vacuum")
|
||||||
|
|
||||||
|
print(" `-- new pagesize OK")
|
||||||
|
|
||||||
|
except Exception:
|
||||||
|
err = min_ex().replace("\n", "\n -- ")
|
||||||
|
print(f"FAILED: {db_path}\n -- {err}")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
top = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
cwd = os.path.abspath(os.getcwd())
|
||||||
|
try:
|
||||||
|
x = sys.argv[1]
|
||||||
|
except:
|
||||||
|
print(f"""
|
||||||
|
this script takes one mandatory argument:
|
||||||
|
specify 's' to start recursing from folder containing this script file ({top})
|
||||||
|
specify 'w' to start recursing from the current working directory ({cwd})
|
||||||
|
specify a path to start recursing from there
|
||||||
|
""")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if x.lower() == "w":
|
||||||
|
top = cwd
|
||||||
|
elif x.lower() != "s":
|
||||||
|
top = x
|
||||||
|
|
||||||
|
for dirpath, dirs, files in os.walk(top):
|
||||||
|
for fname in files:
|
||||||
|
if not fname.endswith(".db"):
|
||||||
|
continue
|
||||||
|
db_path = os.path.join(dirpath, fname)
|
||||||
|
print(db_path)
|
||||||
|
set_pagesize(db_path)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
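After the script has run, the new page size can be spot-checked directly; a minimal sketch (the .db path is hypothetical, any up2k database below the --hist folder works):

import sqlite3

db = sqlite3.connect("/var/store/hists/up2k.db")       # hypothetical path, adjust to your --hist folder
print(db.execute("pragma page_size").fetchone()[0])    # prints 65536 once the new pagesize has been applied
db.close()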

@@ -80,6 +80,7 @@ web/deps/prismd.css
 web/deps/scp.woff2
 web/deps/sha512.ac.js
 web/deps/sha512.hw.js
+web/idp.html
 web/iiam.gif
 web/md.css
 web/md.html

@@ -40,6 +40,7 @@ from .cfg import flagcats, onedash
 from .svchub import SvcHub
 from .util import (
     APPLESAN_TXT,
+    BAD_BOTS,
     DEF_EXP,
     DEF_MTE,
     DEF_MTH,
@@ -65,6 +66,7 @@ from .util import (
     load_resource,
     min_ex,
     pybin,
+    read_utf8,
     termsize,
     wrap,
 )
@@ -226,7 +228,23 @@ def init_E(EE: EnvParams) -> None:
     if E.mod.endswith("__init__"):
         E.mod = os.path.dirname(E.mod)
 
-    if sys.platform == "win32":
+    try:
+        p = os.environ.get("XDG_CONFIG_HOME")
+        if not p:
+            raise Exception()
+        if p.startswith("~"):
+            p = os.path.expanduser(p)
+        p = os.path.abspath(os.path.realpath(p))
+        p = os.path.join(p, "copyparty")
+        if not os.path.isdir(p):
+            os.mkdir(p)
+        os.listdir(p)
+    except:
+        p = ""
+
+    if p:
+        E.cfg = p
+    elif sys.platform == "win32":
         bdir = os.environ.get("APPDATA") or os.environ.get("TEMP") or "."
         E.cfg = os.path.normpath(bdir + "/copyparty")
     elif sys.platform == "darwin":
@@ -255,8 +273,7 @@ def get_srvname(verbose) -> str:
     if verbose:
         lprint("using hostname from {}\n".format(fp))
     try:
-        with open(fp, "rb") as f:
-            ret = f.read().decode("utf-8", "replace").strip()
+        return read_utf8(None, fp, True).strip()
     except:
         ret = ""
         namelen = 5
@@ -265,47 +282,18 @@ def get_srvname(verbose) -> str:
         ret = re.sub("[234567=]", "", ret)[:namelen]
         with open(fp, "wb") as f:
             f.write(ret.encode("utf-8") + b"\n")
-    return ret
+        return ret
 
 
-def get_fk_salt() -> str:
-    fp = os.path.join(E.cfg, "fk-salt.txt")
+def get_salt(name: str, nbytes: int) -> str:
+    fp = os.path.join(E.cfg, "%s-salt.txt" % (name,))
     try:
-        with open(fp, "rb") as f:
-            ret = f.read().strip()
+        return read_utf8(None, fp, True).strip()
     except:
-        ret = b64enc(os.urandom(18))
+        ret = b64enc(os.urandom(nbytes))
         with open(fp, "wb") as f:
             f.write(ret + b"\n")
-    return ret.decode("utf-8")
+        return ret.decode("utf-8")
-
-
-def get_dk_salt() -> str:
-    fp = os.path.join(E.cfg, "dk-salt.txt")
-    try:
-        with open(fp, "rb") as f:
-            ret = f.read().strip()
-    except:
-        ret = b64enc(os.urandom(30))
-        with open(fp, "wb") as f:
-            f.write(ret + b"\n")
-
-    return ret.decode("utf-8")
-
-
-def get_ah_salt() -> str:
-    fp = os.path.join(E.cfg, "ah-salt.txt")
-    try:
-        with open(fp, "rb") as f:
-            ret = f.read().strip()
-    except:
-        ret = b64enc(os.urandom(18))
-        with open(fp, "wb") as f:
-            f.write(ret + b"\n")
-
-    return ret.decode("utf-8")
 
 
 def ensure_locale() -> None:
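The three per-salt helpers are folded into a single parameterized one. Judging from the removed bodies, the former call sites presumably become something along these lines (names and byte counts are taken from the deleted functions; the actual call sites are not shown in this hunk):

fk_salt = get_salt("fk", 18)   # was get_fk_salt(): fk-salt.txt, urandom(18)
dk_salt = get_salt("dk", 30)   # was get_dk_salt(): dk-salt.txt, urandom(30)
ah_salt = get_salt("ah", 18)   # was get_ah_salt(): ah-salt.txt, urandom(18)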
@@ -559,14 +547,15 @@ def get_sects():
 when running behind a reverse-proxy, it's recommended to
 use unix-sockets for improved performance and security;
 
-\033[32m-i unix:770:www:\033[33m/tmp/a.sock\033[0m listens on \033[33m/tmp/a.sock\033[0m with
-permissions \033[33m0770\033[0m; only accessible to members of the \033[33mwww\033[0m
-group. This is the best approach. Alternatively,
+\033[32m-i unix:770:www:\033[33m/dev/shm/party.sock\033[0m listens on
+\033[33m/dev/shm/party.sock\033[0m with permissions \033[33m0770\033[0m;
+only accessible to members of the \033[33mwww\033[0m group.
+This is the best approach. Alternatively,
 
-\033[32m-i unix:777:\033[33m/tmp/a.sock\033[0m sets perms \033[33m0777\033[0m so anyone can
-access it; bad unless it's inside a restricted folder
+\033[32m-i unix:777:\033[33m/dev/shm/party.sock\033[0m sets perms \033[33m0777\033[0m so anyone
+can access it; bad unless it's inside a restricted folder
 
-\033[32m-i unix:\033[33m/tmp/a.sock\033[0m keeps umask-defined permissions
+\033[32m-i unix:\033[33m/dev/shm/party.sock\033[0m keeps umask-defined permission
 (usually \033[33m0600\033[0m) and the same user/group as copyparty
 
 \033[33m-p\033[0m (tcp ports) is ignored for unix sockets
@@ -875,6 +864,43 @@ def get_sects():
             """
         ),
     ],
+    [
+        "chmod",
+        "file/folder permissions",
+        dedent(
+            """
+            global-option \033[33m--chmod-f\033[0m and volflag \033[33mchmod_f\033[0m specifies the unix-permission to use when creating a new file
+
+            similarly, \033[33m--chmod-d\033[0m and \033[33mchmod_d\033[0m sets the directory/folder perm
+
+            the value is a three-digit octal number such as \033[32m755\033[0m, \033[32m750\033[0m, \033[32m644\033[0m, etc.
+
+            first digit = "User"; permission for the unix-user
+            second digit = "Group"; permission for the unix-group
+            third digit = "Other"; permission for all other users/groups
+
+            for files:
+            \033[32m0\033[0m = \033[35m---\033[0m = no access
+            \033[32m1\033[0m = \033[35m--x\033[0m = can execute the file as a program
+            \033[32m2\033[0m = \033[35m-w-\033[0m = can write
+            \033[32m3\033[0m = \033[35m-wx\033[0m = can write and execute
+            \033[32m4\033[0m = \033[35mr--\033[0m = can read
+            \033[32m5\033[0m = \033[35mr-x\033[0m = can read and execute
+            \033[32m6\033[0m = \033[35mrw-\033[0m = can read and write
+            \033[32m7\033[0m = \033[35mrwx\033[0m = can read, write, execute
+
+            for directories/folders:
+            \033[32m0\033[0m = \033[35m---\033[0m = no access
+            \033[32m1\033[0m = \033[35m--x\033[0m = can read files in folder but not list contents
+            \033[32m2\033[0m = \033[35m-w-\033[0m = n/a
+            \033[32m3\033[0m = \033[35m-wx\033[0m = can create files but not list
+            \033[32m4\033[0m = \033[35mr--\033[0m = can list, but not read/write
+            \033[32m5\033[0m = \033[35mr-x\033[0m = can list and read files
+            \033[32m6\033[0m = \033[35mrw-\033[0m = n/a
+            \033[32m7\033[0m = \033[35mrwx\033[0m = can read, write, list
+            """
+        ),
+    ],
     [
         "pwhash",
         "password hashing",
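As a usage note (the octal values come from the table above, the rest of the command line is illustrative): starting copyparty with `--chmod-f 644 --chmod-d 755` would create uploaded files as rw-r--r-- and new folders as rwxr-xr-x.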
@@ -976,6 +1002,7 @@ def add_general(ap, nc, srvname):
     ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
     ap2.add_argument("--mime", metavar="EXT=MIME", type=u, action="append", help="map file \033[33mEXT\033[0mension to \033[33mMIME\033[0mtype, for example [\033[32mjpg=image/jpeg\033[0m]")
     ap2.add_argument("--mimes", action="store_true", help="list default mimetype mapping and exit")
+    ap2.add_argument("--rmagic", action="store_true", help="do expensive analysis to improve accuracy of returned mimetypes; will make file-downloads, rss, and webdav slower (volflag=rmagic)")
     ap2.add_argument("--license", action="store_true", help="show licenses and exit")
     ap2.add_argument("--version", action="store_true", help="show versions and exit")
 
@@ -1015,16 +1042,22 @@ def add_upload(ap):
     ap2 = ap.add_argument_group('upload options')
     ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless \033[33m-ed\033[0m")
     ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
+    ap2.add_argument("--put-name", metavar="TXT", type=u, default="put-{now.6f}-{cip}.bin", help="filename for nameless uploads (when uploader doesn't provide a name); default is [\033[32mput-UNIXTIME-IP.bin\033[0m] (the \033[32m.6f\033[0m means six decimal places) (volflag=put_name)")
+    ap2.add_argument("--put-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for PUT/WebDAV uploads: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=put_ck)")
+    ap2.add_argument("--bup-ck", metavar="ALG", type=u, default="sha512", help="default checksum-hasher for bup/basic-uploader: no / md5 / sha1 / sha256 / sha512 / b2 / blake2 / b2s / blake2s (volflag=bup_ck)")
     ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled, default=12h")
     ap2.add_argument("--u2abort", metavar="NUM", type=int, default=1, help="clients can abort incomplete uploads by using the unpost tab (requires \033[33m-e2d\033[0m). [\033[32m0\033[0m] = never allowed (disable feature), [\033[32m1\033[0m] = allow if client has the same IP as the upload AND is using the same account, [\033[32m2\033[0m] = just check the IP, [\033[32m3\033[0m] = just check account-name (volflag=u2abort)")
     ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
     ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without \033[33m-e2d\033[0m; roughly 1 MiB RAM per 600")
     ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (bad idea to enable this on windows and/or cow filesystems)")
     ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
+    ap2.add_argument("--chmod-f", metavar="UGO", type=u, default="", help="unix file permissions to use when creating files; default is probably 644 (OS-decided), see --help-chmod. Examples: [\033[32m644\033[0m] = owner-RW + all-R, [\033[32m755\033[0m] = owner-RWX + all-RX, [\033[32m777\033[0m] = full-yolo (volflag=chmod_f)")
+    ap2.add_argument("--chmod-d", metavar="UGO", type=u, default="755", help="unix file permissions to use when creating directories; see --help-chmod. Examples: [\033[32m755\033[0m] = owner-RW + all-R, [\033[32m777\033[0m] = full-yolo (volflag=chmod_d)")
     ap2.add_argument("--dedup", action="store_true", help="enable symlink-based upload deduplication (volflag=dedup)")
     ap2.add_argument("--safe-dedup", metavar="N", type=int, default=50, help="how careful to be when deduplicating files; [\033[32m1\033[0m] = just verify the filesize, [\033[32m50\033[0m] = verify file contents have not been altered (volflag=safededup)")
     ap2.add_argument("--hardlink", action="store_true", help="enable hardlink-based dedup; will fallback on symlinks when that is impossible (across filesystems) (volflag=hardlink)")
     ap2.add_argument("--hardlink-only", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=hardlinkonly)")
+    ap2.add_argument("--reflink", action="store_true", help="enable reflink-based dedup; will fallback on full copies when that is impossible (non-CoW filesystem) (volflag=reflink)")
     ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
     ap2.add_argument("--no-clone", action="store_true", help="do not use existing data on disk to satisfy dupe uploads; reduces server HDD reads in exchange for much more network load (volflag=noclone)")
     ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
@@ -1037,8 +1070,10 @@ def add_upload(ap):
     ap2.add_argument("--df", metavar="GiB", type=u, default="0", help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests; assumes gigabytes unless a unit suffix is given: [\033[32m256m\033[0m], [\033[32m4\033[0m], [\033[32m2T\033[0m] (volflag=df)")
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
     ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
-    ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
+    ap2.add_argument("--nosubtle", metavar="N", type=int, default=0, help="when to use a wasm-hasher instead of the browser's builtin; faster on chrome, but buggy in older chrome versions. [\033[32m0\033[0m] = only when necessary (non-https), [\033[32m1\033[0m] = always (all browsers), [\033[32m2\033[0m] = always on chrome/firefox, [\033[32m3\033[0m] = always on chrome, [\033[32mN\033[0m] = chrome-version N and newer (recommendation: 137)")
+    ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good when latency is low (same-country), 2~4 for android-clients, 2~6 for cross-atlantic. Max is 6 in most browsers. Big values increase network-speed but may reduce HDD-speed")
     ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
+    ap2.add_argument("--u2ow", metavar="NUM", type=int, default=0, help="web-client: default setting for when to replace/overwrite existing files; [\033[32m0\033[0m]=never, [\033[32m1\033[0m]=if-client-newer, [\033[32m2\033[0m]=always (volflag=u2ow)")
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
 
@@ -1055,8 +1090,10 @@ def add_network(ap):
     ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
     if ANYWIN:
         ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
-    else:
+    elif not MACOS:
         ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
+    ap2.add_argument("--wr-h-eps", metavar="PATH", type=u, default="", help="write list of listening-on ip:port to textfile at \033[33mPATH\033[0m when http-servers have started")
+    ap2.add_argument("--wr-h-aon", metavar="PATH", type=u, default="", help="write list of accessible-on ip:port to textfile at \033[33mPATH\033[0m when http-servers have started")
     ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
     ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
     ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
@@ -1097,12 +1134,16 @@ def add_cert(ap, cert_path):
 
 
 def add_auth(ap):
+    idp_db = os.path.join(E.cfg, "idp.db")
     ses_db = os.path.join(E.cfg, "sessions.db")
     ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
     ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks if the request-header \033[33mHN\033[0m contains a username to associate the request with (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
     ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
     ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
     ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
+    ap2.add_argument("--idp-db", metavar="PATH", type=u, default=idp_db, help="where to store the known IdP users/groups (if you run multiple copyparty instances, make sure they use different DBs)")
+    ap2.add_argument("--idp-store", metavar="N", type=int, default=1, help="how to use \033[33m--idp-db\033[0m; [\033[32m0\033[0m] = entirely disable, [\033[32m1\033[0m] = write-only (effectively disabled), [\033[32m2\033[0m] = remember users, [\033[32m3\033[0m] = remember users and groups.\nNOTE: Will remember and restore the IdP-volumes of all users for all eternity if set to 2 or 3, even when user is deleted from your IdP")
+    ap2.add_argument("--idp-adm", metavar="U,U", type=u, default="", help="comma-separated list of users allowed to use /?idp (the cache management UI)")
     ap2.add_argument("--no-bauth", action="store_true", help="disable basic-authentication support; do not accept passwords from the 'Authenticate' header at all. NOTE: This breaks support for the android app")
     ap2.add_argument("--bauth-last", action="store_true", help="keeps basic-authentication enabled, but only as a last-resort; if a cookie is also provided then the cookie wins")
     ap2.add_argument("--ses-db", metavar="PATH", type=u, default=ses_db, help="where to store the sessions database (if you run multiple copyparty instances, make sure they use different DBs)")
@@ -1249,7 +1290,9 @@ def add_stats(ap):
 def add_yolo(ap):
     ap2 = ap.add_argument_group('yolo options')
     ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
+    ap2.add_argument("--cookie-lax", action="store_true", help="allow cookies from other domains (if you follow a link from another website into your server, you will arrive logged-in); this reduces protection against CSRF")
     ap2.add_argument("--getmod", action="store_true", help="permit ?move=[...] and ?delete as GET")
+    ap2.add_argument("--wo-up-readme", action="store_true", help="allow users with write-only access to upload logues and readmes without adding the _wo_ filename prefix (volflag=wo_up_readme)")
 
 
 def add_optouts(ap):
@@ -1264,12 +1307,18 @@ def add_optouts(ap):
     ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
     ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
     ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
+    ap2.add_argument("--zipmaxn", metavar="N", type=u, default="0", help="reject download-as-zip if more than \033[33mN\033[0m files in total; optionally takes a unit suffix: [\033[32m256\033[0m], [\033[32m9K\033[0m], [\033[32m4G\033[0m] (volflag=zipmaxn)")
+    ap2.add_argument("--zipmaxs", metavar="SZ", type=u, default="0", help="reject download-as-zip if total download size exceeds \033[33mSZ\033[0m bytes; optionally takes a unit suffix: [\033[32m256M\033[0m], [\033[32m4G\033[0m], [\033[32m2T\033[0m] (volflag=zipmaxs)")
+    ap2.add_argument("--zipmaxt", metavar="TXT", type=u, default="", help="custom errormessage when download size exceeds max (volflag=zipmaxt)")
+    ap2.add_argument("--zipmaxu", action="store_true", help="authenticated users bypass the zip size limit (volflag=zipmaxu)")
     ap2.add_argument("--zip-who", metavar="LVL", type=int, default=3, help="who can download as zip/tar? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=authenticated-with-read-access, [\033[32m3\033[0m]=everyone-with-read-access (volflag=zip_who)\n\033[1;31mWARNING:\033[0m if a nested volume has a more restrictive value than a parent volume, then this will be \033[33mignored\033[0m if the download is initiated from the parent, more lenient volume")
+    ap2.add_argument("--ua-nozip", metavar="PTN", type=u, default=BAD_BOTS, help="regex of user-agents to reject from download-as-zip/tar; disable with [\033[32mno\033[0m] or blank")
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar; same as \033[33m--zip-who=0\033[0m")
     ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
     ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
     ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
-    ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader IPs into the database")
+    ap2.add_argument("--no-tail", action="store_true", help="disable streaming a growing files with ?tail (volflag=notail)")
+    ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader-IP into the database; will also disable unpost, you may want \033[32m--forget-ip\033[0m instead (volflag=no_db_ip)")
 
 
 def add_safety(ap):
@@ -1289,6 +1338,7 @@ def add_safety(ap):
     ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
     ap2.add_argument("--logout", metavar="H", type=float, default=8086.0, help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
     ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
+    ap2.add_argument("--ban-pwc", metavar="N,W,B", type=u, default="5,60,1440", help="more than \033[33mN\033[0m password-changes in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
     ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
     ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
     ap2.add_argument("--ban-422", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 422's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (invalid requests, attempted exploits ++)")
@@ -1311,6 +1361,9 @@ def add_salt(ap, fk_salt, dk_salt, ah_salt):
     ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
     ap2.add_argument("--dk-salt", metavar="SALT", type=u, default=dk_salt, help="per-directory accesskey salt; used to generate unpredictable URLs to share folders with users who only have the 'get' permission")
     ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
+    ap2.add_argument("--show-ah-salt", action="store_true", help="on startup, print the effective value of \033[33m--ah-salt\033[0m (the autogenerated value in $XDG_CONFIG_HOME unless otherwise specified)")
+    ap2.add_argument("--show-fk-salt", action="store_true", help="on startup, print the effective value of \033[33m--fk-salt\033[0m (the autogenerated value in $XDG_CONFIG_HOME unless otherwise specified)")
+    ap2.add_argument("--show-dk-salt", action="store_true", help="on startup, print the effective value of \033[33m--dk-salt\033[0m (the autogenerated value in $XDG_CONFIG_HOME unless otherwise specified)")
 
 
 def add_shutdown(ap):
@@ -1352,7 +1405,7 @@ def add_admin(ap):
 
 def add_thumbnail(ap):
     th_ram = (RAM_AVAIL or RAM_TOTAL or 9) * 0.6
-    th_ram = int(max(min(th_ram, 6), 1) * 10) / 10
+    th_ram = int(max(min(th_ram, 6), 0.3) * 10) / 10
     ap2 = ap.add_argument_group('thumbnail options')
     ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
     ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
@@ -1379,7 +1432,8 @@ def add_thumbnail(ap):
     ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
     ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,cbz,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
     ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
-    ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
+    ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,oga,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
+    ap2.add_argument("--th-spec-cnv", metavar="T", type=u, default="it,itgz,itxz,itz,mdgz,mdxz,mdz,mo3,mod,s3m,s3gz,s3xz,s3z,xm,xmgz,xmxz,xmz,xpk", help="audio formats which provoke https://trac.ffmpeg.org/ticket/10797 (huge ram usage for s3xmodit spectrograms)")
     ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz, cbz=jpg.cbz", help="audio/image formats to decompress before passing to ffmpeg")
 
 
@@ -1394,6 +1448,16 @@ def add_transcoding(ap):
     ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
 
 
+def add_tail(ap):
+    ap2 = ap.add_argument_group('tailing options (realtime streaming of a growing file)')
+    ap2.add_argument("--tail-who", metavar="LVL", type=int, default=2, help="who can tail? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=authenticated-with-read-access, [\033[32m3\033[0m]=everyone-with-read-access (volflag=tail_who)")
+    ap2.add_argument("--tail-cmax", metavar="N", type=int, default=64, help="do not allow starting a new tail if more than \033[33mN\033[0m active downloads")
+    ap2.add_argument("--tail-tmax", metavar="SEC", type=float, default=0, help="terminate connection after \033[33mSEC\033[0m seconds; [\033[32m0\033[0m]=never (volflag=tail_tmax)")
+    ap2.add_argument("--tail-rate", metavar="SEC", type=float, default=0.2, help="check for new data every \033[33mSEC\033[0m seconds (volflag=tail_rate)")
+    ap2.add_argument("--tail-ka", metavar="SEC", type=float, default=3.0, help="send a zerobyte if connection is idle for \033[33mSEC\033[0m seconds to prevent disconnect")
+    ap2.add_argument("--tail-fd", metavar="SEC", type=float, default=1.0, help="check if file was replaced (new fd) if idle for \033[33mSEC\033[0m seconds (volflag=tail_fd)")
+
+
 def add_rss(ap):
     ap2 = ap.add_argument_group('RSS options')
     ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental) (volflag=rss)")
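The new tailing options pair with the ?tail URL parameter mentioned in --no-tail; as a client-side illustration (hostname, port and path are hypothetical, and assuming the default listening port), a growing file could be followed with something like `curl -N 'http://127.0.0.1:3923/logs/app.log?tail'`, where curl's -N disables output buffering so new data shows up as it arrives.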
@@ -1412,6 +1476,7 @@ def add_db_general(ap, hcores):
     ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
     ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
     ap2.add_argument("--hist", metavar="PATH", type=u, default="", help="where to store volume data (db, thumbs); default is a folder named \".hist\" inside each volume (volflag=hist)")
+    ap2.add_argument("--dbpath", metavar="PATH", type=u, default="", help="override where the volume databases are to be placed; default is the same as \033[33m--hist\033[0m (volflag=dbpath)")
     ap2.add_argument("--no-hash", metavar="PTN", type=u, default="", help="regex: disable hashing of matching absolute-filesystem-paths during e2ds folder scans (volflag=nohash)")
     ap2.add_argument("--no-idx", metavar="PTN", type=u, default=noidx, help="regex: disable indexing of matching absolute-filesystem-paths during e2ds folder scans (volflag=noidx)")
     ap2.add_argument("--no-dirsz", action="store_true", help="do not show total recursive size of folders in listings, show inode size instead; slightly faster (volflag=nodirsz)")
@@ -1419,6 +1484,7 @@ def add_db_general(ap, hcores):
     ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
     ap2.add_argument("--re-dhash", action="store_true", help="force a cache rebuild on startup; enable this once if it gets out of sync (should never be necessary)")
     ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice -- only useful for offloading uploads to a cloud service or something (volflag=noforget)")
+    ap2.add_argument("--forget-ip", metavar="MIN", type=int, default=0, help="remove uploader-IP from database (and make unpost impossible) \033[33mMIN\033[0m minutes after upload, for GDPR reasons. Default [\033[32m0\033[0m] is never-forget. [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month. (volflag=forget_ip)")
     ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see \033[33m--help-dbd\033[0m (volflag=dbd)")
     ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (probably buggy, not recommended) (volflag=xlink)")
     ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
@@ -1449,11 +1515,13 @@ def add_db_metadata(ap):
 
 def add_txt(ap):
     ap2 = ap.add_argument_group('textfile options')
+    ap2.add_argument("--md-hist", metavar="TXT", type=u, default="s", help="where to store old version of markdown files; [\033[32ms\033[0m]=subfolder, [\033[32mv\033[0m]=volume-histpath, [\033[32mn\033[0m]=nope/disabled (volflag=md_hist)")
     ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
     ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
     ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
     ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
     ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
+    ap2.add_argument("--ua-nodoc", metavar="PTN", type=u, default=BAD_BOTS, help="regex of user-agents to reject from viewing documents through ?doc=[...]; disable with [\033[32mno\033[0m] or blank")
 
 
 def add_og(ap):
@@ -1485,6 +1553,8 @@ def add_ui(ap, retry):
     ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
     ap2.add_argument("--nsort", action="store_true", help="default-enable natural sort of filenames with leading numbers (volflag=nsort)")
     ap2.add_argument("--hsortn", metavar="N", type=int, default=2, help="number of sorting rules to include in media URLs by default (volflag=hsortn)")
+    ap2.add_argument("--see-dots", action="store_true", help="default-enable seeing dotfiles; only takes effect if user has the necessary permissions")
+    ap2.add_argument("--qdel", metavar="LVL", type=int, default=2, help="number of confirmations to show when deleting files (2/1/0)")
     ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
     ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
     ap2.add_argument("--ext-th", metavar="E=VP", type=u, action="append", help="use thumbnail-image \033[33mVP\033[0m for file-extension \033[33mE\033[0m, example: [\033[32mexe=/.res/exe.png\033[0m] (volflag=ext_th)")
@@ -1499,7 +1569,7 @@ def add_ui(ap, retry):
|
|||||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
|
||||||
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
|
||||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default=URL_PRJ, help="powered-by link; disable with \033[33m-np\033[0m")
|
ap2.add_argument("--pb-url", metavar="URL", type=u, default=URL_PRJ, help="powered-by link; disable with \033[33m-nb\033[0m")
|
||||||
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
|
||||||
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||||
ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
|
||||||
@@ -1509,6 +1579,7 @@ def add_ui(ap, retry):
|
|||||||
ap2.add_argument("--lg-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for prologue/epilogue docs (volflag=lg_sba); see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy#iframes")
|
ap2.add_argument("--lg-sba", metavar="TXT", type=u, default="", help="the value of the iframe 'allow' attribute for prologue/epilogue docs (volflag=lg_sba); see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Permissions-Policy#iframes")
|
||||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
|
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
|
||||||
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
||||||
|
ap2.add_argument("--have-unlistc", action="store_true", help=argparse.SUPPRESS)
|
||||||
|
|
||||||
|
|
||||||
def add_debug(ap):
|
def add_debug(ap):
|
||||||
@@ -1550,9 +1621,9 @@ def run_argparse(
|
|||||||
|
|
||||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||||
|
|
||||||
fk_salt = get_fk_salt()
|
fk_salt = get_salt("fk", 18)
|
||||||
dk_salt = get_dk_salt()
|
dk_salt = get_salt("dk", 30)
|
||||||
ah_salt = get_ah_salt()
|
ah_salt = get_salt("ah", 18)
|
||||||
|
|
||||||
# alpine peaks at 5 threads for some reason,
|
# alpine peaks at 5 threads for some reason,
|
||||||
# all others scale past that (but try to avoid SMT),
|
# all others scale past that (but try to avoid SMT),
|
||||||
@@ -1594,6 +1665,7 @@ def run_argparse(
|
|||||||
add_hooks(ap)
|
add_hooks(ap)
|
||||||
add_stats(ap)
|
add_stats(ap)
|
||||||
add_txt(ap)
|
add_txt(ap)
|
||||||
|
add_tail(ap)
|
||||||
add_og(ap)
|
add_og(ap)
|
||||||
add_ui(ap, retry)
|
add_ui(ap, retry)
|
||||||
add_admin(ap)
|
add_admin(ap)
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 16, 13)
-CODENAME = "COPYparty"
-BUILD_DT = (2025, 2, 13)
+VERSION = (1, 18, 6)
+CODENAME = "logtail"
+BUILD_DT = (2025, 7, 28)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

(file diff suppressed because it is too large)

@@ -25,14 +25,26 @@ def listdir(p: str = ".") -> list[str]:


 def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool:
+    # os.makedirs does 777 for all but leaf; this does mode on all
+    todo = []
     bname = fsenc(name)
-    try:
-        os.makedirs(bname, mode)
-        return True
-    except:
-        if not exist_ok or not os.path.isdir(bname):
-            raise
+    while bname:
+        if os.path.isdir(bname):
+            break
+        todo.append(bname)
+        bname = os.path.dirname(bname)
+    if not todo:
+        if not exist_ok:
+            os.mkdir(bname)  # to throw
         return False
+    for zb in todo[::-1]:
+        try:
+            os.mkdir(zb, mode)
+        except:
+            if os.path.isdir(zb):
+                continue
+            raise
+    return True


 def mkdir(p: str, mode: int = 0o755) -> None:

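The makedirs rewrite above exists because `os.makedirs` only applies the requested mode to the leaf directory; any intermediate directories it has to create get the default permissions instead. A minimal standalone demonstration of that difference (plain Python, not copyparty code; the printed modes assume a typical 022 umask):

```python
import os
import stat
import tempfile

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "a/b/c"), 0o700)
for p in ("a", "a/b", "a/b/c"):
    mode = stat.S_IMODE(os.stat(os.path.join(root, p)).st_mode)
    # intermediate dirs get the default 0o777 minus umask (usually 0o755);
    # only the leaf "c" receives the requested 0o700
    print(p, oct(mode))
```

Walking up the path and calling `os.mkdir(dir, mode)` on every missing component, as the new code does, gives all created directories the configured mode.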
@@ -1,13 +1,11 @@
 import calendar
 import errno
-import filecmp
 import json
 import os
-import shutil
 import time

 from .__init__ import ANYWIN
-from .util import Netdev, load_resource, runcmd, wrename, wunlink
+from .util import Netdev, atomic_move, load_resource, runcmd, wunlink

 HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")

@@ -122,7 +120,7 @@ def _gen_ca(log: "RootLogger", args):
         wunlink(nlog, bname + ".key", VF)
     except:
         pass
-    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    atomic_move(nlog, bname + "-key.pem", bname + ".key", VF)
     wunlink(nlog, bname + ".csr", VF)

     log("cert", "new ca OK", 2)
@@ -215,7 +213,7 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
         wunlink(nlog, bname + ".key", VF)
     except:
         pass
-    wrename(nlog, bname + "-key.pem", bname + ".key", VF)
+    atomic_move(nlog, bname + "-key.pem", bname + ".key", VF)
     wunlink(nlog, bname + ".csr", VF)

     with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:

@@ -22,6 +22,7 @@ def vf_bmap() -> dict[str, str]:
         "no_forget": "noforget",
         "no_pipe": "nopipe",
         "no_robots": "norobots",
+        "no_tail": "notail",
         "no_thumb": "dthumb",
         "no_vthumb": "dvthumb",
         "no_athumb": "dathumb",
@@ -43,6 +44,7 @@ def vf_bmap() -> dict[str, str]:
         "gsel",
         "hardlink",
         "magic",
+        "no_db_ip",
         "no_sb_md",
         "no_sb_lg",
         "nsort",
@@ -50,10 +52,14 @@ def vf_bmap() -> dict[str, str]:
         "og_no_head",
         "og_s_title",
         "rand",
+        "reflink",
+        "rmagic",
         "rss",
+        "wo_up_readme",
         "xdev",
         "xlink",
         "xvol",
+        "zipmaxu",
     ):
         ret[k] = k
     return ret
@@ -72,14 +78,20 @@ def vf_vmap() -> dict[str, str]:
         "th_x3": "th3x",
     }
     for k in (
+        "bup_ck",
+        "chmod_d",
+        "chmod_f",
         "dbd",
+        "forget_ip",
         "hsortn",
         "html_head",
         "lg_sbf",
         "md_sbf",
         "lg_sba",
         "md_sba",
+        "md_hist",
         "nrand",
+        "u2ow",
         "og_desc",
         "og_site",
         "og_th",
@@ -89,15 +101,24 @@ def vf_vmap() -> dict[str, str]:
         "og_title_i",
         "og_tpl",
         "og_ua",
+        "put_ck",
+        "put_name",
         "mv_retry",
         "rm_retry",
         "sort",
+        "tail_fd",
+        "tail_rate",
+        "tail_tmax",
+        "tail_who",
         "tcolor",
         "unlist",
         "u2abort",
         "u2ts",
         "ups_who",
         "zip_who",
+        "zipmaxn",
+        "zipmaxs",
+        "zipmaxt",
     ):
         ret[k] = k
     return ret
@@ -148,15 +169,22 @@ flagcats = {
         "dedup": "enable symlink-based file deduplication",
         "hardlink": "enable hardlink-based file deduplication,\nwith fallback on symlinks when that is impossible",
         "hardlinkonly": "dedup with hardlink only, never symlink;\nmake a full copy if hardlink is impossible",
+        "reflink": "enable reflink-based file deduplication,\nwith fallback on full copy when that is impossible",
         "safededup": "verify on-disk data before using it for dedup",
         "noclone": "take dupe data from clients, even if available on HDD",
         "nodupe": "rejects existing files (instead of linking/cloning them)",
+        "chmod_d=755": "unix-permission for new dirs/folders",
+        "chmod_f=644": "unix-permission for new files",
         "sparse": "force use of sparse files, mainly for s3-backed storage",
         "nosparse": "deny use of sparse files, mainly for slow storage",
         "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
         "nosub": "forces all uploads into the top folder of the vfs",
         "magic": "enables filetype detection for nameless uploads",
-        "gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
+        "put_name": "fallback filename for nameless uploads",
+        "put_ck": "default checksum-hasher for PUT/WebDAV uploads",
+        "bup_ck": "default checksum-hasher for bup/basic uploads",
+        "gz": "allows server-side gzip compression of uploads with ?gz",
+        "xz": "allows server-side lzma compression of uploads with ?xz",
         "pk": "forces server-side compression, optional arg: xz,9",
     },
     "upload rules": {
@@ -165,8 +193,10 @@ flagcats = {
         "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
         "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
         "medialinks": "return medialinks for non-up2k uploads (not hotlinks)",
+        "wo_up_readme": "write-only users can upload logues without getting renamed",
         "rand": "force randomized filenames, 9 chars long by default",
         "nrand=N": "randomized filenames are N chars long",
+        "u2ow=N": "overwrite existing files? 0=no 1=if-older 2=always",
         "u2ts=fc": "[f]orce [c]lient-last-modified or [u]pload-time",
         "u2abort=1": "allow aborting unfinished uploads? 0=no 1=strict 2=ip-chk 3=acct-chk",
         "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
@@ -193,10 +223,14 @@ flagcats = {
         "d2v": "disables file verification, overrides -e2v*",
         "d2d": "disables all database stuff, overrides -e2*",
         "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
+        "dbpath=/tmp/cdb": "puts indexes at that location",
+        "landmark=foo": "disable db if file foo doesn't exist",
         "scan=60": "scan for new files every 60sec, same as --re-maxage",
         "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
         "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
         "noforget": "don't forget files when deleted from disk",
+        "forget_ip=43200": "forget uploader-IP after 30 days (GDPR)",
+        "no_db_ip": "never store uploader-IP in the db; disables unpost",
         "fat32": "avoid excessive reindexing on android sdcardfs",
         "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
         "xlink": "cross-volume dupe detection / linking (dangerous)",
@@ -253,6 +287,8 @@ flagcats = {
         "nodirsz": "don't show total folder size",
         "robots": "allows indexing by search engines (default)",
         "norobots": "kindly asks search engines to leave",
+        "unlistcr": "don't list read-access in controlpanel",
+        "unlistcw": "don't list write-access in controlpanel",
         "no_sb_md": "disable js sandbox for markdown files",
         "no_sb_lg": "disable js sandbox for prologue/epilogue",
         "sb_md": "enable js sandbox for markdown files (default)",
@@ -278,17 +314,33 @@ flagcats = {
         "og_ua": "if defined: only send OG html if useragent matches this regex",
     },
     "textfiles": {
+        "md_hist": "where to put markdown backups; s=subfolder, v=volHist, n=nope",
         "exp": "enable textfile expansion; see --help-exp",
         "exp_md": "placeholders to expand in markdown files; see --help",
         "exp_lg": "placeholders to expand in prologue/epilogue; see --help",
     },
+    "tailing": {
+        "notail": "disable ?tail (download a growing file continuously)",
+        "tail_fd=1": "check if file was replaced (new fd) every 1 sec",
+        "tail_rate=0.2": "check for new data every 0.2 sec",
+        "tail_tmax=30": "kill connection after 30 sec",
+        "tail_who=2": "restrict ?tail access (1=admins,2=authed,3=everyone)",
+    },
     "others": {
         "dots": "allow all users with read-access to\nenable the option to show dotfiles in listings",
         "fk=8": 'generates per-file accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
         "fka=8": 'generates slightly weaker per-file accesskeys,\nwhich are then required at the "g" permission;\nnot affected by filesize or inode numbers',
+        "dk=8": 'generates per-directory accesskeys,\nwhich are then required at the "g" permission;\nkeys are invalidated if filesize or inode changes',
+        "dks": "per-directory accesskeys allow browsing into subdirs",
+        "dky": 'allow seeing files (not folders) inside a specific folder\nwith "g" perm, and does not require a valid dirkey to do so',
         "rss": "allow '?rss' URL suffix (experimental)",
+        "rmagic": "expensive analysis for mimetype accuracy",
         "ups_who=2": "restrict viewing the list of recent uploads",
         "zip_who=2": "restrict access to download-as-zip/tar",
+        "zipmaxn=9k": "reject download-as-zip if more than 9000 files",
+        "zipmaxs=2g": "reject download-as-zip if size over 2 GiB",
+        "zipmaxt=no": "reply with 'no' if download-as-zip exceeds max",
+        "zipmaxu": "zip-size-limit does not apply to authenticated users",
         "nopipe": "disable race-the-beam (download unfinished uploads)",
        "mv_retry": "ms-windows: timeout for renaming busy files",
        "rm_retry": "ms-windows: timeout for deleting busy files",

@@ -78,7 +78,7 @@ class Fstab(object):
         return vid

     def build_fallback(self) -> None:
-        self.tab = VFS(self.log_func, "idk", "/", AXS(), {})
+        self.tab = VFS(self.log_func, "idk", "/", "/", AXS(), {})
         self.trusted = False

     def build_tab(self) -> None:
@@ -111,9 +111,10 @@ class Fstab(object):

         tab1.sort(key=lambda x: (len(x[0]), x[0]))
         path1, fs1 = tab1[0]
-        tab = VFS(self.log_func, fs1, path1, AXS(), {})
+        tab = VFS(self.log_func, fs1, path1, path1, AXS(), {})
         for path, fs in tab1[1:]:
-            tab.add(fs, path.lstrip("/"))
+            zs = path.lstrip("/")
+            tab.add(fs, zs, zs)

         self.tab = tab
         self.srctab = srctab
@@ -130,9 +131,10 @@ class Fstab(object):
         if not self.trusted:
             # no mtab access; have to build as we go
             if "/" in rem:
-                self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0]))
+                zs = os.path.join(vn.vpath, rem.split("/")[0])
+                self.tab.add("idk", zs, zs)
             if rem:
-                self.tab.add(nval, path)
+                self.tab.add(nval, path, path)
             else:
                 vn.realpath = nval

@@ -19,6 +19,7 @@ from .__init__ import PY2, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
 from .util import (
+    FN_EMB,
     VF_CAREFUL,
     Daemon,
     ODict,
@@ -170,6 +171,16 @@ class FtpFs(AbstractedFS):
         fn = sanitize_fn(fn or "", "")
         vpath = vjoin(rd, fn)
         vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
+        if (
+            w
+            and fn.lower() in FN_EMB
+            and self.h.uname not in vfs.axs.uread
+            and "wo_up_readme" not in vfs.flags
+        ):
+            fn = "_wo_" + fn
+            vpath = vjoin(rd, fn)
+            vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)

         if not vfs.realpath:
             t = "No filesystem mounted at [{}]"
             raise FSE(t.format(vpath))
@@ -218,7 +229,7 @@ class FtpFs(AbstractedFS):
         r = "r" in mode
         w = "w" in mode or "a" in mode or "+" in mode

-        ap = self.rv2a(filename, r, w)[0]
+        ap, vfs, _ = self.rv2a(filename, r, w)
         self.validpath(ap)
         if w:
             try:
@@ -250,7 +261,11 @@ class FtpFs(AbstractedFS):

                 wunlink(self.log, ap, VF_CAREFUL)

-        return open(fsenc(ap), mode, self.args.iobuf)
+        ret = open(fsenc(ap), mode, self.args.iobuf)
+        if w and "chmod_f" in vfs.flags:
+            os.fchmod(ret.fileno(), vfs.flags["chmod_f"])
+
+        return ret

     def chdir(self, path: str) -> None:
         nwd = join(self.cwd, path)
@@ -281,8 +296,9 @@ class FtpFs(AbstractedFS):
         ) = avfs.can_access("", self.h.uname)

     def mkdir(self, path: str) -> None:
-        ap = self.rv2a(path, w=True)[0]
-        bos.makedirs(ap)  # filezilla expects this
+        ap, vfs, _ = self.rv2a(path, w=True)
+        chmod = vfs.flags["chmod_d"]
+        bos.makedirs(ap, chmod)  # filezilla expects this

     def listdir(self, path: str) -> list[str]:
         vpath = join(self.cwd, path)

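The FTP `open()` change applies a per-volume file mode right after the file is created, using `os.fchmod` on the already-open descriptor instead of a separate chmod-by-path. A small standalone sketch of the same pattern (the `flags` dict here is a hypothetical stand-in for the volume configuration, not copyparty's API):

```python
import os

def open_for_upload(path: str, flags: dict):
    # create the file, then fix its permissions on the open descriptor;
    # operating on the fd avoids racing against a rename/replace of the path
    f = open(path, "wb")
    if "chmod_f" in flags:
        os.fchmod(f.fileno(), flags["chmod_f"])
    return f

f = open_for_upload("/tmp/demo.bin", {"chmod_f": 0o644})
f.close()
```

(`os.fchmod` is POSIX-only, which matches the unix-permission volflags this feature serves.)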
(file diff suppressed because it is too large)

@@ -224,3 +224,6 @@ class HttpConn(object):
         if self.u2idx:
             self.hsrv.put_u2idx(str(self.addr), self.u2idx)
             self.u2idx = None

+        if self.rproxy:
+            self.set_rproxy()

@@ -123,6 +123,7 @@ class HttpSrv(object):
         self.nm = NetMap([], [])
         self.ssdp: Optional["SSDPr"] = None
         self.gpwd = Garda(self.args.ban_pw)
+        self.gpwc = Garda(self.args.ban_pwc)
         self.g404 = Garda(self.args.ban_404)
         self.g403 = Garda(self.args.ban_403)
         self.g422 = Garda(self.args.ban_422, False)
@@ -175,6 +176,7 @@ class HttpSrv(object):
             "browser",
             "browser2",
             "cf",
+            "idp",
             "md",
             "mde",
             "msg",
@@ -313,6 +315,8 @@ class HttpSrv(object):

         Daemon(self.broker.say, "sig-hsrv-up1", ("cb_httpsrv_up",))

+        saddr = ("", 0)  # fwd-decl for `except TypeError as ex:`
+
         while not self.stopping:
             if self.args.log_conn:
                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="90")
@@ -394,6 +398,19 @@ class HttpSrv(object):
                 self.log(self.name, "accept({}): {}".format(fno, ex), c=6)
                 time.sleep(0.02)
                 continue
+            except TypeError as ex:
+                # on macOS, accept() may return a None saddr if blocked by LittleSnitch;
+                # unicode(saddr[0]) ==> TypeError: 'NoneType' object is not subscriptable
+                if tcp and not saddr:
+                    t = "accept(%s): failed to accept connection from client due to firewall or network issue"
+                    self.log(self.name, t % (fno,), c=3)
+                    try:
+                        sck.close()  # type: ignore
+                    except:
+                        pass
+                    time.sleep(0.02)
+                    continue
+                raise

             if self.args.log_conn:
                 t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(

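The new except-branch above keeps the listener alive when `accept()` hands back a `None` peer address (seen on macOS when an application firewall interferes), rather than letting the resulting `TypeError` kill the accept loop. A reduced sketch of that defensive pattern, standalone and illustrative only (the real server does considerably more bookkeeping); calling it blocks until a client connects:

```python
import socket
import time

srv = socket.socket()
srv.bind(("127.0.0.1", 0))
srv.listen(5)

def accept_one(srv: socket.socket):
    while True:
        try:
            sck, saddr = srv.accept()
            str(saddr[0])  # raises TypeError if the OS gave us saddr=None
            return sck, saddr
        except TypeError:
            # drop the broken socket and keep serving instead of crashing
            try:
                sck.close()
            except Exception:
                pass
            time.sleep(0.02)
```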
@@ -94,10 +94,21 @@ class Ico(object):
 <?xml version="1.0" encoding="UTF-8"?>
 <svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
 <rect width="100%" height="100%" fill="#{}" />
-<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
+<text x="50%" y="{}" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
  fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
 </g></svg>
 """
-        svg = svg.format(h, c[:6], c[6:], html_escape(ext, True))
+        txt = html_escape(ext, True)
+        if "\n" in txt:
+            lines = txt.split("\n")
+            n = len(lines)
+            y = "20%" if n == 2 else "10%" if n == 3 else "0"
+            zs = '<tspan x="50%%" dy="1.2em">%s</tspan>'
+            txt = "".join([zs % (x,) for x in lines])
+        else:
+            y = "50%"
+
+        svg = svg.format(h, c[:6], y, c[6:], txt)

         return "image/svg+xml", svg.encode("utf-8")

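The icon generator now splits multi-line labels into SVG `<tspan>` elements, since a single `<text>` node does not render newlines; the vertical anchor is then nudged upward so the stacked lines stay roughly centered. A standalone sketch of that layout trick (illustrative markup, not the exact copyparty template):

```python
def svg_label(txt: str) -> str:
    lines = txt.split("\n")
    if len(lines) == 1:
        body, y = txt, "50%"
    else:
        # shift the anchor up so the stacked tspans stay visually centered
        y = {2: "20%", 3: "10%"}.get(len(lines), "0")
        body = "".join('<tspan x="50%%" dy="1.2em">%s</tspan>' % (s,) for s in lines)
    return '<text x="50%%" y="%s" text-anchor="middle">%s</text>' % (y, body)

print(svg_label("tar\ngz"))
```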
@@ -18,6 +18,7 @@ from .util import (
     REKOBO_LKEY,
     VF_CAREFUL,
     fsenc,
+    gzip,
     min_ex,
     pybin,
     retchk,
@@ -138,8 +139,6 @@ def au_unpk(
     fd, ret = tempfile.mkstemp("." + au)

     if pk == "gz":
-        import gzip
-
         fi = gzip.GzipFile(abspath, mode="rb")

     elif pk == "xz":
@@ -167,12 +166,13 @@ def au_unpk(
         znil = [x for x in znil if "cover" in x[0]] or znil
         znil = [x for x in znil if CBZ_01.search(x[0])] or znil
         t = "cbz: %d files, %d hits" % (nf, len(znil))
+        using = sorted(znil)[0][1].filename
         if znil:
-            t += ", using " + znil[0][1].filename
+            t += ", using " + using
         log(t)
         if not znil:
             raise Exception("no images inside cbz")
-        fi = zf.open(znil[0][1])
+        fi = zf.open(using)

     else:
         raise Exception("unknown compression %s" % (pk,))

@@ -15,7 +15,7 @@ try:
         raise Exception()

     HAVE_ARGON2 = True
-    from argon2 import __version__ as argon2ver
+    from argon2 import exceptions as argon2ex
 except:
     HAVE_ARGON2 = False

@@ -320,7 +320,7 @@ class SMB(object):

         self.hub.up2k.handle_mv(uname, "1.7.6.2", vp1, vp2)
         try:
-            bos.makedirs(ap2)
+            bos.makedirs(ap2, vfs2.flags["chmod_d"])
         except:
             pass

@@ -334,7 +334,7 @@ class SMB(object):
             t = "blocked mkdir (no-write-acc %s): /%s @%s"
             yeet(t % (vfs.axs.uwrite, vpath, uname))

-        return bos.mkdir(ap)
+        return bos.mkdir(ap, vfs.flags["chmod_d"])

     def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result:
         try:

@@ -17,6 +17,9 @@ if True:  # pylint: disable=using-constant-test
     from .util import NamedLogger


+TAR_NO_OPUS = set("aac|m4a|mp3|oga|ogg|opus|wma".split("|"))
+
+
 class StreamArc(object):
     def __init__(
         self,
@@ -82,9 +85,7 @@ def enthumb(
 ) -> dict[str, Any]:
     rem = f["vp"]
     ext = rem.rsplit(".", 1)[-1].lower()
-    if (fmt == "mp3" and ext == "mp3") or (
-        fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|")
-    ):
+    if (fmt == "mp3" and ext == "mp3") or (fmt == "opus" and ext in TAR_NO_OPUS):
         raise Exception()

     vp = vjoin(vtop, rem.split("/", 1)[1])

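Hoisting the extension list into a module-level `TAR_NO_OPUS` set means the split and list allocation happen once at import time and membership checks become set lookups; it also lets the diff extend coverage (the new set includes `oga`, which the old inline list did not). A one-liner illustration in plain Python:

```python
TAR_NO_OPUS = set("aac|m4a|mp3|oga|ogg|opus|wma".split("|"))
print("m4a" in TAR_NO_OPUS, "flac" in TAR_NO_OPUS)  # True False
```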
@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import errno
|
import errno
|
||||||
import gzip
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -28,6 +27,7 @@ if True: # pylint: disable=using-constant-test
|
|||||||
|
|
||||||
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unicode
|
||||||
from .authsrv import BAD_CFG, AuthSrv
|
from .authsrv import BAD_CFG, AuthSrv
|
||||||
|
from .bos import bos
|
||||||
from .cert import ensure_cert
|
from .cert import ensure_cert
|
||||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
|
||||||
from .pwhash import HAVE_ARGON2
|
from .pwhash import HAVE_ARGON2
|
||||||
@@ -63,7 +63,9 @@ from .util import (
|
|||||||
ansi_re,
|
ansi_re,
|
||||||
build_netmap,
|
build_netmap,
|
||||||
expat_ver,
|
expat_ver,
|
||||||
|
gzip,
|
||||||
load_ipu,
|
load_ipu,
|
||||||
|
lock_file,
|
||||||
min_ex,
|
min_ex,
|
||||||
mp,
|
mp,
|
||||||
odfusion,
|
odfusion,
|
||||||
@@ -73,6 +75,9 @@ from .util import (
|
|||||||
ub64enc,
|
ub64enc,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if HAVE_SQLITE3:
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
try:
|
try:
|
||||||
from .mdns import MDNS
|
from .mdns import MDNS
|
||||||
@@ -84,6 +89,11 @@ if PY2:
|
|||||||
range = xrange # type: ignore
|
range = xrange # type: ignore
|
||||||
|
|
||||||
|
|
||||||
|
VER_IDP_DB = 1
|
||||||
|
VER_SESSION_DB = 1
|
||||||
|
VER_SHARES_DB = 2
|
||||||
|
|
||||||
|
|
||||||
class SvcHub(object):
|
class SvcHub(object):
|
||||||
"""
|
"""
|
||||||
Hosts all services which cannot be parallelized due to reliance on monolithic resources.
|
Hosts all services which cannot be parallelized due to reliance on monolithic resources.
|
||||||
@@ -158,6 +168,7 @@ class SvcHub(object):
|
|||||||
# for non-http clients (ftp, tftp)
|
# for non-http clients (ftp, tftp)
|
||||||
self.bans: dict[str, int] = {}
|
self.bans: dict[str, int] = {}
|
||||||
self.gpwd = Garda(self.args.ban_pw)
|
self.gpwd = Garda(self.args.ban_pw)
|
||||||
|
self.gpwc = Garda(self.args.ban_pwc)
|
||||||
self.g404 = Garda(self.args.ban_404)
|
self.g404 = Garda(self.args.ban_404)
|
||||||
self.g403 = Garda(self.args.ban_403)
|
self.g403 = Garda(self.args.ban_403)
|
||||||
self.g422 = Garda(self.args.ban_422, False)
|
self.g422 = Garda(self.args.ban_422, False)
|
||||||
@@ -186,8 +197,14 @@ class SvcHub(object):
|
|||||||
|
|
||||||
if not args.use_fpool and args.j != 1:
|
if not args.use_fpool and args.j != 1:
|
||||||
args.no_fpool = True
|
args.no_fpool = True
|
||||||
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
|
t = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems, and make some antivirus-softwares "
|
||||||
self.log("root", t.format(args.j))
|
c = 0
|
||||||
|
if ANYWIN:
|
||||||
|
t += "(especially Microsoft Defender) stress your CPU and HDD severely during big uploads"
|
||||||
|
c = 3
|
||||||
|
else:
|
||||||
|
t += "consume more resources (CPU/HDD) than normal"
|
||||||
|
self.log("root", t.format(args.j), c)
|
||||||
|
|
||||||
if not args.no_fpool and args.j != 1:
|
if not args.no_fpool and args.j != 1:
|
||||||
t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
|
t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
|
||||||
@@ -239,8 +256,20 @@ class SvcHub(object):
|
|||||||
setattr(args, "ipu_iu", iu)
|
setattr(args, "ipu_iu", iu)
|
||||||
setattr(args, "ipu_nm", nm)
|
setattr(args, "ipu_nm", nm)
|
||||||
|
|
||||||
|
for zs in "ah_salt fk_salt dk_salt".split():
|
||||||
|
if getattr(args, "show_%s" % (zs,)):
|
||||||
|
self.log("root", "effective %s is %s" % (zs, getattr(args, zs)))
|
||||||
|
|
||||||
|
if args.ah_cli or args.ah_gen:
|
||||||
|
args.idp_store = 0
|
||||||
|
args.no_ses = True
|
||||||
|
args.shr = ""
|
||||||
|
|
||||||
|
if args.idp_store and args.idp_h_usr:
|
||||||
|
self.setup_db("idp")
|
||||||
|
|
||||||
if not self.args.no_ses:
|
if not self.args.no_ses:
|
||||||
self.setup_session_db()
|
self.setup_db("ses")
|
||||||
|
|
||||||
args.shr1 = ""
|
args.shr1 = ""
|
||||||
if args.shr:
|
if args.shr:
|
||||||
@@ -399,33 +428,91 @@ class SvcHub(object):
|
|||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def setup_session_db(self) -> None:
|
def _db_onfail_ses(self) -> None:
|
||||||
|
self.args.no_ses = True
|
||||||
|
|
||||||
|
def _db_onfail_idp(self) -> None:
|
||||||
|
self.args.idp_store = 0
|
||||||
|
|
||||||
|
def setup_db(self, which: str) -> None:
|
||||||
|
"""
|
||||||
|
the "non-mission-critical" databases; if something looks broken then just nuke it
|
||||||
|
"""
|
||||||
|
if which == "ses":
|
||||||
|
native_ver = VER_SESSION_DB
|
||||||
|
db_path = self.args.ses_db
|
||||||
|
desc = "sessions-db"
|
||||||
|
pathopt = "ses-db"
|
||||||
|
sanchk_q = "select count(*) from us"
|
||||||
|
createfun = self._create_session_db
|
||||||
|
failfun = self._db_onfail_ses
|
||||||
|
elif which == "idp":
|
||||||
|
native_ver = VER_IDP_DB
|
||||||
|
db_path = self.args.idp_db
|
||||||
|
desc = "idp-db"
|
||||||
|
pathopt = "idp-db"
|
||||||
|
sanchk_q = "select count(*) from us"
|
||||||
|
createfun = self._create_idp_db
|
||||||
|
failfun = self._db_onfail_idp
|
||||||
|
else:
|
||||||
|
raise Exception("unknown cachetype")
|
||||||
|
|
||||||
|
if not db_path.endswith(".db"):
|
||||||
|
zs = "config option --%s (the %s) was configured to [%s] which is invalid; must be a filepath ending with .db"
|
||||||
|
self.log("root", zs % (pathopt, desc, db_path), 1)
|
||||||
|
raise Exception(BAD_CFG)
|
||||||
|
|
||||||
if not HAVE_SQLITE3:
|
if not HAVE_SQLITE3:
|
||||||
self.args.no_ses = True
|
failfun()
|
||||||
t = "WARNING: sqlite3 not available; disabling sessions, will use plaintext passwords in cookies"
|
if which == "ses":
|
||||||
self.log("root", t, 3)
|
zs = "disabling sessions, will use plaintext passwords in cookies"
|
||||||
|
elif which == "idp":
|
||||||
|
zs = "disabling idp-db, will be unable to remember IdP-volumes after a restart"
|
||||||
|
self.log("root", "WARNING: sqlite3 not available; %s" % (zs,), 3)
|
||||||
return
|
return
|
||||||
|
|
||||||
import sqlite3
|
assert sqlite3 # type: ignore # !rm
|
||||||
|
|
||||||
create = True
|
db_lock = db_path + ".lock"
|
||||||
db_path = self.args.ses_db
|
try:
|
||||||
self.log("root", "opening sessions-db %s" % (db_path,))
|
create = not os.path.getsize(db_path)
|
||||||
for n in range(2):
|
except:
|
||||||
|
create = True
|
||||||
|
zs = "creating new" if create else "opening"
|
||||||
|
self.log("root", "%s %s %s" % (zs, desc, db_path))
|
||||||
|
|
||||||
|
for tries in range(2):
|
||||||
|
sver = 0
|
||||||
try:
|
try:
|
||||||
db = sqlite3.connect(db_path)
|
db = sqlite3.connect(db_path)
|
||||||
cur = db.cursor()
|
cur = db.cursor()
|
||||||
try:
|
try:
|
||||||
cur.execute("select count(*) from us").fetchone()
|
zs = "select v from kv where k='sver'"
|
||||||
create = False
|
sver = cur.execute(zs).fetchall()[0][0]
|
||||||
break
|
if sver > native_ver:
|
||||||
|
zs = "this version of copyparty only understands %s v%d and older; the db is v%d"
|
||||||
|
raise Exception(zs % (desc, native_ver, sver))
|
||||||
|
|
||||||
|
cur.execute(sanchk_q).fetchone()
|
||||||
except:
|
except:
|
||||||
pass
|
if sver:
|
||||||
|
raise
|
||||||
|
sver = createfun(cur)
|
||||||
|
|
||||||
|
err = self._verify_db(
|
||||||
|
cur, which, pathopt, db_path, desc, sver, native_ver
|
||||||
|
)
|
||||||
|
if err:
|
||||||
|
tries = 99
|
||||||
|
self.args.no_ses = True
|
||||||
|
self.log("root", err, 3)
|
||||||
|
break
|
||||||
|
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
if n:
|
if tries or sver > native_ver:
|
||||||
raise
|
raise
|
||||||
t = "sessions-db corrupt; deleting and recreating: %r"
|
t = "%s is unusable; deleting and recreating: %r"
|
||||||
self.log("root", t % (ex,), 3)
|
self.log("root", t % (desc, ex), 3)
|
||||||
try:
|
try:
|
||||||
cur.close() # type: ignore
|
cur.close() # type: ignore
|
||||||
except:
|
except:
|
||||||
@@ -434,8 +521,13 @@ class SvcHub(object):
|
|||||||
db.close() # type: ignore
|
db.close() # type: ignore
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
try:
|
||||||
|
os.unlink(db_lock)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
os.unlink(db_path)
|
os.unlink(db_path)
|
||||||
|
|
||||||
|
def _create_session_db(self, cur: "sqlite3.Cursor") -> int:
|
||||||
sch = [
|
sch = [
|
||||||
r"create table kv (k text, v int)",
|
r"create table kv (k text, v int)",
|
||||||
r"create table us (un text, si text, t0 int)",
|
r"create table us (un text, si text, t0 int)",
|
||||||
@@ -445,17 +537,74 @@ class SvcHub(object):
|
|||||||
r"create index us_t0 on us(t0)",
|
r"create index us_t0 on us(t0)",
|
||||||
r"insert into kv values ('sver', 1)",
|
r"insert into kv values ('sver', 1)",
|
||||||
]
|
]
|
||||||
|
for cmd in sch:
|
||||||
|
cur.execute(cmd)
|
||||||
|
self.log("root", "created new sessions-db")
|
||||||
|
return 1
|
||||||
|
|
||||||
assert db # type: ignore # !rm
|
def _create_idp_db(self, cur: "sqlite3.Cursor") -> int:
|
||||||
assert cur # type: ignore # !rm
|
sch = [
|
||||||
if create:
|
r"create table kv (k text, v int)",
|
||||||
for cmd in sch:
|
r"create table us (un text, gs text)",
|
||||||
cur.execute(cmd)
|
# username, groups
|
||||||
self.log("root", "created new sessions-db")
|
r"create index us_un on us(un)",
|
||||||
db.commit()
|
r"insert into kv values ('sver', 1)",
|
||||||
|
]
|
||||||
|
for cmd in sch:
|
||||||
|
cur.execute(cmd)
|
||||||
|
self.log("root", "created new idp-db")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
def _verify_db(
|
||||||
|
self,
|
||||||
|
cur: "sqlite3.Cursor",
|
||||||
|
which: str,
|
||||||
|
pathopt: str,
|
||||||
|
db_path: str,
|
||||||
|
desc: str,
|
||||||
|
sver: int,
|
||||||
|
native_ver: int,
|
||||||
|
) -> str:
|
||||||
|
# ensure writable (maybe owned by other user)
|
||||||
|
db = cur.connection
|
||||||
|
|
||||||
|
try:
|
||||||
|
zil = cur.execute("select v from kv where k='pid'").fetchall()
|
||||||
|
if len(zil) > 1:
|
||||||
|
raise Exception()
|
||||||
|
owner = zil[0][0]
|
||||||
|
except:
|
||||||
|
owner = 0
|
||||||
|
|
||||||
|
if which == "ses":
|
||||||
|
cons = "Will now disable sessions and instead use plaintext passwords in cookies."
|
||||||
|
elif which == "idp":
|
||||||
|
cons = "Each IdP-volume will not become available until its associated user sends their first request."
|
||||||
|
else:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
if not lock_file(db_path + ".lock"):
|
||||||
|
t = "the %s [%s] is already in use by another copyparty instance (pid:%d). This is not supported; please provide another database with --%s or give this copyparty-instance its entirely separate config-folder by setting another path in the XDG_CONFIG_HOME env-var. You can also disable this safeguard by setting env-var PRTY_NO_DB_LOCK=1. %s"
|
||||||
|
return t % (desc, db_path, owner, pathopt, cons)
|
||||||
|
|
||||||
|
vars = (("pid", os.getpid()), ("ts", int(time.time() * 1000)))
|
||||||
|
if owner:
|
||||||
|
# wear-estimate: 2 cells; offsets 0x10, 0x50, 0x19720
|
||||||
|
for k, v in vars:
|
||||||
|
cur.execute("update kv set v=? where k=?", (v, k))
|
||||||
|
else:
|
||||||
|
# wear-estimate: 3~4 cells; offsets 0x10, 0x50, 0x19180, 0x19710, 0x36000, 0x360b0, 0x36b90
|
||||||
|
for k, v in vars:
|
||||||
|
cur.execute("insert into kv values(?, ?)", (k, v))
|
||||||
|
|
||||||
|
if sver < native_ver:
|
||||||
|
cur.execute("delete from kv where k='sver'")
|
||||||
|
cur.execute("insert into kv values('sver',?)", (native_ver,))
|
||||||
|
|
||||||
|
db.commit()
|
||||||
cur.close()
|
cur.close()
|
||||||
db.close()
|
db.close()
|
||||||
|
return ""
|
||||||
|
|
||||||
def setup_share_db(self) -> None:
|
def setup_share_db(self) -> None:
|
||||||
al = self.args
|
al = self.args
|
||||||
@@ -464,7 +613,7 @@ class SvcHub(object):
|
|||||||
al.shr = ""
|
al.shr = ""
|
||||||
return
|
return
|
||||||
|
|
||||||
import sqlite3
|
assert sqlite3 # type: ignore # !rm
|
||||||
|
|
||||||
al.shr = al.shr.strip("/")
|
al.shr = al.shr.strip("/")
|
||||||
if "/" in al.shr or not al.shr:
|
if "/" in al.shr or not al.shr:
|
||||||
@@ -475,34 +624,48 @@ class SvcHub(object):
|
|||||||
al.shr = "/%s/" % (al.shr,)
|
al.shr = "/%s/" % (al.shr,)
|
||||||
al.shr1 = al.shr[1:]
|
al.shr1 = al.shr[1:]
|
||||||
|
|
||||||
create = True
|
# policy:
|
||||||
modified = False
|
# the shares-db is important, so panic if something is wrong
|
||||||
|
|
||||||
db_path = self.args.shr_db
|
db_path = self.args.shr_db
|
||||||
self.log("root", "opening shares-db %s" % (db_path,))
|
db_lock = db_path + ".lock"
|
||||||
for n in range(2):
|
try:
|
||||||
try:
|
create = not os.path.getsize(db_path)
|
||||||
db = sqlite3.connect(db_path)
|
except:
|
||||||
cur = db.cursor()
|
create = True
|
||||||
try:
|
zs = "creating new" if create else "opening"
|
||||||
cur.execute("select count(*) from sh").fetchone()
|
self.log("root", "%s shares-db %s" % (zs, db_path))
|
||||||
create = False
|
|
||||||
break
|
sver = 0
|
||||||
except:
|
try:
|
||||||
pass
|
db = sqlite3.connect(db_path)
|
||||||
except Exception as ex:
|
cur = db.cursor()
|
||||||
if n:
|
if not create:
|
||||||
raise
|
zs = "select v from kv where k='sver'"
|
||||||
t = "shares-db corrupt; deleting and recreating: %r"
|
sver = cur.execute(zs).fetchall()[0][0]
|
||||||
self.log("root", t % (ex,), 3)
|
if sver > VER_SHARES_DB:
|
||||||
try:
|
zs = "this version of copyparty only understands shares-db v%d and older; the db is v%d"
|
||||||
cur.close() # type: ignore
|
raise Exception(zs % (VER_SHARES_DB, sver))
|
||||||
except:
|
|
||||||
pass
|
cur.execute("select count(*) from sh").fetchone()
|
||||||
try:
|
except Exception as ex:
|
||||||
db.close() # type: ignore
|
t = "could not open shares-db; will now panic...\nthe following database must be repaired or deleted before you can launch copyparty:\n%s\n\nERROR: %s\n\nadditional details:\n%s\n"
|
||||||
except:
|
self.log("root", t % (db_path, ex, min_ex()), 1)
|
||||||
pass
|
raise
|
||||||
os.unlink(db_path)
|
|
||||||
|
try:
|
||||||
|
zil = cur.execute("select v from kv where k='pid'").fetchall()
|
||||||
|
if len(zil) > 1:
|
||||||
|
raise Exception()
|
||||||
|
owner = zil[0][0]
|
||||||
|
except:
|
||||||
|
owner = 0
|
||||||
|
|
||||||
|
if not lock_file(db_lock):
|
||||||
|
t = "the shares-db [%s] is already in use by another copyparty instance (pid:%d). This is not supported; please provide another database with --shr-db or give this copyparty-instance its entirely separate config-folder by setting another path in the XDG_CONFIG_HOME env-var. You can also disable this safeguard by setting env-var PRTY_NO_DB_LOCK=1. Will now panic."
|
||||||
|
t = t % (db_path, owner)
|
||||||
|
self.log("root", t, 1)
|
||||||
|
raise Exception(t)
|
||||||
|
|
||||||
sch1 = [
|
sch1 = [
|
||||||
r"create table kv (k text, v int)",
|
r"create table kv (k text, v int)",
|
||||||
@@ -514,34 +677,37 @@ class SvcHub(object):
|
|||||||
r"create index sf_k on sf(k)",
|
r"create index sf_k on sf(k)",
|
||||||
r"create index sh_k on sh(k)",
|
r"create index sh_k on sh(k)",
|
||||||
r"create index sh_t1 on sh(t1)",
|
r"create index sh_t1 on sh(t1)",
|
||||||
|
r"insert into kv values ('sver', 2)",
|
||||||
]
|
]
|
||||||
|
|
||||||
assert db # type: ignore # !rm
|
assert db # type: ignore # !rm
|
||||||
assert cur # type: ignore # !rm
|
assert cur # type: ignore # !rm
|
||||||
if create:
|
if not sver:
|
||||||
dver = 2
|
sver = VER_SHARES_DB
|
||||||
modified = True
|
|
||||||
for cmd in sch1 + sch2:
|
for cmd in sch1 + sch2:
|
||||||
cur.execute(cmd)
|
cur.execute(cmd)
|
||||||
self.log("root", "created new shares-db")
|
self.log("root", "created new shares-db")
|
||||||
else:
|
|
||||||
(dver,) = cur.execute("select v from kv where k = 'sver'").fetchall()[0]
|
|
||||||
|
|
||||||
if dver == 1:
|
if sver == 1:
|
||||||
modified = True
|
|
||||||
for cmd in sch2:
|
for cmd in sch2:
|
||||||
cur.execute(cmd)
|
cur.execute(cmd)
|
||||||
cur.execute("update sh set st = 0")
|
cur.execute("update sh set st = 0")
|
||||||
self.log("root", "shares-db schema upgrade ok")
|
self.log("root", "shares-db schema upgrade ok")
|
||||||
|
|
||||||
if modified:
|
if sver < VER_SHARES_DB:
|
||||||
for cmd in [
|
cur.execute("delete from kv where k='sver'")
|
||||||
r"delete from kv where k = 'sver'",
|
cur.execute("insert into kv values('sver',?)", (VER_SHARES_DB,))
|
||||||
r"insert into kv values ('sver', %d)" % (2,),
|
|
||||||
]:
|
|
||||||
cur.execute(cmd)
|
|
||||||
db.commit()
|
|
||||||
|
|
||||||
|
vars = (("pid", os.getpid()), ("ts", int(time.time() * 1000)))
|
||||||
|
if owner:
|
||||||
|
# wear-estimate: same as sessions-db
|
||||||
|
for k, v in vars:
|
||||||
|
cur.execute("update kv set v=? where k=?", (v, k))
|
||||||
|
else:
|
||||||
|
for k, v in vars:
|
||||||
|
cur.execute("insert into kv values(?, ?)", (k, v))
|
||||||
|
|
||||||
|
db.commit()
|
||||||
cur.close()
|
cur.close()
|
||||||
db.close()
|
db.close()
|
||||||
|
|
||||||
@@ -679,10 +845,11 @@ class SvcHub(object):
|
|||||||
t += ", "
|
t += ", "
|
||||||
t += "\033[0mNG: \033[35m" + sng
|
t += "\033[0mNG: \033[35m" + sng
|
||||||
|
|
||||||
t += "\033[0m, see --deps"
|
t += "\033[0m, see --deps (this is fine btw)"
|
||||||
self.log("dependencies", t, 6)
|
self.log("optional-dependencies", t, 6)
|
||||||
|
|
||||||
def _check_env(self) -> None:
|
def _check_env(self) -> None:
|
||||||
|
al = self.args
|
||||||
try:
|
try:
|
||||||
files = os.listdir(E.cfg)
|
files = os.listdir(E.cfg)
|
||||||
except:
|
except:
|
||||||
@@ -699,6 +866,21 @@ class SvcHub(object):
|
|||||||
if self.args.bauth_last:
|
if self.args.bauth_last:
|
||||||
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
self.log("root", "WARNING: ignoring --bauth-last due to --no-bauth", 3)
|
||||||
|
|
||||||
|
have_tcp = False
|
||||||
|
for zs in al.i:
|
||||||
|
if not zs.startswith("unix:"):
|
||||||
|
have_tcp = True
|
||||||
|
if not have_tcp:
|
||||||
|
zb = False
|
||||||
|
zs = "z zm zm4 zm6 zmv zmvv zs zsv zv"
|
||||||
|
for zs in zs.split():
|
||||||
|
if getattr(al, zs, False):
|
||||||
|
setattr(al, zs, False)
|
||||||
|
zb = True
|
||||||
|
if zb:
|
||||||
|
t = "only listening on unix-sockets; cannot enable zeroconf/mdns/ssdp as requested"
|
||||||
|
self.log("root", t, 3)
|
||||||
|
|
||||||
if not self.args.no_dav:
|
if not self.args.no_dav:
|
||||||
from .dxml import DXML_OK
|
from .dxml import DXML_OK
|
||||||
|
|
||||||
@@ -763,13 +945,20 @@ class SvcHub(object):
|
|||||||
vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
|
vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
|
||||||
setattr(al, k, vl)
|
setattr(al, k, vl)
|
||||||
|
|
||||||
for k in "lo hist ssl_log".split(" "):
|
for k in "lo hist dbpath ssl_log".split(" "):
|
||||||
vs = getattr(al, k)
|
vs = getattr(al, k)
|
||||||
if vs:
|
if vs:
|
||||||
vs = os.path.expandvars(os.path.expanduser(vs))
|
vs = os.path.expandvars(os.path.expanduser(vs))
|
||||||
setattr(al, k, vs)
|
setattr(al, k, vs)
|
||||||
|
|
||||||
for k in "dav_ua1 sus_urls nonsus_urls".split(" "):
|
for k in "idp_adm".split(" "):
|
||||||
|
vs = getattr(al, k)
|
||||||
|
vsa = [x.strip() for x in vs.split(",")]
|
||||||
|
vsa = [x.lower() for x in vsa if x]
|
||||||
|
setattr(al, k + "_set", set(vsa))
|
||||||
|
|
||||||
|
zs = "dav_ua1 sus_urls nonsus_urls ua_nodoc ua_nozip"
|
||||||
|
for k in zs.split(" "):
|
||||||
vs = getattr(al, k)
|
vs = getattr(al, k)
|
||||||
if not vs or vs == "no":
|
if not vs or vs == "no":
|
||||||
setattr(al, k, None)
|
setattr(al, k, None)
|
||||||
@@ -931,7 +1120,7 @@ class SvcHub(object):
|
|||||||
|
|
||||||
fn = sel_fn
|
fn = sel_fn
|
||||||
try:
|
try:
|
||||||
os.makedirs(os.path.dirname(fn))
|
bos.makedirs(os.path.dirname(fn))
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -948,6 +1137,9 @@ class SvcHub(object):
|
|||||||
|
|
||||||
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
|
lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
|
||||||
|
|
||||||
|
if getattr(self.args, "free_umask", False):
|
||||||
|
os.fchmod(lh.fileno(), 0o644)
|
||||||
|
|
||||||
argv = [pybin] + self.argv
|
argv = [pybin] + self.argv
|
||||||
if hasattr(shlex, "quote"):
|
if hasattr(shlex, "quote"):
|
||||||
argv = [shlex.quote(x) for x in argv]
|
argv = [shlex.quote(x) for x in argv]
|
||||||
@@ -1260,7 +1452,7 @@ class SvcHub(object):
|
|||||||
raise
|
raise
|
||||||
|
|
||||||
def check_mp_support(self) -> str:
|
def check_mp_support(self) -> str:
|
||||||
if MACOS:
|
if MACOS and not os.environ.get("PRTY_FORCE_MP"):
|
||||||
return "multiprocessing is wonky on mac osx;"
|
return "multiprocessing is wonky on mac osx;"
|
||||||
elif sys.version_info < (3, 3):
|
elif sys.version_info < (3, 3):
|
||||||
return "need python 3.3 or newer for multiprocessing;"
|
return "need python 3.3 or newer for multiprocessing;"
|
||||||
@@ -1280,7 +1472,7 @@ class SvcHub(object):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if mp.cpu_count() <= 1:
|
if mp.cpu_count() <= 1 and not os.environ.get("PRTY_FORCE_MP"):
|
||||||
raise Exception()
|
raise Exception()
|
||||||
except:
|
except:
|
||||||
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
||||||
|
|||||||
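Note on the svchub hunk above: when every `-i` listener is a unix socket, the new code force-disables all zeroconf/mdns/ssdp flags and logs a warning. A minimal standalone sketch of that check, using stand-in names (`args`, `log`) rather than the real SvcHub members:

```python
from types import SimpleNamespace

def disable_zeroconf_if_unix_only(args, log):
    # TCP listeners are any -i entry that does not use the "unix:" prefix
    have_tcp = any(not i.startswith("unix:") for i in args.i)
    if have_tcp:
        return
    changed = False
    for flag in "z zm zm4 zm6 zmv zmvv zs zsv zv".split():
        if getattr(args, flag, False):
            setattr(args, flag, False)
            changed = True
    if changed:
        log("only listening on unix-sockets; cannot enable zeroconf/mdns/ssdp as requested")

args = SimpleNamespace(i=["unix:/dev/shm/party.sock"], z=True, zm=True)
disable_zeroconf_if_unix_only(args, print)
```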
@@ -4,12 +4,11 @@ from __future__ import print_function, unicode_literals
 import calendar
 import stat
 import time
-import zlib

 from .authsrv import AuthSrv
 from .bos import bos
 from .sutil import StreamArc, errdesc
-from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile
+from .util import min_ex, sanitize_fn, spack, sunpack, yieldfile, zlib

 if True:  # pylint: disable=using-constant-test
 from typing import Any, Generator, Optional
@@ -55,6 +54,7 @@ def gen_fdesc(sz: int, crc32: int, z64: bool) -> bytes:

 def gen_hdr(
 h_pos: Optional[int],
+z64: bool,
 fn: str,
 sz: int,
 lastmod: int,
@@ -71,7 +71,6 @@ def gen_hdr(
 # appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
 # extinfo for values which exceed H, but that becomes an off-by-one
 # (can't tell if it was clamped or exactly maxval), make it obvious
-z64 = sz >= 0xFFFFFFFF
 z64v = [sz, sz] if z64 else []
 if h_pos and h_pos >= 0xFFFFFFFF:
 # central, also consider ptr to original header
@@ -245,6 +244,7 @@ class StreamZip(StreamArc):

 sz = st.st_size
 ts = st.st_mtime
+h_pos = self.pos

 crc = 0
 if self.pre_crc:
@@ -253,8 +253,12 @@ class StreamZip(StreamArc):

 crc &= 0xFFFFFFFF

-h_pos = self.pos
-buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
+# some unzip-programs expect a 64bit data-descriptor
+# even if the only 32bit-exceeding value is the offset,
+# so force that by placeholdering the filesize too
+z64 = h_pos >= 0xFFFFFFFF or sz >= 0xFFFFFFFF

+buf = gen_hdr(None, z64, name, sz, ts, self.utf8, crc, self.pre_crc)
 yield self._ct(buf)

 for buf in yieldfile(src, self.args.iobuf):
@@ -267,8 +271,6 @@ class StreamZip(StreamArc):

 self.items.append((name, sz, ts, crc, h_pos))

-z64 = sz >= 4 * 1024 * 1024 * 1024

 if z64 or not self.pre_crc:
 buf = gen_fdesc(sz, crc, z64)
 yield self._ct(buf)
@@ -307,7 +309,8 @@ class StreamZip(StreamArc):

 cdir_pos = self.pos
 for name, sz, ts, crc, h_pos in self.items:
-buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
+z64 = h_pos >= 0xFFFFFFFF or sz >= 0xFFFFFFFF
+buf = gen_hdr(h_pos, z64, name, sz, ts, self.utf8, crc, self.pre_crc)
 mbuf += self._ct(buf)
 if len(mbuf) >= 16384:
 yield mbuf
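The zip-streaming changes above move the zip64 decision out of `gen_hdr` and compute it once per file from both the header offset and the filesize, so the local header, central header, and data descriptor all agree. A hedged sketch of just that decision (thresholds taken from the diff; the real `gen_hdr`/`gen_fdesc` take more parameters):

```python
def needs_zip64(header_pos: int, filesize: int) -> bool:
    # some unzip programs expect a 64-bit data descriptor even when only the
    # header offset exceeds 32 bits, so both values are considered
    return header_pos >= 0xFFFFFFFF or filesize >= 0xFFFFFFFF

assert not needs_zip64(123, 456)
assert needs_zip64(0xFFFFFFFF, 456)      # archive already past the 4 GiB offset
assert needs_zip64(123, 5 * 1024 ** 3)   # 5 GiB member
```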
@@ -151,9 +151,15 @@ class TcpSrv(object):
 if just_ll or self.args.ll:
 ll_ok.add(ip.split("/")[0])

+listening_on = []
+for ip, ports in sorted(ok.items()):
+for port in sorted(ports):
+listening_on.append("%s %s" % (ip, port))

 qr1: dict[str, list[int]] = {}
 qr2: dict[str, list[int]] = {}
 msgs = []
+accessible_on = []
 title_tab: dict[str, dict[str, int]] = {}
 title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
 t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
@@ -169,6 +175,10 @@ class TcpSrv(object):
 ):
 continue

+zs = "%s %s" % (ip, port)
+if zs not in accessible_on:
+accessible_on.append(zs)

 proto = " http"
 if self.args.http_only:
 pass
@@ -219,6 +229,14 @@ class TcpSrv(object):
 else:
 print("\n", end="")

+for fn, ls in (
+(self.args.wr_h_eps, listening_on),
+(self.args.wr_h_aon, accessible_on),
+):
+if fn:
+with open(fn, "wb") as f:
+f.write(("\n".join(ls)).encode("utf-8"))

 if self.args.qr or self.args.qrs:
 self.qr = self._qr(qr1, qr2)

@@ -264,7 +282,7 @@ class TcpSrv(object):
 except:
 pass  # will create another ipv4 socket instead

-if not ANYWIN and self.args.freebind:
+if getattr(self.args, "freebind", False):
 srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)

 try:
@@ -548,7 +566,7 @@ class TcpSrv(object):
 ip = None
 ips = list(t1) + list(t2)
 qri = self.args.qri
-if self.args.zm and not qri:
+if self.args.zm and not qri and ips:
 name = self.args.name + ".local"
 t1[name] = next(v for v in (t1 or t2).values())
 ips = [name] + ips
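The TcpSrv hunks above build two endpoint lists, `listening_on` (every bound "ip port" pair) and `accessible_on` (pairs that survived the reachability filter), and dump them to the files named by `--wr-h-eps` / `--wr-h-aon` when those are set. A small sketch of that write, with a made-up output path:

```python
def write_endpoints(path: str, endpoints: list) -> None:
    # one "ip port" pair per line, utf-8, overwriting any previous contents
    if not path:
        return
    with open(path, "wb") as f:
        f.write(("\n".join(endpoints)).encode("utf-8"))

# hypothetical output path; the real targets come from --wr-h-eps / --wr-h-aon
write_endpoints("/tmp/copyparty-eps.txt", ["127.0.0.1 3923", "192.168.1.5 3923"])
```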
@@ -36,7 +36,19 @@ from partftpy.TftpShared import TftpException
 from .__init__ import EXE, PY2, TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
-from .util import UTC, BytesIO, Daemon, ODict, exclude_dotfiles, min_ex, runhook, undot
+from .util import (
+FN_EMB,
+UTC,
+BytesIO,
+Daemon,
+ODict,
+exclude_dotfiles,
+min_ex,
+runhook,
+undot,
+vjoin,
+vsplit,
+)

 if True:  # pylint: disable=using-constant-test
 from typing import Any, Union
@@ -244,16 +256,25 @@ class Tftpd(object):
 for srv in srvs:
 srv.stop()

-def _v2a(self, caller: str, vpath: str, perms: list, *a: Any) -> tuple[VFS, str]:
+def _v2a(
+self, caller: str, vpath: str, perms: list, *a: Any
+) -> tuple[VFS, str, str]:
 vpath = vpath.replace("\\", "/").lstrip("/")
 if not perms:
 perms = [True, True]

 debug('%s("%s", %s) %s\033[K\033[0m', caller, vpath, str(a), perms)
 vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)
+if perms[1] and "*" not in vfs.axs.uread and "wo_up_readme" not in vfs.flags:
+zs, fn = vsplit(vpath)
+if fn.lower() in FN_EMB:
+vpath = vjoin(zs, "_wo_" + fn)
+vfs, rem = self.asrv.vfs.get(vpath, "*", *perms)

 if not vfs.realpath:
 raise Exception("unmapped vfs")
-return vfs, vfs.canonical(rem)
+return vfs, vpath, vfs.canonical(rem)

 def _ls(self, vpath: str, raddress: str, rport: int, force=False) -> Any:
 # generate file listing if vpath is dir.txt and return as file object
@@ -263,6 +284,7 @@ class Tftpd(object):
 if not ptn or not ptn.match(fn.lower()):
 return None

+tsdt = datetime.fromtimestamp
 vn, rem = self.asrv.vfs.get(vpath, "*", True, False)
 fsroot, vfs_ls, vfs_virt = vn.ls(
 rem,
@@ -275,7 +297,7 @@ class Tftpd(object):
 dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
 fils1 = [(v.st_mtime, v.st_size, k) for k, v in vfs_ls if k not in dnames]
 real1 = dirs1 + fils1
-realt = [(datetime.fromtimestamp(mt, UTC), sz, fn) for mt, sz, fn in real1]
+realt = [(tsdt(max(0, mt), UTC), sz, fn) for mt, sz, fn in real1]
 reals = [
 (
 "%04d-%02d-%02d %02d:%02d:%02d"
@@ -331,7 +353,7 @@ class Tftpd(object):
 else:
 raise Exception("bad mode %s" % (mode,))

-vfs, ap = self._v2a("open", vpath, [rd, wr])
+vfs, vpath, ap = self._v2a("open", vpath, [rd, wr])
 if wr:
 if "*" not in vfs.axs.uwrite:
 yeet("blocked write; folder not world-writable: /%s" % (vpath,))
@@ -365,18 +387,22 @@ class Tftpd(object):
 if not a:
 a = (self.args.iobuf,)

-return open(ap, mode, *a, **ka)
+ret = open(ap, mode, *a, **ka)
+if wr and "chmod_f" in vfs.flags:
+os.fchmod(ret.fileno(), vfs.flags["chmod_f"])

+return ret

 def _mkdir(self, vpath: str, *a) -> None:
-vfs, ap = self._v2a("mkdir", vpath, [])
+vfs, _, ap = self._v2a("mkdir", vpath, [False, True])
 if "*" not in vfs.axs.uwrite:
 yeet("blocked mkdir; folder not world-writable: /%s" % (vpath,))

-return bos.mkdir(ap)
+return bos.mkdir(ap, vfs.flags["chmod_d"])

 def _unlink(self, vpath: str) -> None:
 # return bos.unlink(self._v2a("stat", vpath, *a)[1])
-vfs, ap = self._v2a("delete", vpath, [True, False, False, True])
+vfs, _, ap = self._v2a("delete", vpath, [True, False, False, True])

 try:
 inf = bos.stat(ap)
@@ -400,7 +426,7 @@ class Tftpd(object):

 def _p_exists(self, vpath: str) -> bool:
 try:
-ap = self._v2a("p.exists", vpath, [False, False])[1]
+ap = self._v2a("p.exists", vpath, [False, False])[2]
 bos.stat(ap)
 return True
 except:
@@ -408,7 +434,7 @@ class Tftpd(object):

 def _p_isdir(self, vpath: str) -> bool:
 try:
-st = bos.stat(self._v2a("p.isdir", vpath, [False, False])[1])
+st = bos.stat(self._v2a("p.isdir", vpath, [False, False])[2])
 ret = stat.S_ISDIR(st.st_mode)
 return ret
 except:
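In the tftpd `_v2a` change above, uploads of UI-embedded filenames into write-only folders are diverted to a `_wo_`-prefixed name. A rough standalone sketch of that rename rule; the real code uses copyparty's `vsplit`/`vjoin` helpers and the volume flags, which are simplified away here:

```python
FN_EMB = set([".prologue.html", ".epilogue.html", "readme.md", "preadme.md"])  # from util.py below

def divert_embedded_upload(vpath: str) -> str:
    # in a write-only folder, filenames that the UI would embed and render
    # get a "_wo_" prefix so an uploader cannot replace the folder's readme
    folder, _, fn = vpath.rpartition("/")
    if fn.lower() in FN_EMB:
        fn = "_wo_" + fn
    return "%s/%s" % (folder, fn) if folder else fn

print(divert_embedded_upload("inc/README.md"))  # inc/_wo_README.md
```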
@@ -1,13 +1,15 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

+import errno
 import os
+import stat

 from .__init__ import TYPE_CHECKING
 from .authsrv import VFS
 from .bos import bos
 from .th_srv import EXTS_AC, HAVE_WEBP, thumb_path
-from .util import Cooldown
+from .util import Cooldown, Pebkac

 if True:  # pylint: disable=using-constant-test
 from typing import Optional, Union
@@ -16,6 +18,9 @@ if TYPE_CHECKING:
 from .httpsrv import HttpSrv


+IOERROR = "reading the file was denied by the server os; either due to filesystem permissions, selinux, apparmor, or similar:\n%r"


 class ThumbCli(object):
 def __init__(self, hsrv: "HttpSrv") -> None:
 self.broker = hsrv.broker
@@ -124,7 +129,7 @@ class ThumbCli(object):

 tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
 tpaths = [tpath]
-if fmt == "w":
+if fmt[:1] == "w":
 # also check for jpg (maybe webp is unavailable)
 tpaths.append(tpath.rsplit(".", 1)[0] + ".jpg")

@@ -157,8 +162,22 @@ class ThumbCli(object):
 if abort:
 return None

-if not bos.path.getsize(os.path.join(ptop, rem)):
-return None
+ap = os.path.join(ptop, rem)
+try:
+st = bos.stat(ap)
+if not st.st_size or not stat.S_ISREG(st.st_mode):
+return None

+with open(ap, "rb", 4) as f:
+if not f.read(4):
+raise Exception()
+except OSError as ex:
+if ex.errno == errno.ENOENT:
+raise Pebkac(404)
+else:
+raise Pebkac(500, IOERROR % (ex,))
+except Exception as ex:
+raise Pebkac(500, IOERROR % (ex,))

 x = self.broker.ask("thumbsrv.get", ptop, rem, mtime, fmt)
 return x.get()  # type: ignore
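The thumbcli hunk above swaps a bare `getsize` check for a stat plus a 4-byte test read, so a vanished file maps to a 404 and anything the OS refuses to read maps to a 500 carrying the `IOERROR` text. A compact sketch of that mapping, with plain exceptions standing in for copyparty's `Pebkac`:

```python
import errno
import os
import stat

def thumb_preflight(ap: str) -> bool:
    """True if the source file looks convertible; raise on server-side errors."""
    try:
        st = os.stat(ap)
        if not st.st_size or not stat.S_ISREG(st.st_mode):
            return False  # empty or not a regular file; nothing to thumbnail
        with open(ap, "rb", 4) as f:
            if not f.read(4):
                raise Exception()
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            raise FileNotFoundError(ap)  # the real code raises Pebkac(404)
        raise RuntimeError("io denied: %r" % (ex,))  # stands in for Pebkac(500, IOERROR % ...)
    except Exception as ex:
        raise RuntimeError("io denied: %r" % (ex,))
    return True
```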
@@ -4,8 +4,10 @@ from __future__ import print_function, unicode_literals
 import hashlib
 import logging
 import os
+import re
 import shutil
 import subprocess as sp
+import tempfile
 import threading
 import time

@@ -18,16 +20,17 @@ from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, au_unpk, ffprobe
 from .util import BytesIO  # type: ignore
 from .util import (
 FFMPEG_URL,
+VF_CAREFUL,
 Cooldown,
 Daemon,
 afsenc,
+atomic_move,
 fsenc,
 min_ex,
 runcmd,
 statdir,
 ub64enc,
 vsplit,
-wrename,
 wunlink,
 )

@@ -48,6 +51,10 @@ HAVE_WEBP = False

 EXTS_TH = set(["jpg", "webp", "png"])
 EXTS_AC = set(["opus", "owa", "caf", "mp3"])
+EXTS_SPEC_SAFE = set("aif aiff flac mp3 opus wav".split())

+PTN_TS = re.compile("^-?[0-9a-f]{8,10}$")


 try:
 if os.environ.get("PRTY_NO_PIL"):
@@ -89,6 +96,10 @@ try:
 if os.environ.get("PRTY_NO_PIL_AVIF"):
 raise Exception()

+if ".avif" in Image.registered_extensions():
+HAVE_AVIF = True
+raise Exception()

 import pillow_avif  # noqa: F401 # pylint: disable=unused-import

 HAVE_AVIF = True
@@ -163,12 +174,15 @@ class ThumbSrv(object):

 self.mutex = threading.Lock()
 self.busy: dict[str, list[threading.Condition]] = {}
+self.untemp: dict[str, list[str]] = {}
 self.ram: dict[str, float] = {}
 self.memcond = threading.Condition(self.mutex)
 self.stopping = False
 self.rm_nullthumbs = True  # forget failed conversions on startup
 self.nthr = max(1, self.args.th_mt)

+self.exts_spec_unsafe = set(self.args.th_spec_cnv.split(","))

 self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)
 for n in range(self.nthr):
 Daemon(self.worker, "thumb-{}-{}".format(n, self.nthr))
@@ -255,7 +269,8 @@ class ThumbSrv(object):
 self.log("joined waiting room for %r" % (tpath,))
 except:
 thdir = os.path.dirname(tpath)
-bos.makedirs(os.path.join(thdir, "w"))
+chmod = 0o700 if self.args.free_umask else 0o755
+bos.makedirs(os.path.join(thdir, "w"), chmod)

 inf_path = os.path.join(thdir, "dir.txt")
 if not bos.path.exists(inf_path):
@@ -270,7 +285,7 @@ class ThumbSrv(object):
 vn = next((x for x in allvols if x.realpath == ptop), None)
 if not vn:
 self.log("ptop %r not in %s" % (ptop, allvols), 3)
-vn = self.asrv.vfs.all_aps[0][1]
+vn = self.asrv.vfs.all_aps[0][1][0]

 self.q.put((abspath, tpath, fmt, vn))
 self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)
@@ -385,8 +400,12 @@ class ThumbSrv(object):
 self.log(msg, c)
 if getattr(ex, "returncode", 0) != 321:
 if fun == funs[-1]:
-with open(ttpath, "wb") as _:
-pass
+try:
+with open(ttpath, "wb") as _:
+pass
+except Exception as ex:
+t = "failed to create the file [%s]: %r"
+self.log(t % (ttpath, ex), 3)
 else:
 # ffmpeg may spawn empty files on windows
 try:
@@ -398,14 +417,25 @@ class ThumbSrv(object):
 wunlink(self.log, ap_unpk, vn.flags)

 try:
-wrename(self.log, ttpath, tpath, vn.flags)
-except:
+atomic_move(self.log, ttpath, tpath, vn.flags)
+except Exception as ex:
+if not os.path.exists(tpath):
+t = "failed to move [%s] to [%s]: %r"
+self.log(t % (ttpath, tpath, ex), 3)
 pass

+untemp = []
 with self.mutex:
 subs = self.busy[tpath]
 del self.busy[tpath]
 self.ram.pop(ttpath, None)
+untemp = self.untemp.pop(ttpath, None) or []

+for ap in untemp:
+try:
+wunlink(self.log, ap, VF_CAREFUL)
+except:
+pass

 for x in subs:
 with x:
@@ -652,22 +682,50 @@ class ThumbSrv(object):
 except:
 pass
 else:
-wrename(self.log, wtpath, tpath, vn.flags)
+atomic_move(self.log, wtpath, tpath, vn.flags)

 def conv_spec(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
 ret, _ = ffprobe(abspath, int(vn.flags["convt"] / 2))
 if "ac" not in ret:
 raise Exception("not audio")

+fext = abspath.split(".")[-1].lower()

 # https://trac.ffmpeg.org/ticket/10797
 # expect 1 GiB every 600 seconds when duration is tricky;
 # simple filetypes are generally safer so let's special-case those
-safe = ("flac", "wav", "aif", "aiff", "opus")
-coeff = 1800 if abspath.split(".")[-1].lower() in safe else 600
-dur = ret[".dur"][1] if ".dur" in ret else 300
+coeff = 1800 if fext in EXTS_SPEC_SAFE else 600
+dur = ret[".dur"][1] if ".dur" in ret else 900
 need = 0.2 + dur / coeff
 self.wait4ram(need, tpath)

+infile = abspath
+if dur >= 900 or fext in self.exts_spec_unsafe:
+with tempfile.NamedTemporaryFile(suffix=".spec.flac", delete=False) as f:
+f.write(b"h")
+infile = f.name
+try:
+self.untemp[tpath].append(infile)
+except:
+self.untemp[tpath] = [infile]

+# fmt: off
+cmd = [
+b"ffmpeg",
+b"-nostdin",
+b"-v", b"error",
+b"-hide_banner",
+b"-i", fsenc(abspath),
+b"-map", b"0:a:0",
+b"-ac", b"1",
+b"-ar", b"48000",
+b"-sample_fmt", b"s16",
+b"-t", b"900",
+b"-y", fsenc(infile),
+]
+# fmt: on
+self._run_ff(cmd, vn)

 fc = "[0:a:0]aresample=48000{},showspectrumpic=s="
 if "3" in fmt:
 fc += "1280x1024,crop=1420:1056:70:48[o]"
@@ -687,7 +745,7 @@ class ThumbSrv(object):
 b"-nostdin",
 b"-v", b"error",
 b"-hide_banner",
-b"-i", fsenc(abspath),
+b"-i", fsenc(infile),
 b"-filter_complex", fc.encode("utf-8"),
 b"-map", b"[o]",
 b"-frames:v", b"1",
@@ -991,6 +1049,8 @@ class ThumbSrv(object):
 # thumb file
 try:
 b64, ts, ext = f.split(".")
+if len(ts) > 8 and PTN_TS.match(ts):
+ts = "yeahokay"
 if len(b64) != 24 or len(ts) != 8 or ext not in exts:
 raise Exception()
 except:
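Two related decisions appear in the `conv_spec` rewrite above: the RAM budget coefficient depends on whether the extension is in `EXTS_SPEC_SAFE`, and long or `--th-spec-cnv`-listed inputs are first bounced through a temporary, 900-second-capped flac before the spectrogram pass. A hedged sketch of both decisions in isolation:

```python
EXTS_SPEC_SAFE = set("aif aiff flac mp3 opus wav".split())  # from the diff above

def spec_ram_budget(abspath: str, duration_s: float) -> float:
    # simple formats have trustworthy durations, so they get the milder 1/1800 rate;
    # everything else is budgeted at roughly 1 GiB per 600 seconds (ffmpeg #10797)
    fext = abspath.split(".")[-1].lower()
    coeff = 1800 if fext in EXTS_SPEC_SAFE else 600
    return 0.2 + duration_s / coeff

def needs_temp_transcode(abspath: str, duration_s: float, unsafe_exts: set) -> bool:
    # long inputs, or extensions listed in --th-spec-cnv, are converted to a
    # bounded temporary flac before the spectrogram is rendered
    fext = abspath.split(".")[-1].lower()
    return duration_s >= 900 or fext in unsafe_exts

print(spec_ram_budget("a.flac", 300), needs_temp_transcode("a.m4a", 120, {"m4a"}))
```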
@@ -134,9 +134,9 @@ class U2idx(object):
 assert sqlite3  # type: ignore # !rm

 ptop = vn.realpath
-histpath = self.asrv.vfs.histtab.get(ptop)
+histpath = self.asrv.vfs.dbpaths.get(ptop)
 if not histpath:
-self.log("no histpath for %r" % (ptop,))
+self.log("no dbpath for %r" % (ptop,))
 return None

 db_path = os.path.join(histpath, "up2k.db")
@@ -2,7 +2,6 @@
 from __future__ import print_function, unicode_literals

 import errno
-import gzip
 import hashlib
 import json
 import math
@@ -42,6 +41,7 @@ from .util import (
 fsenc,
 gen_filekey,
 gen_filekey_dbg,
+gzip,
 hidedir,
 humansize,
 min_ex,
@@ -94,7 +94,7 @@ VF_AFFECTS_INDEXING = set(zsg.split(" "))

 SBUSY = "cannot receive uploads right now;\nserver busy with %s.\nPlease wait; the client will retry..."

-HINT_HISTPATH = "you could try moving the database to another location (preferably an SSD or NVME drive) using either the --hist argument (global option for all volumes), or the hist volflag (just for this volume)"
+HINT_HISTPATH = "you could try moving the database to another location (preferably an SSD or NVME drive) using either the --hist argument (global option for all volumes), or the hist volflag (just for this volume), or, if you want to keep the thumbnails in the current location and only move the database itself, then use --dbpath or volflag dbpath"


 NULLSTAT = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
@@ -557,6 +557,7 @@ class Up2k(object):
 else:
 # important; not deferred by db_act
 timeout = self._check_lifetimes()
+timeout = min(self._check_forget_ip(), timeout)
 try:
 if self.args.shr:
 timeout = min(self._check_shares(), timeout)
@@ -617,6 +618,43 @@ class Up2k(object):
 for v in vols:
 volage[v] = now

+def _check_forget_ip(self) -> float:
+now = time.time()
+timeout = now + 9001
+for vp, vol in sorted(self.vfs.all_vols.items()):
+maxage = vol.flags["forget_ip"]
+if not maxage:
+continue

+cur = self.cur.get(vol.realpath)
+if not cur:
+continue

+cutoff = now - maxage * 60

+for _ in range(2):
+q = "select ip, at from up where ip > '' order by +at limit 1"
+hits = cur.execute(q).fetchall()
+if not hits:
+break

+remains = hits[0][1] - cutoff
+if remains > 0:
+timeout = min(timeout, now + remains)
+break

+q = "update up set ip = '' where ip > '' and at <= %d"
+cur.execute(q % (cutoff,))
+zi = cur.rowcount
+cur.connection.commit()

+t = "forget-ip(%d) removed %d IPs from db [/%s]"
+self.log(t % (maxage, zi, vol.vpath))

+timeout = min(timeout, now + 900)

+return timeout

 def _check_lifetimes(self) -> float:
 now = time.time()
 timeout = now + 9001
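The new `_check_forget_ip` above finds the oldest upload row that still has an IP and either schedules a re-check or blanks every IP at or below the cutoff. A standalone rehearsal of the same two SQL statements against a throwaway sqlite database; the table layout here is an assumption matching the column order used by the `insert into up` elsewhere in this file:

```python
import sqlite3
import time

db = sqlite3.connect(":memory:")
# assumed layout; mirrors the order of "insert into up values (?,?,?,?,?,?,?)"
db.execute("create table up (w text, mt int, sz int, rd text, fn text, ip text, at int)")
now = int(time.time())
db.execute("insert into up values ('w1',0,0,'','old.txt','10.0.0.5',?)", (now - 7200,))
db.execute("insert into up values ('w2',0,0,'','new.txt','10.0.0.6',?)", (now - 60,))

cutoff = now - 90 * 60  # a forget_ip volflag of 90 minutes; the diff uses maxage * 60
oldest = db.execute("select ip, at from up where ip > '' order by +at limit 1").fetchone()
db.execute("update up set ip = '' where ip > '' and at <= %d" % (cutoff,))
print(oldest, db.execute("select fn, ip from up order by fn").fetchall())
# ('10.0.0.5', <ts>) [('new.txt', '10.0.0.6'), ('old.txt', '')]
```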
@@ -877,7 +915,8 @@ class Up2k(object):
 # only need to protect register_vpath but all in one go feels right
 for vol in vols:
 try:
-bos.makedirs(vol.realpath)  # gonna happen at snap anyways
+# mkdir gonna happen at snap anyways;
+bos.makedirs(vol.realpath, vol.flags["chmod_d"])
 dir_is_empty(self.log_func, not self.args.no_scandir, vol.realpath)
 except Exception as ex:
 self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
@@ -1058,9 +1097,9 @@ class Up2k(object):
 self, ptop: str, flags: dict[str, Any]
 ) -> Optional[tuple["sqlite3.Cursor", str]]:
 """mutex(main,reg) me"""
-histpath = self.vfs.histtab.get(ptop)
+histpath = self.vfs.dbpaths.get(ptop)
 if not histpath:
-self.log("no histpath for %r" % (ptop,))
+self.log("no dbpath for %r" % (ptop,))
 return None

 db_path = os.path.join(histpath, "up2k.db")
@@ -1081,7 +1120,7 @@ class Up2k(object):
 ft = "\033[0;32m{}{:.0}"
 ff = "\033[0;35m{}{:.0}"
 fv = "\033[0;36m{}:\033[90m{}"
-zs = "html_head mv_re_r mv_re_t rm_re_r rm_re_t srch_re_dots srch_re_nodot"
+zs = "ext_th_d html_head put_name2 mv_re_r mv_re_t rm_re_r rm_re_t srch_re_dots srch_re_nodot zipmax zipmaxn_v zipmaxs_v"
 fx = set(zs.split())
 fd = vf_bmap()
 fd.update(vf_cmap())
@@ -1103,6 +1142,20 @@ class Up2k(object):
 del fl[k1]
 else:
 fl[k1] = ",".join(x for x in fl[k1])

+if fl["chmod_d"] == int(self.args.chmod_d, 8):
+fl.pop("chmod_d")
+try:
+if fl["chmod_f"] == int(self.args.chmod_f or "-1", 8):
+fl.pop("chmod_f")
+except:
+pass
+for k in ("chmod_f", "chmod_d"):
+try:
+fl[k] = "%o" % (fl[k])
+except:
+pass

 a = [
 (ft if v is True else ff if v is False else fv).format(k, str(v))
 for k, v in fl.items()
@@ -1306,12 +1359,15 @@ class Up2k(object):
 ]
 excl += [absreal(x) for x in excl]
 excl += list(self.vfs.histtab.values())
+excl += list(self.vfs.dbpaths.values())
 if WINDOWS:
 excl = [x.replace("/", "\\") for x in excl]
 else:
 # ~/.wine/dosdevices/z:/ and such
 excl.extend(("/dev", "/proc", "/run", "/sys"))

+excl = list({k: 1 for k in excl})

 if self.args.re_dirsz:
 db.c.execute("delete from ds")
 db.n += 1
@@ -1323,6 +1379,10 @@ class Up2k(object):
 t = "volume /%s at [%s] is empty; will not be indexed as this could be due to an offline filesystem"
 self.log(t % (vol.vpath, rtop), 6)
 return True, False
+if not vol.check_landmarks():
+t = "volume /%s at [%s] will not be indexed due to bad landmarks"
+self.log(t % (vol.vpath, rtop), 6)
+return True, False

 n_add, _, _ = self._build_dir(
 db,
@@ -2079,11 +2139,12 @@ class Up2k(object):
 return -1

 w = bw[:-1].decode("ascii")
+w16 = w[:16]

 with self.mutex:
 try:
 q = "select rd, fn, ip, at from up where substr(w,1,16)=? and +w=?"
-rd, fn, ip, at = cur.execute(q, (w[:16], w)).fetchone()
+rd, fn, ip, at = cur.execute(q, (w16, w)).fetchone()
 except:
 # file modified/deleted since spooling
 continue
@@ -2092,8 +2153,12 @@ class Up2k(object):
 rd, fn = s3dec(rd, fn)

 if "mtp" in flags:
+q = "select 1 from mt where w=? and +k='t:mtp' limit 1"
+if cur.execute(q, (w16,)).fetchone():
+continue

 q = "insert into mt values (?,'t:mtp','a')"
-cur.execute(q, (w[:16],))
+cur.execute(q, (w16,))

 abspath = djoin(ptop, rd, fn)
 self.pp.msg = "c%d %s" % (nq, abspath)
@@ -2149,7 +2214,7 @@ class Up2k(object):
 return tf, -1

 if flt == 1:
-q = "select w from mt where w = ?"
+q = "select 1 from mt where w=? and +k != 't:mtp'"
 if c2.execute(q, (row[0][:16],)).fetchone():
 continue

@@ -2880,7 +2945,6 @@ class Up2k(object):
 if ptop not in self.registry:
 raise Pebkac(410, "location unavailable")

-cj["name"] = sanitize_fn(cj["name"], "")
 cj["poke"] = now = self.db_act = self.vol_act[ptop] = time.time()
 wark = dwark = self._get_wark(cj)
 job = None
@@ -2916,9 +2980,14 @@ class Up2k(object):
 self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
 )

-if vfs.flags.get("up_ts", "") == "fu" or not cj["lmod"]:
+zi = cj["lmod"]
+bad_mt = zi <= 0 or zi > 0xAAAAAAAA
+if bad_mt or vfs.flags.get("up_ts", "") == "fu":
 # force upload time rather than last-modified
 cj["lmod"] = int(time.time())
+if zi and bad_mt:
+t = "ignoring impossible last-modified time from client: %s"
+self.log(t % (zi,), 6)

 alts: list[tuple[int, int, dict[str, Any], "sqlite3.Cursor", str, str]] = []
 for ptop, cur in vols:
@@ -3186,14 +3255,16 @@ class Up2k(object):
 if hr.get("reloc"):
 x = pathmod(self.vfs, dst, vp, hr["reloc"])
 if x:
-zvfs = vfs
+ud1 = (vfs.vpath, job["prel"], job["name"])
 pdir, _, job["name"], (vfs, rem) = x
 dst = os.path.join(pdir, job["name"])
 job["vcfg"] = vfs.flags
 job["ptop"] = vfs.realpath
 job["vtop"] = vfs.vpath
 job["prel"] = rem
-if zvfs.vpath != vfs.vpath:
+job["name"] = sanitize_fn(job["name"], "")
+ud2 = (vfs.vpath, job["prel"], job["name"])
+if ud1 != ud2:
 # print(json.dumps(job, sort_keys=True, indent=4))
 job["hash"] = cj["hash"]
 self.log("xbu reloc1:%d..." % (depth,), 6)
@@ -3238,7 +3309,7 @@ class Up2k(object):
 reg,
 "up2k._get_volsize",
 )
-bos.makedirs(ap2)
+bos.makedirs(ap2, vfs.flags["chmod_d"])
 vfs.lim.nup(cj["addr"])
 vfs.lim.bup(cj["addr"], cj["size"])

@@ -3335,11 +3406,21 @@ class Up2k(object):
 return fname

 fp = djoin(fdir, fname)
-if job.get("replace") and bos.path.exists(fp):
+ow = job.get("replace") and bos.path.exists(fp)
+if ow and "mt" in str(job["replace"]).lower():
+mts = bos.stat(fp).st_mtime
+mtc = job["lmod"]
+if mtc < mts:
+t = "will not overwrite; server %d sec newer than client; %d > %d %r"
+self.log(t % (mts - mtc, mts, mtc, fp))
+ow = False

+ptop = job["ptop"]
+vf = self.flags.get(ptop) or {}
+if ow:
 self.log("replacing existing file at %r" % (fp,))
 cur = None
-ptop = job["ptop"]
-vf = self.flags.get(ptop) or {}
 st = bos.stat(fp)
 try:
 vrel = vjoin(job["prel"], fname)
@@ -3359,8 +3440,13 @@ class Up2k(object):
 else:
 dip = self.hub.iphash.s(ip)

-suffix = "-%.6f-%s" % (ts, dip)
-f, ret = ren_open(fname, "wb", fdir=fdir, suffix=suffix)
+f, ret = ren_open(
+fname,
+"wb",
+fdir=fdir,
+suffix="-%.6f-%s" % (ts, dip),
+chmod=vf.get("chmod_f", -1),
+)
 f.close()
 return ret

@@ -3373,6 +3459,7 @@ class Up2k(object):
 rm: bool = False,
 lmod: float = 0,
 fsrc: Optional[str] = None,
+is_mv: bool = False,
 ) -> None:
 if src == dst or (fsrc and fsrc == dst):
 t = "symlinking a file to itself?? orig(%s) fsrc(%s) link(%s)"
@@ -3389,7 +3476,9 @@ class Up2k(object):

 linked = False
 try:
-if not flags.get("dedup"):
+if "reflink" in flags:
+raise Exception("reflink")
+if not is_mv and not flags.get("dedup"):
 raise Exception("dedup is disabled in config")

 lsrc = src
@@ -3445,7 +3534,8 @@ class Up2k(object):

 linked = True
 except Exception as ex:
-self.log("cannot link; creating copy: " + repr(ex))
+if str(ex) != "reflink":
+self.log("cannot link; creating copy: " + repr(ex))
 if bos.path.isfile(src):
 csrc = src
 elif fsrc and bos.path.isfile(fsrc):
@@ -3655,8 +3745,9 @@ class Up2k(object):
 if self.idx_wark(vflags, *z2):
 del self.registry[ptop][wark]
 else:
-for k in "host tnam busy sprs poke t0c".split():
+for k in "host tnam busy sprs poke".split():
 del job[k]
+job.pop("t0c", None)
 job["t0"] = int(job["t0"])
 job["hash"] = []
 job["done"] = 1
@@ -3789,7 +3880,7 @@ class Up2k(object):
 db_ip = ""
 else:
 # plugins may expect this to look like an actual IP
-db_ip = "1.1.1.1" if self.args.no_db_ip else ip
+db_ip = "1.1.1.1" if "no_db_ip" in vflags else ip

 sql = "insert into up values (?,?,?,?,?,?,?)"
 v = (dwark, int(ts), sz, rd, fn, db_ip, int(at or 0))
@@ -4213,7 +4304,7 @@ class Up2k(object):
 self.log(t, 1)
 raise Pebkac(405, t)

-bos.makedirs(os.path.dirname(dabs))
+bos.makedirs(os.path.dirname(dabs), dvn.flags["chmod_d"])

 c1, w, ftime_, fsize_, ip, at = self._find_from_vpath(
 svn_dbv.realpath, srem_dbv
@@ -4389,7 +4480,7 @@ class Up2k(object):
 vp = vjoin(dvp, rem)
 try:
 dvn, drem = self.vfs.get(vp, uname, False, True)
-bos.mkdir(dvn.canonical(drem))
+bos.mkdir(dvn.canonical(drem), dvn.flags["chmod_d"])
 except:
 pass

@@ -4459,7 +4550,7 @@ class Up2k(object):

 is_xvol = svn.realpath != dvn.realpath

-bos.makedirs(os.path.dirname(dabs))
+bos.makedirs(os.path.dirname(dabs), dvn.flags["chmod_d"])

 if is_dirlink:
 dlabs = absreal(sabs)
@@ -4548,7 +4639,7 @@ class Up2k(object):
 dlink = bos.readlink(sabs)
 dlink = os.path.join(os.path.dirname(sabs), dlink)
 dlink = bos.path.abspath(dlink)
-self._symlink(dlink, dabs, dvn.flags, lmod=ftime)
+self._symlink(dlink, dabs, dvn.flags, lmod=ftime, is_mv=True)
 wunlink(self.log, sabs, svn.flags)
 else:
 atomic_move(self.log, sabs, dabs, svn.flags)
@@ -4767,7 +4858,7 @@ class Up2k(object):
 flags = self.flags.get(ptop) or {}
 atomic_move(self.log, sabs, slabs, flags)
 bos.utime(slabs, (int(time.time()), int(mt)), False)
-self._symlink(slabs, sabs, flags, False)
+self._symlink(slabs, sabs, flags, False, is_mv=True)
 full[slabs] = (ptop, rem)
 sabs = slabs

@@ -4826,7 +4917,9 @@ class Up2k(object):
 # (for example a volume with symlinked dupes but no --dedup);
 # fsrc=sabs is then a source that currently resolves to copy

-self._symlink(dabs, alink, flags, False, lmod=lmod or 0, fsrc=sabs)
+self._symlink(
+dabs, alink, flags, False, lmod=lmod or 0, fsrc=sabs, is_mv=True
+)

 return len(full) + len(links)

@@ -4934,13 +5027,15 @@ class Up2k(object):
 if hr.get("reloc"):
 x = pathmod(self.vfs, ap_chk, vp_chk, hr["reloc"])
 if x:
-zvfs = vfs
+ud1 = (vfs.vpath, job["prel"], job["name"])
 pdir, _, job["name"], (vfs, rem) = x
 job["vcfg"] = vf = vfs.flags
 job["ptop"] = vfs.realpath
 job["vtop"] = vfs.vpath
 job["prel"] = rem
-if zvfs.vpath != vfs.vpath:
+job["name"] = sanitize_fn(job["name"], "")
+ud2 = (vfs.vpath, job["prel"], job["name"])
+if ud1 != ud2:
 self.log("xbu reloc2:%d..." % (depth,), 6)
 return self._handle_json(job, depth + 1)

@@ -4962,8 +5057,13 @@ class Up2k(object):
 else:
 dip = self.hub.iphash.s(job["addr"])

-suffix = "-%.6f-%s" % (job["t0"], dip)
-f, job["tnam"] = ren_open(tnam, "wb", fdir=pdir, suffix=suffix)
+f, job["tnam"] = ren_open(
+tnam,
+"wb",
+fdir=pdir,
+suffix="-%.6f-%s" % (job["t0"], dip),
+chmod=vf.get("chmod_f", -1),
+)
 try:
 abspath = djoin(pdir, job["tnam"])
 sprs = job["sprs"]
@@ -5044,7 +5144,7 @@ class Up2k(object):

 def _snap_reg(self, ptop: str, reg: dict[str, dict[str, Any]]) -> None:
 now = time.time()
-histpath = self.vfs.histtab.get(ptop)
+histpath = self.vfs.dbpaths.get(ptop)
 if not histpath:
 return
|||||||
@@ -31,6 +31,17 @@ from collections import Counter
|
|||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
|
||||||
|
try:
|
||||||
|
from zlib_ng import gzip_ng as gzip
|
||||||
|
from zlib_ng import zlib_ng as zlib
|
||||||
|
|
||||||
|
sys.modules["gzip"] = gzip
|
||||||
|
# sys.modules["zlib"] = zlib
|
||||||
|
# `- somehow makes tarfile 3% slower with default malloc, and barely faster with mimalloc
|
||||||
|
except:
|
||||||
|
import gzip
|
||||||
|
import zlib
|
||||||
|
|
||||||
from .__init__ import (
|
from .__init__ import (
|
||||||
ANYWIN,
|
ANYWIN,
|
||||||
EXE,
|
EXE,
|
||||||
@@ -94,6 +105,7 @@ def _ens(want: str) -> tuple[int, ...]:
|
|||||||
# WSAENOTSOCK - no longer a socket
|
# WSAENOTSOCK - no longer a socket
|
||||||
# EUNATCH - can't assign requested address (wifi down)
|
# EUNATCH - can't assign requested address (wifi down)
|
||||||
E_SCK = _ens("ENOTCONN EUNATCH EBADF WSAENOTSOCK WSAECONNRESET")
|
E_SCK = _ens("ENOTCONN EUNATCH EBADF WSAENOTSOCK WSAECONNRESET")
|
||||||
|
E_SCK_WR = _ens("EPIPE ESHUTDOWN EBADFD")
|
||||||
E_ADDR_NOT_AVAIL = _ens("EADDRNOTAVAIL WSAEADDRNOTAVAIL")
|
E_ADDR_NOT_AVAIL = _ens("EADDRNOTAVAIL WSAEADDRNOTAVAIL")
|
||||||
E_ADDR_IN_USE = _ens("EADDRINUSE WSAEADDRINUSE")
|
E_ADDR_IN_USE = _ens("EADDRINUSE WSAEADDRINUSE")
|
||||||
E_ACCESS = _ens("EACCES WSAEACCES")
|
E_ACCESS = _ens("EACCES WSAEACCES")
|
||||||
@@ -103,8 +115,14 @@ IP6ALL = "0:0:0:0:0:0:0:0"
|
|||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import ctypes
|
|
||||||
import fcntl
|
import fcntl
|
||||||
|
|
||||||
|
HAVE_FCNTL = True
|
||||||
|
except:
|
||||||
|
HAVE_FCNTL = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ctypes
|
||||||
import termios
|
import termios
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
@@ -136,6 +154,14 @@ try:
|
|||||||
except:
|
except:
|
||||||
HAVE_PSUTIL = False
|
HAVE_PSUTIL = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
if os.environ.get("PRTY_NO_MAGIC"):
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
import magic
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
if True: # pylint: disable=using-constant-test
|
if True: # pylint: disable=using-constant-test
|
||||||
import types
|
import types
|
||||||
from collections.abc import Callable, Iterable
|
from collections.abc import Callable, Iterable
|
||||||
@@ -158,8 +184,6 @@ if True: # pylint: disable=using-constant-test
|
|||||||
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
import magic
|
|
||||||
|
|
||||||
from .authsrv import VFS
|
from .authsrv import VFS
|
||||||
from .broker_util import BrokerCli
|
from .broker_util import BrokerCli
|
||||||
from .up2k import Up2k
|
from .up2k import Up2k
|
||||||
@@ -234,6 +258,9 @@ SYMTIME = PY36 and os.utime in os.supports_follow_symlinks
|
|||||||
|
|
||||||
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'
|
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'
|
||||||
|
|
||||||
|
# smart enough to understand javascript while also ignoring rel="nofollow"
|
||||||
|
BAD_BOTS = r"Barkrowler|bingbot|BLEXBot|Googlebot|GoogleOther|GPTBot|PetalBot|SeekportBot|SemrushBot|YandexBot"
|
||||||
|
|
||||||
FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
|
FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"
|
||||||
|
|
||||||
URL_PRJ = "https://github.com/9001/copyparty"
|
URL_PRJ = "https://github.com/9001/copyparty"
|
||||||
@@ -448,8 +475,12 @@ UNHUMANIZE_UNITS = {
|
|||||||
|
|
||||||
VF_CAREFUL = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}
|
VF_CAREFUL = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}
|
||||||
|
|
||||||
|
FN_EMB = set([".prologue.html", ".epilogue.html", "readme.md", "preadme.md"])
|
||||||
|
|
||||||
|
|
||||||
def read_ram() -> tuple[float, float]:
|
def read_ram() -> tuple[float, float]:
|
||||||
|
# NOTE: apparently no need to consider /sys/fs/cgroup/memory.max
|
||||||
|
# (cgroups2) since the limit is synced to /proc/meminfo
|
||||||
a = b = 0
|
a = b = 0
|
||||||
try:
|
try:
|
||||||
with open("/proc/meminfo", "rb", 0x10000) as f:
|
with open("/proc/meminfo", "rb", 0x10000) as f:
|
||||||
@@ -594,6 +625,38 @@ except Exception as ex:
|
|||||||
print("using fallback base64 codec due to %r" % (ex,))
|
print("using fallback base64 codec due to %r" % (ex,))
|
||||||
|
|
||||||
|
|
||||||
|
class NotUTF8(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def read_utf8(log: Optional["NamedLogger"], ap: Union[str, bytes], strict: bool) -> str:
|
||||||
|
with open(ap, "rb") as f:
|
||||||
|
buf = f.read()
|
||||||
|
|
||||||
|
try:
|
||||||
|
return buf.decode("utf-8", "strict")
|
||||||
|
except UnicodeDecodeError as ex:
|
||||||
|
eo = ex.start
|
||||||
|
eb = buf[eo : eo + 1]
|
||||||
|
|
||||||
|
if not strict:
|
||||||
|
t = "WARNING: The file [%s] is not using the UTF-8 character encoding; some characters in the file will be skipped/ignored. The first unreadable character was byte %r at offset %d. Please convert this file to UTF-8 by opening the file in your text-editor and saving it as UTF-8."
|
||||||
|
t = t % (ap, eb, eo)
|
||||||
|
if log:
|
||||||
|
log(t, 3)
|
||||||
|
else:
|
||||||
|
print(t)
|
||||||
|
return buf.decode("utf-8", "replace")
|
||||||
|
|
||||||
|
t = "ERROR: The file [%s] is not using the UTF-8 character encoding, and cannot be loaded. The first unreadable character was byte %r at offset %d. Please convert this file to UTF-8 by opening the file in your text-editor and saving it as UTF-8."
|
||||||
|
t = t % (ap, eb, eo)
|
||||||
|
if log:
|
||||||
|
log(t, 3)
|
||||||
|
else:
|
||||||
|
print(t)
|
||||||
|
raise NotUTF8(t)
|
||||||
|
|
||||||
|
|
||||||
class Daemon(threading.Thread):
|
class Daemon(threading.Thread):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -1200,8 +1263,6 @@ class Magician(object):
|
|||||||
self.magic: Optional["magic.Magic"] = None
|
self.magic: Optional["magic.Magic"] = None
|
||||||
|
|
||||||
def ext(self, fpath: str) -> str:
|
def ext(self, fpath: str) -> str:
|
||||||
import magic
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if self.bad_magic:
|
if self.bad_magic:
|
||||||
raise Exception()
|
raise Exception()
|
||||||
@@ -1419,8 +1480,6 @@ def stackmon(fp: str, ival: float, suffix: str) -> None:
|
|||||||
buf = st.encode("utf-8", "replace")
|
buf = st.encode("utf-8", "replace")
|
||||||
|
|
||||||
if fp.endswith(".gz"):
|
if fp.endswith(".gz"):
|
||||||
import gzip
|
|
||||||
|
|
||||||
# 2459b 2304b 2241b 2202b 2194b 2191b lv3..8
|
# 2459b 2304b 2241b 2202b 2194b 2191b lv3..8
|
||||||
# 0.06s 0.08s 0.11s 0.13s 0.16s 0.19s
|
# 0.06s 0.08s 0.11s 0.13s 0.16s 0.19s
|
||||||
buf = gzip.compress(buf, compresslevel=6)
|
buf = gzip.compress(buf, compresslevel=6)
|
||||||
@@ -1500,6 +1559,12 @@ def vol_san(vols: list["VFS"], txt: bytes) -> bytes:
|
|||||||
txt = txt.replace(bap.replace(b"\\", b"\\\\"), bvp)
|
txt = txt.replace(bap.replace(b"\\", b"\\\\"), bvp)
|
||||||
txt = txt.replace(bhp.replace(b"\\", b"\\\\"), bvph)
|
txt = txt.replace(bhp.replace(b"\\", b"\\\\"), bvph)
|
||||||
|
|
||||||
|
if vol.histpath != vol.dbpath:
|
||||||
|
bdp = vol.dbpath.encode("utf-8")
|
||||||
|
bdph = b"$db(/" + bvp + b")"
|
||||||
|
txt = txt.replace(bdp, bdph)
|
||||||
|
txt = txt.replace(bdp.replace(b"\\", b"\\\\"), bdph)
|
||||||
|
|
||||||
if txt != txt0:
|
if txt != txt0:
|
||||||
txt += b"\r\nNOTE: filepaths sanitized; see serverlog for correct values"
|
txt += b"\r\nNOTE: filepaths sanitized; see serverlog for correct values"
|
||||||
|
|
||||||
@@ -1520,6 +1585,7 @@ def ren_open(fname: str, *args: Any, **kwargs: Any) -> tuple[typing.IO[Any], str
|
|||||||
fun = kwargs.pop("fun", open)
|
fun = kwargs.pop("fun", open)
|
||||||
fdir = kwargs.pop("fdir", None)
|
fdir = kwargs.pop("fdir", None)
|
||||||
suffix = kwargs.pop("suffix", None)
|
suffix = kwargs.pop("suffix", None)
|
||||||
|
chmod = kwargs.pop("chmod", -1)
|
||||||
|
|
||||||
if fname == os.devnull:
|
if fname == os.devnull:
|
||||||
return fun(fname, *args, **kwargs), fname
|
return fun(fname, *args, **kwargs), fname
|
||||||
@@ -1563,6 +1629,11 @@ def ren_open(fname: str, *args: Any, **kwargs: Any) -> tuple[typing.IO[Any], str
                 fp2 = os.path.join(fdir, fp2)
                 with open(fsenc(fp2), "wb") as f2:
                     f2.write(orig_name.encode("utf-8"))
+                    if chmod >= 0:
+                        os.fchmod(f2.fileno(), chmod)
+
+            if chmod >= 0:
+                os.fchmod(f.fileno(), chmod)

             return f, fname
@@ -1903,7 +1974,7 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
     return fn


-def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
+def _gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
     if alg == 1:
         zs = "%s %s %s %s" % (salt, fspath, fsize, inode)
     else:

@@ -1913,6 +1984,13 @@ def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str
     return ub64enc(hashlib.sha512(zb).digest()).decode("ascii")


+def _gen_filekey_w(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
+    return _gen_filekey(alg, salt, fspath.replace("/", "\\"), fsize, inode)
+
+
+gen_filekey = _gen_filekey_w if ANYWIN else _gen_filekey
+
+
 def gen_filekey_dbg(
     alg: int,
     salt: str,
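The dispatch above keeps filekeys stable on Windows by hashing the backslash form of the path; a rough hedged illustration of the intent (hypothetical salt/size/inode values, simplified hashing, not copyparty code):

```python
# shows why the Windows wrapper normalizes separators before hashing:
# the same file reached via "a/b" and via "a\b" must yield one filekey
import hashlib, base64

def demo_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
    zs = "%s %s %s %s" % (salt, fspath, fsize, inode)
    zb = zs.encode("utf-8", "replace")
    return base64.urlsafe_b64encode(hashlib.sha512(zb).digest())[:16].decode("ascii")

vpath_form = "music/song.flac"        # hypothetical
winpath_form = "music\\song.flac"     # same file, windows separators
print(demo_filekey("salt", vpath_form.replace("/", "\\"), 1234, 99)
      == demo_filekey("salt", winpath_form, 1234, 99))   # True
```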
@@ -1959,15 +2037,25 @@ def formatdate(ts: Optional[float] = None) -> str:
     return RFC2822 % (WKDAYS[wd], d, MONTHS[mo - 1], y, h, mi, s)


-def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
+def gencookie(
+    k: str, v: str, r: str, lax: bool, tls: bool, dur: int = 0, txt: str = ""
+) -> str:
     v = v.replace("%", "%25").replace(";", "%3B")
     if dur:
         exp = formatdate(time.time() + dur)
     else:
         exp = "Fri, 15 Aug 1997 01:00:00 GMT"

-    t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=Lax"
-    return t % (k, v, r, exp, "; Secure" if tls else "", txt)
+    t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=%s"
+    return t % (
+        k,
+        v,
+        r,
+        exp,
+        "; Secure" if tls else "",
+        txt,
+        "Lax" if lax else "Strict",
+    )


 def humansize(sz: float, terse: bool = False) -> str:
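For a sense of what the new `lax` parameter changes, here is roughly what the emitted header looks like when evaluating the format string above directly (the cookie name and value are made up; the exact arguments copyparty passes are not shown here):

```python
# hedged illustration of the template above; "cppwd"/"hunter2" are placeholders
k, v, r, exp = "cppwd", "hunter2", "", "Fri, 15 Aug 1997 01:00:00 GMT"
tls, txt, lax = False, "", False
t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=%s"
print(t % (k, v, r, exp, "; Secure" if tls else "", txt, "Lax" if lax else "Strict"))
# cppwd=hunter2; Path=/; Expires=Fri, 15 Aug 1997 01:00:00 GMT; SameSite=Strict
```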
@@ -2336,11 +2424,11 @@ def pathmod(

     # try to map abspath to vpath
     np = np.replace("/", os.sep)
-    for vn_ap, vn in vfs.all_aps:
+    for vn_ap, vns in vfs.all_aps:
         if not np.startswith(vn_ap):
             continue
         zs = np[len(vn_ap) :].replace(os.sep, "/")
-        nvp = vjoin(vn.vpath, zs)
+        nvp = vjoin(vns[0].vpath, zs)
         break

     if nvp == "\n":
@@ -2523,6 +2611,11 @@ def _fs_mvrm(
             now = time.time()
             if ex.errno == errno.ENOENT:
                 return False
+            if not attempt and ex.errno == errno.EXDEV:
+                t = "using copy+delete (%s)\n %s\n %s"
+                log(t % (ex.strerror, src, dst))
+                osfun = shutil.move
+                continue
             if now - t0 > maxtime or attempt == 90209:
                 raise
             if not attempt:
@@ -2547,15 +2640,18 @@ def atomic_move(log: "NamedLogger", src: str, dst: str, flags: dict[str, Any]) -
     elif flags.get("mv_re_t"):
         _fs_mvrm(log, src, dst, True, flags)
     else:
-        os.replace(bsrc, bdst)
-
-
-def wrename(log: "NamedLogger", src: str, dst: str, flags: dict[str, Any]) -> bool:
-    if not flags.get("mv_re_t"):
-        os.rename(fsenc(src), fsenc(dst))
-        return True
-
-    return _fs_mvrm(log, src, dst, False, flags)
+        try:
+            os.replace(bsrc, bdst)
+        except OSError as ex:
+            if ex.errno != errno.EXDEV:
+                raise
+            t = "using copy+delete (%s);\n %s\n %s"
+            log(t % (ex.strerror, src, dst))
+            try:
+                os.unlink(bdst)
+            except:
+                pass
+            shutil.move(bsrc, bdst)


 def wunlink(log: "NamedLogger", abspath: str, flags: dict[str, Any]) -> bool:
@@ -3084,11 +3180,13 @@ def unescape_cookie(orig: str) -> str:
     return "".join(ret)


-def guess_mime(url: str, fallback: str = "application/octet-stream") -> str:
+def guess_mime(
+    url: str, path: str = "", fallback: str = "application/octet-stream"
+) -> str:
     try:
         ext = url.rsplit(".", 1)[1].lower()
     except:
-        return fallback
+        ext = ""

     ret = MIMES.get(ext)

@@ -3096,6 +3194,16 @@ def guess_mime(url: str, fallback: str = "application/octet-stream") -> str:
         x = mimetypes.guess_type(url)
         ret = "application/{}".format(x[1]) if x[1] else x[0]

+    if not ret and path:
+        try:
+            with open(fsenc(path), "rb", 0) as f:
+                ret = magic.from_buffer(f.read(4096), mime=True)
+                if ret.startswith("text/htm"):
+                    # avoid serving up HTML content unless there was actually a .html extension
+                    ret = "text/plain"
+        except Exception as ex:
+            pass
+
     if not ret:
         ret = fallback
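The new `path` argument lets the extension-based lookup fall back to sniffing the file's bytes; a hedged usage sketch (the file path is hypothetical, and this assumes python-magic is installed, as in the `iv`/`dj` docker flavors mentioned in the release notes):

```python
# extensionless file: the URL gives no extension, so the first 4 KiB of the
# file on disk are sniffed; anything detected as HTML is downgraded to text/plain
ct = guess_mime("readme", "/srv/files/readme")
print(ct)   # e.g. "text/plain" or "image/png", depending on the bytes
```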
@@ -3888,8 +3996,75 @@ def hidedir(dp) -> None:
         pass


+_flocks = {}
+
+
+def _lock_file_noop(ap: str) -> bool:
+    return True
+
+
+def _lock_file_ioctl(ap: str) -> bool:
+    assert fcntl  # type: ignore # !rm
+    try:
+        fd = _flocks.pop(ap)
+        os.close(fd)
+    except:
+        pass
+
+    fd = os.open(ap, os.O_RDWR | os.O_CREAT, 438)
+    # NOTE: the fcntl.lockf identifier is (pid,node);
+    # the lock will be dropped if os.close(os.open(ap))
+    # is performed anywhere else in this thread
+
+    try:
+        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        _flocks[ap] = fd
+        return True
+    except Exception as ex:
+        eno = getattr(ex, "errno", -1)
+        try:
+            os.close(fd)
+        except:
+            pass
+        if eno in (errno.EAGAIN, errno.EACCES):
+            return False
+        print("WARNING: unexpected errno %d from fcntl.lockf; %r" % (eno, ex))
+        return True
+
+
+def _lock_file_windows(ap: str) -> bool:
+    try:
+        import msvcrt
+
+        try:
+            fd = _flocks.pop(ap)
+            os.close(fd)
+        except:
+            pass
+
+        fd = os.open(ap, os.O_RDWR | os.O_CREAT, 438)
+        msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+        return True
+    except Exception as ex:
+        eno = getattr(ex, "errno", -1)
+        if eno == errno.EACCES:
+            return False
+        print("WARNING: unexpected errno %d from msvcrt.locking; %r" % (eno, ex))
+        return True
+
+
+if os.environ.get("PRTY_NO_DB_LOCK"):
+    lock_file = _lock_file_noop
+elif ANYWIN:
+    lock_file = _lock_file_windows
+elif HAVE_FCNTL:
+    lock_file = _lock_file_ioctl
+else:
+    lock_file = _lock_file_noop
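As the dispatch above suggests, `lock_file` simply reports whether this process could take a non-blocking exclusive advisory lock on a path (fcntl on POSIX, msvcrt on Windows, a no-op otherwise); a hedged sketch of a caller (the path and message are hypothetical, not copyparty's actual call site):

```python
# refuse to open a database whose lockfile another instance already holds
db_lock = "/srv/files/.hist/up2k.db.lock"
if not lock_file(db_lock):
    print("WARNING: %s appears to be locked by another process" % (db_lock,))
```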
 try:
-    if sys.version_info < (3, 10):
+    if sys.version_info < (3, 10) or os.environ.get("PRTY_NO_IMPRESO"):
         # py3.8 doesn't have .files
         # py3.9 has broken .is_file
         raise ImportError()
@@ -4021,9 +4196,22 @@ class WrongPostKey(Pebkac):
         self.datagen = datagen


-_: Any = (mp, BytesIO, quote, unquote, SQLITE_VER, JINJA_VER, PYFTPD_VER, PARTFTPY_VER)
+_: Any = (
+    gzip,
+    mp,
+    zlib,
+    BytesIO,
+    quote,
+    unquote,
+    SQLITE_VER,
+    JINJA_VER,
+    PYFTPD_VER,
+    PARTFTPY_VER,
+)
 __all__ = [
+    "gzip",
     "mp",
+    "zlib",
     "BytesIO",
     "quote",
     "unquote",
@@ -592,9 +592,7 @@ window.baguetteBox = (function () {
            preloadPrev(currentIndex);
        });

-       clmod(ebi('bbox-btns'), 'off');
-       clmod(btnPrev, 'off');
-       clmod(btnNext, 'off');
+       show_buttons(0);

        updateOffset();
        overlay.style.display = 'block';

@@ -776,6 +774,8 @@ window.baguetteBox = (function () {
        if (is_vid) {
            image.volume = clamp(fcfg_get('vol', dvol / 100), 0, 1);
            image.setAttribute('controls', 'controls');
+           image.setAttribute('playsinline', '1');
+           // ios ignores poster
            image.onended = vidEnd;
            image.onplay = function () { show_buttons(1); };
            image.onpause = function () { show_buttons(); };
@@ -4,6 +4,8 @@
 	--grid-sz: 10em;
 	--grid-ln: 3;
 	--nav-sz: 16em;
+	--sbw: 0.5em;
+	--sbh: 0.5em;

 	--fg: #ccc;
 	--fg-max: #fff;

@@ -1151,17 +1153,17 @@ html.y #widget.open {
 	background: #fff;
 	background: var(--bg-u3);
 }
-#wfs, #wfm, #wzip, #wnp {
+#wfs, #wfm, #wzip, #wnp, #wm3u {
 	display: none;
 }
-#wfs, #wzip, #wnp {
+#wfs, #wzip, #wnp, #wm3u {
 	margin-right: .2em;
 	padding-right: .2em;
 	border: 1px solid var(--bg-u5);
 	border-width: 0 .1em 0 0;
 }
-#wfm.act+#wzip,
-#wfm.act+#wzip+#wnp {
+#wfm.act+#wzip1+#wzip,
+#wfm.act+#wzip1+#wzip+#wnp {
 	margin-left: .2em;
 	padding-left: .2em;
 	border-left-width: .1em;

@@ -1175,14 +1177,18 @@ html.y #widget.open {
 	line-height: 1em;
 }
 #wtoggle.sel #wzip,
+#wtoggle.m3u #wm3u,
 #wtoggle.np #wnp {
 	display: inline-block;
 }
+#wtoggle.sel #wzip1,
 #wtoggle.sel.np #wnp {
 	display: none;
 }
 #wfm a,
 #wnp a,
+#wm3u a,
+#zip1,
 #wzip a {
 	font-size: .5em;
 	padding: 0 .3em;

@@ -1190,6 +1196,13 @@ html.y #widget.open {
 	position: relative;
 	display: inline-block;
 }
+#zip1 {
+	font-size: .38em;
+}
+#wm3u a {
+	margin: -.2em .1em;
+	font-size: .45em;
+}
 #wfs {
 	font-size: .36em;
 	text-align: right;

@@ -1198,13 +1211,22 @@ html.y #widget.open {
 	border-width: 0 .25em 0 0;
 }
 #wfm span,
+#wm3u span,
+#zip1 span,
 #wnp span {
 	font-size: .6em;
 	display: block;
 }
+#zip1 span {
+	font-size: .9em;
+}
 #wnp span {
 	font-size: .7em;
 }
+#wm3u span {
+	font-size: .77em;
+	padding-top: .2em;
+}
 #wfm a:not(.en) {
 	opacity: .3;
 	color: var(--fm-off);
@@ -1538,8 +1560,8 @@ html {
 	z-index: 1;
 	position: fixed;
 	background: var(--tree-bg);
-	left: -.98em;
-	width: calc(var(--nav-sz) - 0.5em);
+	left: -.96em;
+	width: calc(.3em + var(--nav-sz) - var(--sbw));
 	border-bottom: 1px solid var(--bg-u5);
 	overflow: hidden;
 }

@@ -1695,7 +1717,7 @@ html.y #tree.nowrap .ntree a+a:hover {
 	line-height: 0;
 }
 .dumb_loader_thing {
-	display: inline-block;
+	display: block;
 	margin: 1em .3em 1em 1em;
 	padding: 0 1.2em 0 0;
 	font-size: 4em;

@@ -1703,9 +1725,16 @@ html.y #tree.nowrap .ntree a+a:hover {
 	min-height: 1em;
 	opacity: 0;
 	animation: 1s linear .15s infinite forwards spin, .2s ease .15s 1 forwards fadein;
-	position: absolute;
+	position: fixed;
+	top: .3em;
 	z-index: 9;
 }
+#dlt_t {
+	left: 0;
+}
+#dlt_f {
+	right: .5em;
+}
 #files .cfg {
 	display: none;
 	font-size: 2em;

@@ -1798,10 +1827,11 @@ html.y #tree.nowrap .ntree a+a:hover {
 	line-height: 2.3em;
 	margin-bottom: 1.5em;
 }
+#hdoc,
 #ghead {
 	position: sticky;
 	top: -.3em;
-	z-index: 1;
+	z-index: 2;
 }
 .ghead .btn {
 	position: relative;

@@ -1811,6 +1841,13 @@ html.y #tree.nowrap .ntree a+a:hover {
 	white-space: pre;
 	padding-left: .3em;
 }
+#tailbtns {
+	display: none;
+}
+#taildoc.on+#tailbtns {
+	display: inherit;
+	display: unset;
+}
 #op_unpost {
 	padding: 1em;
 }

@@ -1907,6 +1944,9 @@ html.y #tree.nowrap .ntree a+a:hover {
 	padding: 1em 0 1em 0;
 	border-radius: .3em;
 }
+#doc.wrap {
+	white-space: pre-wrap;
+}
 html.y #doc {
 	box-shadow: 0 0 .3em var(--bg-u5);
 	background: #f7f7f7;

@@ -1995,6 +2035,9 @@ a.btn,
 	font-family: 'scp', monospace, monospace;
 	font-family: var(--font-mono), 'scp', monospace, monospace;
 }
+#hkhelp b {
+	text-shadow: 1px 0 0 var(--fg), -1px 0 0 var(--fg), 0 -1px 0 var(--fg);
+}
 html.noscroll,
 html.noscroll .sbar {
 	scrollbar-width: none;

@@ -2156,18 +2199,25 @@ html.y #bbox-overlay figcaption a {
 	top: calc(50% - 30px);
 	width: 44px;
 	height: 60px;
+	transition: background-color .3s ease, color .3s ease, left .3s ease, right .3s ease;
+}
+#bbox-btns button {
+	transition: background-color .3s ease, color .3s ease;
+}
+#bbox-btns {
+	transition: top .3s ease;
 }
 .bbox-btn {
 	position: fixed;
 }
-.bbox-btn,
-#bbox-btns {
-	opacity: 1;
-	animation: opacity .2s infinite ease-in-out;
+#bbox-next.off {
+	right: -2.6em;
+}
+#bbox-prev.off {
+	left: -2.6em;
 }
-.bbox-btn.off,
 #bbox-btns.off {
-	opacity: 0;
+	top: -2.2em;
 }
 #bbox-overlay button {
 	cursor: pointer;

@@ -2178,8 +2228,6 @@ html.y #bbox-overlay figcaption a {
 	border-radius: 15%;
 	background: rgba(50, 50, 50, 0.5);
 	color: rgba(255,255,255,0.7);
-	transition: background-color .3s ease;
-	transition: color .3s ease;
 	font-size: 1.4em;
 	line-height: 1.4em;
 	vertical-align: top;

@@ -3028,7 +3076,8 @@ html.b .ntree a {
 	padding: .6em .2em;
 }
 html.b #treepar {
-	margin-left: .62em;
+	margin-left: .63em;
+	width: calc(.1em + var(--nav-sz) - var(--sbw));
 	border-bottom: .2em solid var(--f-h-b1);
 }
 html.b #wrap {

@@ -3200,7 +3249,7 @@ html.d #treepar {

 #ggrid>a>span {
 	text-align: center;
-	padding: 0.2em;
+	padding: .2em .2em .15em .2em;
 }
 }

@@ -3223,4 +3272,9 @@ html.d #treepar {
 .dropdesc>div>div {
 	transition: none;
 }
+#bbox-next,
+#bbox-prev,
+#bbox-btns {
+	transition: background-color .3s ease, color .3s ease;
+}
 }
(one file diff was suppressed because it is too large)

copyparty/web/idp.html  (new file, 55 lines)
@@ -0,0 +1,55 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+	<meta charset="utf-8">
+	<title>{{ s_doctitle }}</title>
+	<meta http-equiv="X-UA-Compatible" content="IE=edge">
+	<meta name="viewport" content="width=device-width, initial-scale=0.8">
+	<meta name="robots" content="noindex, nofollow">
+	<meta name="theme-color" content="#{{ tcolor }}">
+	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/shares.css?_={{ ts }}">
+	<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+	{{ html_head }}
+</head>
+
+<body>
+	<div id="wrap">
+		<a href="{{ r }}/?idp">refresh</a>
+		<a href="{{ r }}/?h">control-panel</a>
+
+		<table id="tab"><thead><tr>
+			<th>forget</th>
+			<th>user</th>
+			<th>groups</th>
+		</tr></thead><tbody>
+			{% for un, gn in rows %}
+			<tr>
+				<td><a href="{{ r }}/?idp=rm={{ un|e }}">forget</a></td>
+				<td>{{ un|e }}</td>
+				<td>{{ gn|e }}</td>
+			</tr>
+			{% endfor %}
+		</tbody></table>
+		{% if not rows %}
+		(there are no IdP users in the cache)
+		{% endif %}
+	</div>
+	<a href="#" id="repl">π</a>
+	<script>
+
+		var SR="{{ r }}",
+			lang="{{ lang }}",
+			dfavico="{{ favico }}";
+
+		var STG = window.localStorage;
+		document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
+
+	</script>
+	<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+	{%- if js %}
+	<script src="{{ js }}_={{ ts }}"></script>
+	{%- endif %}
+</body>
+</html>
@@ -1078,26 +1078,28 @@ action_stack = (function () {
        var p1 = from.length,
            p2 = to.length;

-       while (p1-- > 0 && p2-- > 0)
+       while (p1 --> 0 && p2 --> 0)
            if (from[p1] != to[p2])
                break;

-       if (car > ++p1) {
+       if (car > ++p1)
            car = p1;
-       }

        var txt = from.substring(car, p1)
        return {
            car: car,
-           cdr: ++p2,
+           cdr: p2 + (car && 1),
            txt: txt,
            cpos: cpos
        };
    }

    var undiff = function (from, change) {
+       var t1 = from.substring(0, change.car),
+           t2 = from.substring(change.cdr);
+
        return {
-           txt: from.substring(0, change.car) + change.txt + from.substring(change.cdr),
+           txt: t1 + change.txt + t2,
            cpos: change.cpos
        };
    }
@@ -122,7 +122,7 @@
 <input type="hidden" id="la" name="act" value="login" />
 <input type="password" id="lp" name="cppwd" placeholder=" password" />
 <input type="hidden" name="uhash" id="uhash" value="x" />
-<input type="submit" id="ls" value="Login" />
+<input type="submit" id="ls" value="login" />
 {% if chpw %}
 <a id="x" href="#">change password</a>
 {% endif %}

@@ -135,6 +135,10 @@

 <h1 id="cc">other stuff:</h1>
 <ul>
+	{%- if this.uname in this.args.idp_adm_set %}
+	<li><a id="ag" href="{{ r }}/?idp">view idp cache</a></li>
+	{% endif %}
+
 	{%- if this.uname != '*' and this.args.shr %}
 	<li><a id="y" href="{{ r }}/?shares">edit shares</a></li>
 	{% endif %}
@@ -39,6 +39,7 @@ var Ls = {
        "ad1": "no304 stopper all bruk av cache. Hvis ikke k304 var nok, prøv denne. Vil mangedoble dataforbruk!",
        "ae1": "utgående:",
        "af1": "vis nylig opplastede filer",
+       "ag1": "vis kjente IdP-brukere",
    },
    "eng": {
        "d2": "shows the state of all active threads",

@@ -90,6 +91,7 @@ var Ls = {
        "ad1": "启用 no304 将禁用所有缓存;如果 k304 不够,可以尝试此选项。这将消耗大量的网络流量!", //m
        "ae1": "正在下载:", //m
        "af1": "显示最近上传的文件", //m
+       "ag1": "查看已知 IdP 用户", //m
    }
 };
@@ -36,7 +36,7 @@
 <span class="os lin mac">
 	{% if accs %}<code><b id="pw0">{{ pw }}</b></code>=password, {% endif %}<code><b>mp</b></code>=mountpoint
 </span>
-<a href="#" id="setpw">use real password</a>
+{% if accs %}<a href="#" id="setpw">use real password</a>{% endif %}
 </p>


@@ -101,6 +101,7 @@
 gio mount -a dav{{ s }}://{{ ep }}/{{ rvp }}
 {%- endif %}
 </pre>
+<p>on KDE Dolphin, use <code>webdav{{ s }}://{{ ep }}/{{ rvp }}</code></p>
 </div>

 <div class="os mac">
@@ -49,7 +49,7 @@ function setos(os) {
 setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : 'idk');


-ebi('setpw').onclick = function (e) {
+function setpw(e) {
    ev(e);
    modal.prompt('password:', '', function (v) {
        if (!v)

@@ -57,7 +57,7 @@ ebi('setpw').onclick = function (e) {

        var pw0 = ebi('pw0').innerHTML,
            oa = QSA('b');

        for (var a = 0; a < oa.length; a++)
            if (oa[a].innerHTML == pw0)
                oa[a].textContent = v;

@@ -65,3 +65,5 @@ ebi('setpw').onclick = function (e) {
        add_dls();
    });
 }
+if (ebi('setpw'))
+    ebi('setpw').onclick = setpw;
@@ -381,6 +381,9 @@ html.y .btn:focus {
 	box-shadow: 0 .1em .2em #037 inset;
 	outline: #037 solid .1em;
 }
+input, button {
+	font-family: var(--font-main), sans-serif;
+}
 input[type="submit"] {
 	cursor: pointer;
 }
@@ -1,6 +1,18 @@
 "use strict";


+(function () {
+    var x = sread('nosubtle');
+    if (x === '0' || x === '1')
+        nosubtle = parseInt(x);
+    if ((nosubtle > 1 && !CHROME && !FIREFOX) ||
+        (nosubtle > 2 && !CHROME) ||
+        (CHROME && nosubtle > VCHROME) ||
+        !WebAssembly)
+        nosubtle = 0;
+})();
+
+
 function goto_up2k() {
     if (up2k === false)
         return goto('bup');

@@ -23,7 +35,7 @@ var up2k = null,
     m = 'will use ' + sha_js + ' instead of native sha512 due to';

 try {
-    if (sread('nosubtle') || window.nosubtle)
+    if (nosubtle)
         throw 'chickenbit';
     var cf = crypto.subtle || crypto.webkitSubtle;
     cf.digest('SHA-512', new Uint8Array(1)).then(

@@ -825,7 +837,7 @@ function up2k_init(subtle) {
        }
        qsr('#u2depmsg');
        var o = mknod('div', 'u2depmsg');
-       o.innerHTML = m;
+       o.innerHTML = nosubtle ? '' : m;
        ebi('u2foot').appendChild(o);
    }
    loading_deps = true;

@@ -881,10 +893,30 @@ function up2k_init(subtle) {
    bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo);
    bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null);
    bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
-   bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && !(CHROME && MOBILE) && (!subtle || !CHROME), set_hashw);
+   bcfg_bind(uc, 'hashw', 'hashw', !!WebAssembly && !(CHROME && MOBILE) && (!subtle || !CHROME || VCHROME > 136), set_hashw);
+   bcfg_bind(uc, 'hwasm', 'nosubtle', nosubtle, set_nosubtle);
    bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
    bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);

+   uc.ow = parseInt(sread('u2ow', ['0', '1', '2']) || u2ow);
+   uc.owt = ['🛡️', '🕒', '♻️'];
+   function set_ow() {
+       QS('label[for="u2ow"]').innerHTML = uc.owt[uc.ow];
+       ebi('u2ow').checked = true; //cosmetic
+   }
+   ebi('u2ow').onclick = function (e) {
+       ev(e);
+       if (++uc.ow > 2)
+           uc.ow = 0;
+       swrite('u2ow', uc.ow);
+       set_ow();
+       if (uc.ow && !has(perms, 'delete'))
+           toast.warn(10, L.u_enoow, 'noow');
+       else if (toast.tag == 'noow')
+           toast.hide();
+   };
+   set_ow();
+
    var st = {
        "files": [],
        "nfile": {
@@ -1300,7 +1332,7 @@ function up2k_init(subtle) {
        if (bad_files.length) {
            var msg = L.u_badf.format(bad_files.length, ntot);
            for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
-               msg += '-- ' + bad_files[a][1] + '\n';
+               msg += '-- ' + esc(bad_files[a][1]) + '\n';

            msg += L.u_just1;
            return modal.alert(msg, function () {

@@ -1312,7 +1344,7 @@ function up2k_init(subtle) {
        if (nil_files.length) {
            var msg = L.u_blankf.format(nil_files.length, ntot);
            for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
-               msg += '-- ' + nil_files[a][1] + '\n';
+               msg += '-- ' + esc(nil_files[a][1]) + '\n';

            msg += L.u_just1;
            return modal.confirm(msg, function () {

@@ -1324,10 +1356,68 @@ function up2k_init(subtle) {
            });
        }

+       var fps = new Set(), pdp = '';
+       for (var a = 0; a < good_files.length; a++) {
+           var fp = good_files[a][1],
+               dp = vsplit(fp)[0];
+           fps.add(fp);
+           if (pdp != dp) {
+               pdp = dp;
+               dp = dp.slice(0, -1);
+               while (dp) {
+                   fps.add(dp);
+                   dp = vsplit(dp)[0].slice(0, -1);
+               }
+           }
+       }
+
+       var junk = [], rmi = [];
+       for (var a = 0; a < good_files.length; a++) {
+           var fn = good_files[a][1];
+           if (fn.indexOf("/.") < 0 && fn.indexOf("/__MACOS") < 0)
+               continue;
+
+           if (/\/__MACOS|\/\.(DS_Store|AppleDouble|LSOverride|DocumentRevisions-|fseventsd|Spotlight-V[0-9]|TemporaryItems|Trashes|VolumeIcon\.icns|com\.apple\.timemachine\.donotpresent|AppleDB|AppleDesktop|apdisk)/.exec(fn)) {
+               junk.push(good_files[a]);
+               rmi.push(a);
+               continue;
+           }
+
+           if (fn.indexOf("/._") + 1 &&
+               fps.has(fn.replace("/._", "/")) &&
+               fn.split("/").pop().startsWith("._") &&
+               !has(rmi, a)
+           ) {
+               junk.push(good_files[a]);
+               rmi.push(a);
+           }
+       }
+
+       if (!junk.length)
+           return gotallfiles2(good_files);
+
+       junk.sort();
+       rmi.sort(function (a, b) { return a - b; });
+
+       var msg = L.u_applef.format(junk.length, good_files.length);
+       for (var a = 0, aa = Math.min(1000, junk.length); a < aa; a++)
+           msg += '-- ' + esc(junk[a][1]) + '\n';
+
+       return modal.confirm(msg, function () {
+           for (var a = rmi.length - 1; a >= 0; a--)
+               good_files.splice(rmi[a], 1);
+
+           start_actx();
+           gotallfiles2(good_files);
+       }, function () {
+           start_actx();
+           gotallfiles2(good_files);
+       });
+   }
+
+   function gotallfiles2(good_files) {
        good_files.sort(function (a, b) {
-           a = a[1];
-           b = b[1];
-           return a < b ? -1 : a > b ? 1 : 0;
+           return a[1] < b[1] ? -1 : 1;
        });

        var msg = [];
@@ -1338,7 +1428,7 @@ function up2k_init(subtle) {
        if (FIREFOX && good_files.length > 3000)
            msg.push(L.u_ff_many + "\n\n");

-       msg.push(L.u_asku.format(good_files.length, esc(get_vpath())) + '<ul>');
+       msg.push(L.u_asku.format(good_files.length, esc(uricom_dec(get_evpath()))) + '<ul>');
        for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
            msg.push('<li>' + esc(good_files[a][1]) + '</li>');

@@ -1365,9 +1455,16 @@ function up2k_init(subtle) {
        if (CHROME) {
            // chrome-bug 383568268 // #124
            nw = Math.max(1, (nw > 4 ? 4 : (nw - 1)));
+           if (VCHROME < 137)
                nw = (subtle && !MOBILE && nw > 2) ? 2 : nw;
        }

+       var x = sread('u2hashers') || window.u2hashers;
+       if (x) {
+           console.log('u2hashers is overriding default-value ' + nw);
+           nw = parseInt(x);
+       }
+
        for (var a = 0; a < nw; a++)
            hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));

@@ -1380,9 +1477,7 @@ function up2k_init(subtle) {

        if (!uc.az)
            good_files.sort(function (a, b) {
-               a = a[0].size;
-               b = b[0].size;
-               return a < b ? -1 : a > b ? 1 : 0;
+               return a[0].size - b[0].size;
            });

        for (var a = 0; a < good_files.length; a++) {

@@ -1390,7 +1485,7 @@ function up2k_init(subtle) {
                name = good_files[a][1],
                fdir = evpath,
                now = Date.now(),
-               lmod = uc.u2ts ? (fobj.lastModified || now) : 0,
+               lmod = (uc.u2ts && fobj.lastModified) || 0,
                ofs = name.lastIndexOf('/') + 1;

            if (ofs) {
@@ -2054,8 +2149,8 @@ function up2k_init(subtle) {
            try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
        };
        reader.onerror = function () {
-           var err = reader.error + '';
-           var handled = false;
+           var err = esc('' + reader.error),
+               handled = false;

            if (err.indexOf('NotReadableError') !== -1 || // win10-chrome defender
                err.indexOf('NotFoundError') !== -1 // macos-firefox permissions

@@ -2138,6 +2233,7 @@ function up2k_init(subtle) {
            reading = 0,
            max_readers = 1,
            opt_readers = 2,
+           failed = false,
            free = [],
            busy = {},
            nbusy = 0,

@@ -2187,6 +2283,14 @@ function up2k_init(subtle) {
            tasker();
        }

+       function go_fail() {
+           failed = true;
+           if (nbusy)
+               return;
+           apop(st.busy.hash, t);
+           st.bytes.finished += t.size;
+       }
+
        function onmsg(d) {
            d = d.data;
            var k = d[0];

@@ -2201,6 +2305,12 @@ function up2k_init(subtle) {
                return vis_exh(d[1], 'up2k.js', '', '', d[1]);

            if (k == "fail") {
+               var nchunk = d[1];
+               free.push(busy[nchunk]);
+               delete busy[nchunk];
+               nbusy--;
+               reading--;
+
                pvis.seth(t.n, 1, d[1]);
                pvis.seth(t.n, 2, d[2]);
                console.log(d[1], d[2]);

@@ -2208,9 +2318,7 @@ function up2k_init(subtle) {
                got_oserr();

                pvis.move(t.n, 'ng');
-               apop(st.busy.hash, t);
-               st.bytes.finished += t.size;
-               return;
+               return go_fail();
            }

            if (k == "ferr")

@@ -2243,6 +2351,9 @@ function up2k_init(subtle) {
            t.hash.push(nchunk);
            pvis.hashed(t);

+           if (failed)
+               return go_fail();
+
            if (t.hash.length < nchunks)
                return nbusy < opt_readers && go_next();
@@ -2279,7 +2390,7 @@ function up2k_init(subtle) {
        xhr.onerror = xhr.ontimeout = function () {
            console.log('head onerror, retrying', t.name, t);
            if (!toast.visible)
-               toast.warn(9.98, L.u_enethd + "\n\nfile: " + t.name, t);
+               toast.warn(9.98, L.u_enethd + "\n\nfile: " + esc(t.name), t);

            apop(st.busy.head, t);
            st.todo.head.unshift(t);

@@ -2320,8 +2431,8 @@ function up2k_init(subtle) {
            try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
        };

-       xhr.timeout = 34000;
        xhr.open('HEAD', t.purl + uricom_enc(t.name), true);
+       xhr.timeout = 34000;
        xhr.send();
    }

@@ -2354,7 +2465,7 @@ function up2k_init(subtle) {
                return console.log('zombie handshake onerror', t.name, t);

            if (!toast.visible)
-               toast.warn(9.98, L.u_eneths + "\n\nfile: " + t.name, t);
+               toast.warn(9.98, L.u_eneths + "\n\nfile: " + esc(t.name), t);

            console.log('handshake onerror, retrying', t.name, t);
            apop(st.busy.handshake, t);

@@ -2459,7 +2570,7 @@ function up2k_init(subtle) {
                var idx = t.hash.indexOf(missing[a]);
                if (idx < 0)
                    return modal.alert('wtf negative index for hash "{0}" in task:\n{1}'.format(
-                       missing[a], JSON.stringify(t)));
+                       missing[a], esc(JSON.stringify(t))));

                t.postlist.push(idx);
                cbd[idx] = 0;

@@ -2613,7 +2724,7 @@ function up2k_init(subtle) {
                    return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));

                err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
-               xhrchk(xhr, err + "\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found", "warn", t);
+               xhrchk(xhr, err + "\n\nfile: " + esc(t.name) + "\n\nerror ", "404, target folder not found", "warn", t);
            }
        }
        xhr.onload = function (e) {

@@ -2634,6 +2745,13 @@ function up2k_init(subtle) {
        else if (t.umod)
            req.umod = true;

+       if (!t.srch) {
+           if (uc.ow == 1)
+               req.replace = 'mt';
+           if (uc.ow == 2)
+               req.replace = true;
+       }
+
        xhr.open('POST', t.purl, true);
        xhr.responseType = 'text';
        xhr.timeout = 42000 + (t.srch || t.t_uploaded ? 0 :

@@ -2763,7 +2881,7 @@ function up2k_init(subtle) {
                toast.inf(10, L.u_cbusy);
            }
            else {
-               xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), t.name), "404, target folder not found (???)", "warn", t);
+               xhrchk(xhr, L.u_cuerr2.format(snpart, Math.ceil(t.size / chunksize), esc(t.name)), "404, target folder not found (???)", "warn", t);
                chill(t);
            }
            orz2(xhr);

@@ -2793,7 +2911,8 @@ function up2k_init(subtle) {

            st.bytes.inflight += db;
            xhr.bsent = nb;
-           xhr.timeout = 64000 + Date.now() - xhr.t0;
+           if (!IE)
+               xhr.timeout = 64000 + Date.now() - xhr.t0;
            pvis.prog(t, pcar, nb);
        };
        xhr.onload = function (xev) {

@@ -2807,7 +2926,7 @@ function up2k_init(subtle) {
            xhr.bsent = 0;

            if (!toast.visible)
-               toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);
+               toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), esc(t.name)), t);

            t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
            console.log('chunkpit onerror,', t.name, t);

@@ -2841,7 +2960,7 @@ function up2k_init(subtle) {

        xhr.bsent = 0;
        xhr.t0 = Date.now();
-       xhr.timeout = 42000;
+       xhr.timeout = 1000 * (IE ? 1234 : 42);
        xhr.responseType = 'text';
        xhr.send(t.fobj.slice(car, cdr));
    }
@@ -3187,6 +3306,12 @@ function up2k_init(subtle) {
        }
    }

+   function set_nosubtle(v) {
+       if (!WebAssembly)
+           return toast.err(10, L.u_nowork);
+       modal.confirm(L.lang_set, location.reload.bind(location), null);
+   }
+
    function set_upnag(en) {
        function nopenag() {
            bcfg_set('upnag', uc.upnag = false);
@@ -32,7 +32,7 @@ var wah = '',
    CHROME = !!window.chrome, // safari=false
    VCHROME = CHROME ? 1 : 0,
    UA = '' + navigator.userAgent,
-   IE = /Trident\//.test(UA),
+   IE = !!document.documentMode,
    FIREFOX = ('netscape' in window) && / rv:/.test(UA),
    IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(UA),
    LINUX = /Linux/.test(UA),

@@ -69,7 +69,7 @@ try {

    CHROME = navigator.userAgentData.brands.find(function (d) { return d.brand == 'Chromium' });
    if (CHROME)
-       VCHROME = CHROME.version;
+       VCHROME = parseInt(CHROME.version);
    else
        VCHROME = 0;

@@ -183,7 +183,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
    if (url.indexOf(' > eval') + 1 && !evalex_fatal)
        return; // md timer

-   if (IE && url.indexOf('prism.js') + 1)
+   if (url.indexOf('prism.js') + 1)
        return;

    if (url.indexOf('easymde.js') + 1)

@@ -364,7 +364,8 @@ if (!Element.prototype.matches)
        Element.prototype.mozMatchesSelector ||
        Element.prototype.webkitMatchesSelector;

-if (!Element.prototype.closest)
+var CLOSEST = !!Element.prototype.closest;
+if (!CLOSEST)
    Element.prototype.closest = function (s) {
        var el = this;
        do {

@@ -382,8 +383,10 @@ if (!String.prototype.format)
        });
    };

+var have_URL = false;
 try {
    new URL('/a/', 'https://a.com/');
+   have_URL = true;
 }
 catch (ex) {
    console.log('ie11 shim URL()');

@@ -461,6 +464,13 @@ function namesan(txt, win, fslash) {
 }


+var NATSORT, ENATSORT;
+try {
+    NATSORT = new Intl.Collator([], {numeric: true});
+}
+catch (ex) { }
+
+
 var crctab = (function () {
    var c, tab = [];
    for (var n = 0; n < 256; n++) {
@@ -614,6 +624,33 @@ function showsort(tab) {
        }
    }
 }
+function st_cmp_num(a, b) {
+    a = a[0];
+    b = b[0];
+    return (
+        a === null ? -1 :
+        b === null ? 1 :
+        (a - b)
+    );
+}
+function st_cmp_nat(a, b) {
+    a = a[0];
+    b = b[0];
+    return (
+        a === null ? -1 :
+        b === null ? 1 :
+        NATSORT.compare(a, b)
+    );
+}
+function st_cmp_gen(a, b) {
+    a = a[0];
+    b = b[0];
+    return (
+        a === null ? -1 :
+        b === null ? 1 :
+        a.localeCompare(b)
+    );
+}
 function sortTable(table, col, cb) {
    var tb = table.tBodies[0],
        th = table.tHead.rows[0].cells,

@@ -659,19 +696,17 @@ function sortTable(table, col, cb) {
        }
        vl.push([v, a]);
    }
-   vl.sort(function (a, b) {
-       a = a[0];
-       b = b[0];
-       if (a === null)
-           return -1;
-       if (b === null)
-           return 1;
-
-       if (stype == 'int') {
-           return reverse * (a - b);
-       }
-       return reverse * (a.localeCompare(b));
-   });
+   if (stype == 'int')
+       vl.sort(st_cmp_num);
+   else if (ENATSORT)
+       vl.sort(st_cmp_nat);
+   else
+       vl.sort(st_cmp_gen);
+
+   if (reverse < 0)
+       vl.reverse();

    if (sread('dir1st') !== '0') {
        var r1 = [], r2 = [];
        for (var i = 0; i < tr.length; i++) {

@@ -699,6 +734,16 @@ function makeSortable(table, cb) {
 }


+function assert_vp(path) {
+    if (path.indexOf('//') + 1)
+        throw 'nonlocal1: ' + path;
+
+    var o = window.location.origin;
+    if (have_URL && (new URL(path, o)).origin != o)
+        throw 'nonlocal2: ' + path;
+}
+
+
 function linksplit(rp, id) {
    var ret = [],
        apath = '/',

@@ -857,11 +902,6 @@ function get_evpath() {
 }


-function get_vpath() {
-    return uricom_dec(get_evpath());
-}
-
-
 function noq_href(el) {
    return el.getAttribute('href').split('?')[0];
 }

@@ -1201,7 +1241,7 @@ function dl_file(url) {
 function cliptxt(txt, ok) {
    var fb = function () {
        console.log('clip-fb');
-       var o = mknod('input');
+       var o = mknod('textarea');
        o.value = txt;
        document.body.appendChild(o);
        o.focus();

@@ -1211,6 +1251,8 @@ function cliptxt(txt, ok) {
        ok();
    };
    try {
+       if (!window.isSecureContext)
+           throw 1;
        navigator.clipboard.writeText(txt).then(ok, fb);
    }
    catch (ex) { fb(); }
@@ -4,6 +4,16 @@
 function hex2u8(txt) {
    return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
 }
+function esc(txt) {
+    return txt.replace(/[&"<>]/g, function (c) {
+        return {
+            '&': '&amp;',
+            '"': '&quot;',
+            '<': '&lt;',
+            '>': '&gt;'
+        }[c];
+    });
+}


 var subtle = null;

@@ -19,6 +29,8 @@
 }
 function load_fb() {
    subtle = null;
+   if (self.hashwasm)
+       return;
    importScripts('deps/sha512.hw.js');
    console.log('using fallback hasher');
 }

@@ -64,7 +76,7 @@ onmessage = (d) => {
    };
    reader.onerror = function () {
        busy = false;
-       var err = reader.error + '';
+       var err = esc('' + reader.error);

        if (err.indexOf('NotReadableError') !== -1 || // win10-chrome defender
            err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
@@ -1,3 +1,557 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2025-0727-2305 `v1.18.5` SECURITY: fix XSS in media tags

## ⚠️ ATTN: this release fixes an XSS vulnerability

[GHSA-9q4r-x2hj-jmvr](https://github.com/9001/copyparty/security/advisories/GHSA-9q4r-x2hj-jmvr), exploitable in two different ways, could let an attacker execute arbitrary javascript on other users:
* either: tricking someone into clicking a malicious URL to load and execute javascript
* or: uploading a malicious audio file to the server, affecting any successive visitors

so, with new and curious eyes on the project, we are starting off with a bang. Huge thanks to @altperfect for finding and reporting this earlier today.

## recent important news

* [v1.18.5 (2025-07-28)](https://github.com/9001/copyparty/releases/tag/v1.18.5) fixed XSS in display of media tags
* [v1.15.0 (2024-09-08)](https://github.com/9001/copyparty/releases/tag/v1.15.0) changed upload deduplication to be default-disabled
* [v1.14.3 (2024-08-30)](https://github.com/9001/copyparty/releases/tag/v1.14.3) fixed a bug that was introduced in v1.13.8 (2024-08-13); this bug could lead to **data loss** -- see the v1.14.3 release-notes for details

## 🧪 new features

* #214 option to stop playback after one song, and/or at end of folder 6bb27e60

## 🩹 bugfixes

* GHSA-9q4r-x2hj-jmvr 895880ae
* block external m3u files 2228f81f
* #202 the connect-page could show IP-address when it should have used hostnames/domains b0dec83a
* scrolling locked after tailing a file and closing it creatively d197e754

## 🔧 other changes

* #189 the `SameSite` cookie parameter now defaults to `Strict`, increasing CSRF protection ca6d0b8d
  * new option `--cookie-lax` reverts to previous value `Lax`
* docker: add FTPS support b4199847

▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2025-0725-1841 `v1.18.4` Landmarks

## 🧪 new features

* #182 [Landmarks](https://github.com/9001/copyparty#database-location) edba7fff
  * detects that a storage backend is glitching out and disengage the up2k-database as a precaution
* #183 quickdelete 21a96bcf
  * new togglebutton `qdel` in the UI which reduces the number of deletion confirmations by one
  * global-option `--qdel=0` which can bring it all the way to zero (good luck)

## 🩹 bugfixes

* fix unpost in recently created shares 2d322dd4
* fix filekeys on windows df6d4df4

▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0721-2307 `v1.18.3` drop the umask
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* #181 the default chmod (unix-permissions) of new files and folders can now be changed 9921c43e
|
||||||
|
* `--chmod-d` or volflag `chmod_d` sets directory permissions; default is 755
|
||||||
|
* `--chmod-f` or volflag `chmod_f` sets file permissions; default is usually 644 (OS-defined)
|
||||||
|
* see `--help-chmod` which explains the numbers
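a quick sketch of the new flags; the octal values below are just examples, and the exact value syntax is an assumption -- check `--help-chmod`:

```sh
# new directories get 750 and new files get 640, in every volume
python3 copyparty-sfx.py --chmod-d 750 --chmod-f 640
```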
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* #179 couldn't combine `--shr` (shares) and `--xvol` (symlink-guard) 0f0f8d90
|
||||||
|
* #180 gallery buttons could still be clicked when faded-out 8c32b0e7
|
||||||
|
* rss-feeds were slightly busted when combined with rp-loc (location-based proxying) 56d3bcf5
|
||||||
|
* music-playback within search-results no longer jumps into the next folder at end-of-list 9bc4c5d2
|
||||||
|
* video-playback on iOS now behaves like on all other platforms 78605d9a
|
||||||
|
* (it would force-switch into fullscreen because that's the iOS default)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0707-1419 `v1.18.2` idp-vol persistence
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* IdP-volumes can optionally be persisted across restarts d162502c
|
||||||
|
* there is a UI to manage the cached users/groups 4f264a0a
|
||||||
|
* only available to users listed in the new option `--idp-adm`
|
||||||
|
* api for manually rescanning several volumes at once 42c199e7
|
||||||
|
* `/some/path/?scan` does that one volume like before
|
||||||
|
* `/any/path/?scan=/vol1,/another/vol2` rescans `/vol1` and `/another/vol2`; see the curl sketch after this list
|
||||||
|
* volflag to hide volume from listing in controlpanel fd7c71d6
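a curl sketch of the rescan api; the hostname and password are placeholders, and auth uses the `&pw=` url-param described in the devnotes:

```sh
# rescan the volume which provides /some/path (same as before)
curl 'https://example.com/some/path/?scan&pw=hunter2'

# rescan /vol1 and /another/vol2 in one call
curl 'https://example.com/any/path/?scan=/vol1,/another/vol2&pw=hunter2'
```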
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* macos: fix confusing crash when blocked by [Little Snitch](https://www.obdev.at/products/littlesnitch/) bf11b2a4
|
||||||
|
* unpost could break in some hairy reverseproxy setups 1b2d3985
|
||||||
|
* copyparty32.exe: fix segfault on win7 c9fafb20
|
||||||
|
* ui: fix navpane overlapping the scrollbar (still a bit jank but eh) 7ef6fd13
|
||||||
|
* usb-eject: support all volume names ed908b98
|
||||||
|
* docker: ensure clean slate deb6711b
|
||||||
|
* fix up2k on ie11 d2714434
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* update buildscript for keyfinder to support llvm 65c4e035
|
||||||
|
* #175 add `python-magic` into the `iv` and `dj` docker flavors (thx @Morganamilo) 77274e9d
|
||||||
|
* properly killed the experimental docker flavors to avoid confusion 8306e3d9
|
||||||
|
* copyparty.exe: updated pillow 299cff3f f6be3905
|
||||||
|
* avif support was removed to save 2 MiB
|
||||||
|
|
||||||
|
## 🌠 fun facts
|
||||||
|
|
||||||
|
* this release was slightly delayed due to a [norwegian traffic jam](https://a.ocv.me/pub/g/2025/07/PXL_20250706_143558381.jpg)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0622-0020 `v1.18.0` Logtail
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* textfile-viewer can now livestream logfiles (and other growing files) 17fa4906 77df17d1 a1c7a095 6ecf4fdc
|
||||||
|
* see [readme](https://github.com/9001/copyparty/#textfile-viewer) and the [live demo](https://a.ocv.me/pub/demo/logtail/); a curl sketch follows at the end of this list
|
||||||
|
* IdP-volumes: extend syntax for excluding certain users/groups 2e53f797
|
||||||
|
* the commit-message explains it well enough
|
||||||
|
* new option `--see-dots` to show dotfiles in the web-ui by default c599e2aa
|
||||||
|
* #171 automatic mimetype detection for files without extensions (thx @Morganamilo!) ec05f8cc 9dd5dec0
|
||||||
|
* default-disabled since it has a performance impact on webdav
|
||||||
|
* there are plans to fix this by using the db instead
|
||||||
|
* #170 improve custom filetype icons
|
||||||
|
* be less strict; if a thumbnail is set for `.gz` files, use it for `.tar.gz` too c75b0c25
|
||||||
|
* improve config docs fa5845ff
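a curl sketch of the `?tail` url-parameter behind the livestreaming viewer (the parameter is documented in the devnotes changes further down; hostname, path and password are placeholders):

```sh
# keep the connection open and stream new lines as they are appended,
# starting 128 bytes from the end of the file
curl -N 'https://example.com/logs/app.log?tail=-128&pw=hunter2'
```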
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* cosmetic: get rid of some noise along the bottom of some cards in the gridview 8cae7a71
|
||||||
|
* cosmetic: satisfy a new syntax warning in cpython-3.14 5ac38648
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* properly document how to [build from source](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#build-from-scratch) / build from scratch f61511d8
|
||||||
|
* update deps
|
||||||
|
* copyparty.exe: python 3.13 1eff87c3
|
||||||
|
* webdeps: dompurify 7eca90cc
|
||||||
|
|
||||||
|
## 🌠 fun facts
|
||||||
|
|
||||||
|
* this release was cooked up in a [swedish forest cabin](https://a.ocv.me/pub/g/nerd-stuff/forestparty.jpg)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0527-1939 `v1.17.2` pushing chrome to the limits (and then some)
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* not this time
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* up2k: improve file-hashing speed on recent versions of google chrome e3e51fb8
|
||||||
|
* speed increased from 319 to 513 MiB/s by default (but older chrome versions did 748...)
|
||||||
|
* read the commit message for the full story, but basically chrome has gotten gradually slower over the past couple versions (starting from v133) and this makes it slightly less bad again
|
||||||
|
* hashing speed can be further improved from `0.5` to `1.1` GiB/s by enabling the `[wasm]` option in the `[⚙️] settings` tab
|
||||||
|
* this option can be made default-enabled with `--nosubtle 137` but beware that this increases the chances of running into browser-bugs (foreshadowing...); see the sketch after this list
|
||||||
|
* up2k: fix errorhandler for browser-bugs (oom and such) 49c71247
|
||||||
|
* because [chrome-bug 383568268](https://issues.chromium.org/issues/383568268) is about to make a [surprise return?!](https://issues.chromium.org/issues/383568268#comment14)
|
||||||
|
* #168 fix uploading into shares if path-based proxying is used 9cb93ae1
|
||||||
|
* #165 unconditionally heed `--rp-loc` 84f5f417
|
||||||
|
* the config-option for [path-based proxying](https://github.com/9001/copyparty/#reverse-proxy) was ignored if the reverse-proxy was untrusted; this was confusing and not strictly necessary
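a sketch of default-enabling the wasm hasher mentioned above; the value is copied straight from the release note, so weigh it against the browser-bug caveat:

```sh
# make the [wasm] hashing option default-enabled (threshold 137 as noted above)
python3 copyparty-sfx.py --nosubtle 137
```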
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* #166 the nixos module was improved once more (thx @msfjarvis!) 48470f6b 60fb1207
|
||||||
|
* added usage instructions to [minimal-up2k.js](https://github.com/9001/copyparty/tree/hovudstraum/contrib/plugins#example-browser-js), the up2k-ui [simplifier](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png) 1d308eeb
|
||||||
|
* docker: improve feedback if config is bad or missing 28b63e58
|
||||||
|
|
||||||
|
## 🌠 fun facts
|
||||||
|
|
||||||
|
* this release was tested using an [unreliable rdp connection](https://a.ocv.me/pub/g/nerd-stuff/PXL_20250526_021207825.jpg) through two ssh-jumphosts to a qemu win10 vm back home from the bergen-oslo night train wifi
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0518-2234 `v1.17.1` as seen on archlinux
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* new toolbar button to zip/tar the currently open folder 256dad8c
|
||||||
|
* new options to specify the default checksum algorithm for PUT/bup/WebDAV uploads 0de09860
|
||||||
|
* #164 new option `--put-name` to specify the filename of nameless uploads 5dcd88a6
|
||||||
|
* the default is still `put-TIMESTAMP-IPADDRESS.bin`
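a quick sketch of a nameless upload and the new option; the upload method, server address and filename value are assumptions for illustration:

```sh
# a nameless PUT straight to a folder URL; by default it would be
# saved as put-TIMESTAMP-IPADDRESS.bin
curl -X PUT --data-binary @notes.txt 'https://example.com/inbox/?pw=hunter2'

# serverside: use a different filename for such uploads
python3 copyparty-sfx.py --put-name 'drop.bin'
```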
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* #162 password-protected shares were incompatible with password-hashing c3ef3fdc
|
||||||
|
* #161 m3u playlist creation was only possible over https 94352f27
|
||||||
|
* when relocating/redirecting an upload from an xbu hook (execute-before-upload), could miss an already existing file at the destination and create another copy 0a9a8077
|
||||||
|
* some edgecases when moving files between filesystems f425ff51
|
||||||
|
* improve tagscan-resume after a server restart (primarily for dupes) 41fa6b25
|
||||||
|
* support prehistoric timestamps in fat16 vhd-drives on windows 261236e3
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* #159 the nixos module was improved (thx @gabevenberg and @chinponya!) d1bca1f5
|
||||||
|
* an archlinux maintainer adopted the aur package; copyparty is now [officially in arch](https://archlinux.org/packages/extra/any/copyparty/) b9ba783c
|
||||||
|
* #162 add KDE Dolphin instructions to the connect-page d4a8071d
|
||||||
|
* audioplayer now knows that `.oga` means `.ogg`
|
||||||
|
|
||||||
|
## 🌠 fun facts
|
||||||
|
|
||||||
|
* this release contains code [pair-programmed during an anime rave](https://a.ocv.me/pub/g/nerd-stuff/PXL_20250503_222654610.jpg)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0426-2149 `v1.17.0` mixtape.m3u
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* [m3u playlists](https://github.com/9001/copyparty/#playlists) 897f9d32 ad200f2b 4195762d fff45552
|
||||||
|
* create and play m3u / m3u8 files
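a minimal sketch of the kind of m3u8 the new UI creates and plays; the filenames are made up, and in practice the web-ui writes these for you:

```sh
cat > mixtape.m3u8 <<'EOF'
#EXTM3U
#EXTINF:-1,intro
01-intro.opus
#EXTINF:-1,main theme
02-main-theme.flac
EOF
```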
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* improve support for ie11 (yes, internet explorer 11) 3090c748 95157d02
|
||||||
|
* now possible to launch the password-hasher cli while another instance is running dbfc899d
|
||||||
|
* in preparation of #157 / #159
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* make better decisions when running in a VM with less than 1 GiB RAM dc3b7a27
|
||||||
|
|
||||||
|
## 🌠 fun facts
|
||||||
|
|
||||||
|
* this release contains code written [less than 1masl](https://a.ocv.me/pub/g/nerd-stuff/PXL_20250425_170037812.jpg) and was gonna be named [hash again](https://www.youtube.com/watch?v=twUFbqyul_M) since it was originally just the password-hasher fix, but then kipun suggested adding playlist support (thx kipun)
|
||||||
|
* [donations](https://github.com/9001/) are now also possible through github -- good alternative to paypal (y)
|
||||||
|
* and thanks a lot for the support (and kind words therein) so far, appreciate it :>
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0420-1836 `v1.16.21` unzip-compat
|
||||||
|
|
||||||
|
a couple guys have been asking if I accept donations -- thanks a lot!! added a few options on [my github page](https://github.com/9001/) :>
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* #156 add button to loop/repeat music 71c55659
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* #155 download-as-zip: increase compatibility with the unix `unzip` command db33d68d
|
||||||
|
* this unfortunately reduces support for huge zipfiles on old software (WinXP and such)
|
||||||
|
* and makes it less safe to stream zips into unzippers, so use tar.gz instead
|
||||||
|
* and is perhaps not even a copyparty bug; see commit-message for the full story
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* show warning on Ctrl-A in lazy-loaded folders 5b3a5fe7
|
||||||
|
* docker: hide keepalive pings from logs d5a9bd80
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0413-2151 `v1.16.20` all sorted
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* when enabled, natural-sort will now also apply to tags, not just filenames 7b2bd6da
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* some sorting-related stuff 7b2bd6da
|
||||||
|
* folders with non-ascii names would sort incorrectly in the navpane/sidebar
|
||||||
|
* natural-sort didn't apply correctly after changing the sort order
|
||||||
|
* workaround [ffmpeg-bug 10797](https://trac.ffmpeg.org/ticket/10797) 98dcaee2
|
||||||
|
* reduces ram usage from 1534 to 230 MiB when generating spectrograms of s3xmodit songs (amiga chiptunes)
|
||||||
|
* disable mdns if only listening on uds (unix-sockets) ffc16109 361aebf8
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* hotkey CTRL-A will now select all files in gridview 233075ae
|
||||||
|
* and it toggles (just like in list-view) so try pressing it again
|
||||||
|
* copyparty.exe: upgrade to pillow v11.2.1 c7aa1a35
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0408-2132 `v1.16.19` GHOST
|
||||||
|
|
||||||
|
did you know that every song named `GHOST` is a banger? it's true! [ghost](https://www.youtube.com/watch?v=NoUAwC4yiAw) // [ghost](https://www.youtube.com/watch?v=IKKar5SS29E) // [ghost](https://www.youtube.com/watch?v=tFSFlgm_tsw)
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* option to store markdown backups out-of-volume fc883418
|
||||||
|
* the default is still a subfolder named `.hist` next to the markdown file
|
||||||
|
* `--md-hist v` puts them in the volume's hist-folder instead
|
||||||
|
* `--md-hist n` disables markdown-backups entirely
|
||||||
|
* #149 option to store the volume sqlite databases at a custom location outside the hist-folder e1b9ac63
|
||||||
|
* new option `--dbpath` works like `--hist` but it only moves the database file, not the thumbnails
|
||||||
|
* they can be combined, in which case `--hist` is applied to thumbnails, `--dbpath` to the db
|
||||||
|
* useful when you're squeezing every last drop of performance out of your filesystem (see the issue, and the sketch after this list)
|
||||||
|
* actively prevent sharing certain databases (sessions/shares) between multiple copyparty instances acfaacbd
|
||||||
|
* an errormessage was added to explain some different alternatives for doing this safely
|
||||||
|
* for example by setting `XDG_CONFIG_HOME` which now works on all platforms b17ccc38
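a sketch combining the options above; every path is a placeholder, and the assumption that the sessions/shares state follows `XDG_CONFIG_HOME` is based on the note just above:

```sh
# assumption: sessions/shares databases end up under XDG_CONFIG_HOME
export XDG_CONFIG_HOME=/etc/copyparty-state

# thumbnails on the big slow disk, volume-db on the fast one,
# markdown backups inside each volume's hist-folder
python3 copyparty-sfx.py \
  --hist /mnt/bulk/copyparty-hist \
  --dbpath /mnt/nvme/copyparty-db \
  --md-hist v
```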
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* #151 mkdir did not work in locations outside the volume root (via symlinks) 2b50fc20
|
||||||
|
* improve the ui feedback when trying to play an audio file which failed to transcode f9954bc4
|
||||||
|
* also helps with server-filesystem issues, including image-thumbs
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* #152 custom fonts are also applied to textboxes and buttons (thx @thaddeuskkr) d450f615
|
||||||
|
* be more careful with the shares-db 8e0364ef
|
||||||
|
* be less careful with the sessions-db 8e0364ef
|
||||||
|
* update deps c0becc64
|
||||||
|
* web: dompurify
|
||||||
|
* copyparty.exe: python 3.12.10
|
||||||
|
* rephrase `-j0` warning on windows to also mention that Microsoft Defender will freak out c0becc64
|
||||||
|
* #149 add [a script](https://github.com/9001/copyparty/tree/hovudstraum/contrib#zfs-tunepy) to optimize the sqlite databases for storage on zfs 4f397b9b
|
||||||
|
* block `GoogleOther` (another recalcitrant bot) from zip-downloads c2034f7b
|
||||||
|
* update [contributing.md](https://github.com/9001/copyparty/blob/hovudstraum/CONTRIBUTING.md) with a section regarding LLM/AI-written code cec3bee0
|
||||||
|
* the [helptext](https://ocv.me/copyparty/helptext.html) will also be uploaded to each github release from now on, [permalink](https://github.com/9001/copyparty/releases/latest/download/helptext.html)
|
||||||
|
* add review from ixbt forums b383c08c
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0323-2216 `v1.16.18` zlib-ng
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* prefer zlib-ng when available 57a56073
|
||||||
|
* download-as-tar-gz becomes 2.5x faster
|
||||||
|
* default-enabled in docker-images
|
||||||
|
* not enabled in copyparty.exe yet; coming in a future python version
|
||||||
|
* docker: add mimalloc (optional, default-disabled) de2c9788
|
||||||
|
* gives twice the speed, and twice the ram usage
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* small up2k glitch 3c90cec0
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* rename logues/readmes when uploaded with write-only access 2525d594
|
||||||
|
* since they are used as helptext when viewing the page
|
||||||
|
* try to block google and other bad bots from `?doc` and `?zip` 99f63adf
|
||||||
|
* apparently `rel="nofollow"` means nothing these days
|
||||||
|
|
||||||
|
### the docker images for this release were built from e1dea7ef
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0316-2002 `v1.16.17` boot2party
|
||||||
|
|
||||||
|
## NEW: make it a bootable usb flashdrive
|
||||||
|
|
||||||
|
get the party going anywhere, anytime, no OS required! [download flashdrive image](https://a.ocv.me/pub/stuff/edcd001/enterprise-edition/) or watch the [low-effort demo video](https://a.ocv.me/pub/stuff/edcd001/enterprise-edition/hub-demo-hq.webm) which eventually gets to the copyparty part after showing off a bunch of other stuff on there
|
||||||
|
|
||||||
|
* there is [source code](https://github.com/9001/asm/tree/hovudstraum/p/hub) and [build instructions](https://github.com/9001/asm/tree/hovudstraum/p/hub/sm/how2build) too
|
||||||
|
* please don't take this too seriously
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* option to specify max-size for download-as-zip/tar 494179bd 0a33336d
|
||||||
|
* either the total download size (`--zipmaxs 500M`), and/or max number of files (`--zipmaxn 9k`)
|
||||||
|
* applies to all users by default; can also ignore limits for authorized users (`--zipmaxu`)
|
||||||
|
* errormessage can be customized with `--zipmaxt "winter is coming... but this download isn't"`
|
||||||
|
* [appledoubles](https://a.ocv.me/pub/stuff/?doc=appledoubles-and-friends.txt) are detected and skipped when uploading with the browser-UI 78208405
|
||||||
|
* IdP-volumes can be filtered by group 9c2c4237
|
||||||
|
* `[/users/${u}]` in a config-file creates the volume for all users like before
|
||||||
|
* `[/users/${u%+canwrite}]` only if the user is in the `canwrite` group
|
||||||
|
* `[/users/${u%-admins}]` only if the user is NOT in the `admins` group
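a minimal config sketch of the group-filtered volumes; only the `[...]` patterns come from the release note, the paths and permissions are placeholders:

```sh
# the quoted heredoc keeps ${u} literal so copyparty expands it, not the shell
cat > idp-groups.conf <<'EOF'
[/users/${u%+canwrite}]   # only for members of the "canwrite" group
  /srv/party/users/${u}
  accs:
    rwmd: ${u}

[/users/${u%-admins}]     # everyone EXCEPT members of "admins"
  /srv/party/guests/${u}
  accs:
    r: ${u}
EOF
```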
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* when moving a folder with symlinks, don't expand them into full files 5ab09769
|
||||||
|
* absolute symlinks are moved as-is; relative symlinks are rewritten so they still point to the same file when possible (if both source and destination are indexed in the db)
|
||||||
|
* the previous behavior was good for un-deduplicating files after changing the server-settings, but was too inconvenient for all other usecases
|
||||||
|
* #146 fix downloading from shares when `-j0` is enabled 8417098c
|
||||||
|
* only show the download-as-zip link when the user is actually allowed to 14bb2999
|
||||||
|
* the suggestions in the serverlog regarding how to fix incorrect X-Forwarded-For settings would be incorrect if the reverse-proxy used IPv6 to communicate with copyparty 16462ee5
|
||||||
|
* set nofollow on `?doc` links so crawlers don't download binary files as text 6a2644fe
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* #147 IdP: fix the warning about dangerous misconfigurations to be more accurate 29a17ae2
|
||||||
|
* #143 print a warning on incorrect character-encoding in textfiles (config-files, logues, readmes etc.) 25974d66
|
||||||
|
* copyparty.exe: update to jinja 3.1.6 (copyparty was *not affected* by the jinja-3.1.5 vuln)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0228-1846 `v1.16.16` lemon melon cookie
|
||||||
|
|
||||||
|
<img src="https://github.com/9001/copyparty/raw/hovudstraum/docs/logo.svg" width="250" align="right"/>
|
||||||
|
|
||||||
|
webdev is [like a lemon](https://youtu.be/HPURbfKb7to) sometimes
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||||
|
|
||||||
|
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates, such as [vulnerabilities](https://github.com/9001/copyparty/security) (most recently 2025-02-25)
|
||||||
|
|
||||||
|
## recent important news
|
||||||
|
|
||||||
|
* [v1.16.15 (2025-02-25)](https://github.com/9001/copyparty/releases/tag/v1.16.15) fixed low-severity xss when uploading maliciously-named files
|
||||||
|
* [v1.15.0 (2024-09-08)](https://github.com/9001/copyparty/releases/tag/v1.15.0) changed upload deduplication to be default-disabled
|
||||||
|
* [v1.14.3 (2024-08-30)](https://github.com/9001/copyparty/releases/tag/v1.14.3) fixed a bug that was introduced in v1.13.8 (2024-08-13); this bug could lead to **data loss** -- see the v1.14.3 release-notes for details
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* #142 workaround android-chrome timestamp bug 5e12abbb
|
||||||
|
* all files were uploaded with last-modified year 1601 in specific recent versions of chrome
|
||||||
|
* https://issues.chromium.org/issues/393149335 has the actual fix; will be out soon
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* add helptext for volflags `dk`, `dks`, `dky` 65a7706f
|
||||||
|
* fix false-positive warning when disabling a global option per-volume by unsetting the volflag
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* #140 nixos: @daimond113 fixed a warning in the nixpkg (thx!) e0fe2b97
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0225-0017 `v1.16.15` fix low-severity vuln
|
||||||
|
|
||||||
|
<img src="https://github.com/9001/copyparty/raw/hovudstraum/docs/logo.svg" width="250" align="right"/>
|
||||||
|
|
||||||
|
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||||
|
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) ╱ [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) ╱ [client testbed](https://cd.ocv.me/b/)
|
||||||
|
|
||||||
|
## ⚠️ this fixes a minor vulnerability; CVE-score `3.6`/`10`
|
||||||
|
|
||||||
|
[GHSA-m2jw-cj8v-937r](https://github.com/9001/copyparty/security/advisories/GHSA-m2jw-cj8v-937r) aka [CVE-2025-27145](https://www.cve.org/CVERecord?id=CVE-2025-27145) could let an attacker run arbitrary javascript by tricking an authenticated user into uploading files with malicious filenames
|
||||||
|
|
||||||
|
* ...but it required some clever social engineering, and is **not likely** to be a cause for concern... ah, better safe than sorry
|
||||||
|
|
||||||
|
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates, such as [vulnerabilities](https://github.com/9001/copyparty/security) (most recently 2025-02-25)
|
||||||
|
|
||||||
|
## recent important news
|
||||||
|
|
||||||
|
* [v1.15.0 (2024-09-08)](https://github.com/9001/copyparty/releases/tag/v1.15.0) changed upload deduplication to be default-disabled
|
||||||
|
* [v1.14.3 (2024-08-30)](https://github.com/9001/copyparty/releases/tag/v1.14.3) fixed a bug that was introduced in v1.13.8 (2024-08-13); this bug could lead to **data loss** -- see the v1.14.3 release-notes for details
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* nothing this time
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* fix [GHSA-m2jw-cj8v-937r](https://github.com/9001/copyparty/security/advisories/GHSA-m2jw-cj8v-937r) / [CVE-2025-27145](https://www.cve.org/CVERecord?id=CVE-2025-27145) in 438ea6cc
|
||||||
|
* when trying to upload an empty file by dragging it into the browser, the filename would be rendered as HTML, allowing javascript injection if the filename was malicious
|
||||||
|
* issue discovered and reported by @JayPatel48 (thx!)
|
||||||
|
* related issues in errorhandling of uploads 499ae1c7 36866f1d
|
||||||
|
* these all had the same consequences as the GHSA above, but a network outage was necessary to trigger them
|
||||||
|
* which would probably have the lucky side-effect of blocking the javascript download, nice
|
||||||
|
* paranoid fixing of probably-not-even-issues 3adbb2ff
|
||||||
|
* fix some markdown / texteditor bugs 407531bc
|
||||||
|
* only indicate file-versions for markdown files in listings, since it's tricky to edit non-textfiles otherwise
|
||||||
|
* CTRL-C followed by CTRL-V and CTRL-Z in a single-line file would make a character fall off
|
||||||
|
* ensure safety of extensions
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* readme:
|
||||||
|
* mention support for running the server on risc-v 6d102fc8
|
||||||
|
* mention that the [sony psp](https://github.com/user-attachments/assets/9d21f020-1110-4652-abeb-6fc09c533d4f) can browse and upload 598a29a7
|
||||||
|
|
||||||
|
----
|
||||||
|
|
||||||
|
# 💾 what to download?
|
||||||
|
| download link | is it good? | description |
|
||||||
|
| -- | -- | -- |
|
||||||
|
| **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** | ✅ the best 👍 | runs anywhere! only needs python |
|
||||||
|
| [a docker image](https://github.com/9001/copyparty/blob/hovudstraum/scripts/docker/README.md) | it's ok | good if you prefer docker 🐋 |
|
||||||
|
| [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) | ⚠️ [acceptable](https://github.com/9001/copyparty#copypartyexe) | for [win8](https://user-images.githubusercontent.com/241032/221445946-1e328e56-8c5b-44a9-8b9f-dee84d942535.png) or later; built-in thumbnailer |
|
||||||
|
| [u2c.exe](https://github.com/9001/copyparty/releases/download/v1.16.14/u2c.exe) | ⚠️ acceptable | [CLI uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py) as a win7+ exe ([video](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm)) |
|
||||||
|
| [copyparty.pyz](https://github.com/9001/copyparty/releases/latest/download/copyparty.pyz) | ⚠️ acceptable | similar to the regular sfx, [mostly worse](https://github.com/9001/copyparty#zipapp) |
|
||||||
|
| [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) | ⛔️ [dangerous](https://github.com/9001/copyparty#copypartyexe) | for [win7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png) -- never expose to the internet! |
|
||||||
|
| [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.16.5/copyparty-winpe64.exe) | ⛔️ dangerous | runs on [64bit WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png), otherwise useless |
|
||||||
|
|
||||||
|
* except for [u2c.exe](https://github.com/9001/copyparty/releases/download/v1.16.14/u2c.exe), all of the options above are mostly equivalent
|
||||||
|
* the zip and tar.gz files below are just source code
|
||||||
|
* python packages are available at [PyPI](https://pypi.org/project/copyparty/#files)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0219-2309 `v1.16.14` overwrite by upload
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* #139 overwrite existing files by uploading over them e9f78ea7
|
||||||
|
* default-disabled; a new togglebutton in the upload-UI configures it
|
||||||
|
* can optionally compare last-modified-time and only overwrite older files
|
||||||
|
* [GDPR compliance](https://github.com/9001/copyparty#GDPR-compliance) (maybe/probably) 4be0d426
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* some cosmetic volflag stuff, all harmless b190e676
|
||||||
|
* disabling a volflag `foo` with `-foo` shows a warning that `-foo` was not a recognized volflag, but it still does the right thing
|
||||||
|
* some volflags give the *"unrecognized volflag, will ignore"* warning, but not to worry, they still work just fine:
|
||||||
|
* `xz` to allow serverside xz-compression of uploaded files
|
||||||
|
* the option to customize the loader-spinner would glitch out during the initial page load 7d7d5d6c
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* [randpic.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/handlers/randpic.py), new 404-handler example, returns a random pic from a folder 60d5f271
|
||||||
|
* readme: [howto permanent cloudflare tunnel](https://github.com/9001/copyparty#permanent-cloudflare-tunnel) for easy hosting from home 2beb2acc
|
||||||
|
* [synology-dsm](https://github.com/9001/copyparty/blob/hovudstraum/docs/synology-dsm.md): mention how to update the docker image 56ce5919
|
||||||
|
* spinner improvements 6858cb06
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||||
|
# 2025-0213-2057 `v1.16.13` configure with confidence
|
||||||
|
|
||||||
|
## 🧪 new features
|
||||||
|
|
||||||
|
* make the config-parser more helpful regarding volflags a255db70
|
||||||
|
* if an unrecognized volflag is specified, print a warning instead of silently ignoring it
|
||||||
|
* understand volflag-names with Uppercase and/or kebab-case (dashes), and not just snake_case (underscores)
|
||||||
|
* improve `--help-flags` to mention and explain all available flags
|
||||||
|
* #136 WebDAV: support COPY 62ee7f69
|
||||||
|
* also support overwrite of existing target files (default-enabled according to the spec)
|
||||||
|
* the user must have the delete-permission to actually replace files; a curl sketch follows this list
|
||||||
|
* option to specify custom icons for certain file extensions 7e4702cf
|
||||||
|
* see `--ext-th` mentioned briefly in the [thumbnails section](https://github.com/9001/copyparty/#thumbnails)
|
||||||
|
* option to replace the loading-spinner animation 685f0869
|
||||||
|
* including how to [make it exceptionally normal-looking](https://github.com/9001/copyparty/tree/hovudstraum/docs/rice#boring-loader-spinner)
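a curl sketch of the new COPY support, assuming standard WebDAV semantics; host, paths and password are placeholders:

```sh
# server-side copy; overwriting an existing target is allowed per the spec,
# but only if the user has the delete-permission
curl -X COPY \
  -H 'Destination: https://example.com/backup/report.pdf' \
  'https://example.com/docs/report.pdf?pw=hunter2'
```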
|
||||||
|
|
||||||
|
## 🩹 bugfixes
|
||||||
|
|
||||||
|
* #136 WebDAV fixes 62ee7f69
|
||||||
|
* COPY/MOVE/MKCOL: challenge clients to provide the password as necessary
|
||||||
|
* most clients only need this in PROPFIND, but KDE-Dolphin is more picky
|
||||||
|
* MOVE: support `webdav://` Destination prefix as used by Dolphin, probably others
|
||||||
|
* #136 WebDAV: improve support for KDE-Dolphin as client 9d769027
|
||||||
|
* it masquerades as a graphical browser yet still expects 401, so it is special-cased with a useragent scan
|
||||||
|
|
||||||
|
## 🔧 other changes
|
||||||
|
|
||||||
|
* Docker-only: quick hacky fix for the [musl CVE](https://www.openwall.com/lists/musl/2025/02/13/1) until the official fix is out 4d6626b0
|
||||||
|
* the docker images will be rebuilt when `musl-1.2.5-r9.apk` is released, in 6~24h or so
|
||||||
|
* until then, there is no support for reading korean XML files when running in docker
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀

# 2025-0209-2331 `v1.16.12` RTT
|
||||||
|
|
||||||
|
|||||||
@@ -22,6 +22,7 @@
|
|||||||
* [dev env setup](#dev-env-setup)
|
* [dev env setup](#dev-env-setup)
|
||||||
* [just the sfx](#just-the-sfx)
|
* [just the sfx](#just-the-sfx)
|
||||||
* [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
|
* [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
|
||||||
|
* [build from scratch](#build-from-scratch) - how the sausage is made
|
||||||
* [complete release](#complete-release)
|
* [complete release](#complete-release)
|
||||||
* [debugging](#debugging)
|
* [debugging](#debugging)
|
||||||
* [music playback halting on phones](#music-playback-halting-on-phones) - mostly fine on android
|
* [music playback halting on phones](#music-playback-halting-on-phones) - mostly fine on android
|
||||||
@@ -190,6 +191,9 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
|||||||
| GET | `?v` | open image/video/audio in mediaplayer |
|
| GET | `?v` | open image/video/audio in mediaplayer |
|
||||||
| GET | `?txt` | get file at URL as plaintext |
|
| GET | `?txt` | get file at URL as plaintext |
|
||||||
| GET | `?txt=iso-8859-1` | ...with specific charset |
|
| GET | `?txt=iso-8859-1` | ...with specific charset |
|
||||||
|
| GET | `?tail` | continuously stream a growing file |
|
||||||
|
| GET | `?tail=1024` | ...starting from byte 1024 |
|
||||||
|
| GET | `?tail=-128` | ...starting 128 bytes from the end |
|
||||||
| GET | `?th` | get image/video at URL as thumbnail |
|
| GET | `?th` | get image/video at URL as thumbnail |
|
||||||
| GET | `?th=opus` | convert audio file to 128kbps opus |
|
| GET | `?th=opus` | convert audio file to 128kbps opus |
|
||||||
| GET | `?th=caf` | ...in the iOS-proprietary container |
|
| GET | `?th=caf` | ...in the iOS-proprietary container |
|
||||||
@@ -257,6 +261,7 @@ upload modifiers:
|
|||||||
|--|--|--|
|
|--|--|--|
|
||||||
| GET | `?reload=cfg` | reload config files and rescan volumes |
|
| GET | `?reload=cfg` | reload config files and rescan volumes |
|
||||||
| GET | `?scan` | initiate a rescan of the volume which provides URL |
|
| GET | `?scan` | initiate a rescan of the volume which provides URL |
|
||||||
|
| GET | `?scan=/a,/b` | initiate a rescan of volumes `/a` and `/b` |
|
||||||
| GET | `?stack` | show a stacktrace of all threads |
|
| GET | `?stack` | show a stacktrace of all threads |
|
||||||
|
|
||||||
## general
|
## general
|
||||||
@@ -281,8 +286,11 @@ on writing your own [hooks](../README.md#event-hooks)
|
|||||||
hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout
|
hooks can cause intentional side-effects, such as redirecting an upload into another location, or creating+indexing additional files, or deleting existing files, by returning json on stdout
|
||||||
|
|
||||||
* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
|
* `reloc` can redirect uploads before/after uploading has finished, based on filename, extension, file contents, uploader ip/name etc.
|
||||||
|
* example: [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)
|
||||||
* `idx` informs copyparty about a new file to index as a consequence of this upload
|
* `idx` informs copyparty about a new file to index as a consequence of this upload
|
||||||
|
* example: [podcast-normalizer.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/podcast-normalizer.py)
|
||||||
* `del` tells copyparty to delete an unrelated file by vpath
|
* `del` tells copyparty to delete an unrelated file by vpath
|
||||||
|
* example: ( ´・ω・) nyoro~n
|
||||||
|
|
||||||
for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)
|
for these to take effect, the hook must be defined with the `c1` flag; see example [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py)
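a deliberately tiny hook sketch; the `del` key comes from the list above, but the argument handling and the exact json shape are assumptions -- copy the real structure from [reloc-by-ext](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reloc-by-ext.py) before relying on this:

```sh
#!/bin/sh
# hypothetical c1-hook: ask copyparty to delete a leftover sidecar file;
# "$1" is assumed to be the path of the file that was just uploaded
printf '{"del": ["%s.tmp"]}\n' "$1"
```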
|
||||||
|
|
||||||
@@ -335,7 +343,7 @@ for the `re`pack to work, first run one of the sfx'es once to unpack it
|
|||||||
|
|
||||||
you need python 3.9 or newer due to type hints
|
you need python 3.9 or newer due to type hints
|
||||||
|
|
||||||
the rest is mostly optional; if you need a working env for vscode or similar
|
setting up a venv with the below packages is only necessary if you want it for vscode or similar
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
python3 -m venv .venv
|
python3 -m venv .venv
|
||||||
@@ -347,7 +355,7 @@ pip install mutagen # audio metadata
|
|||||||
pip install pyftpdlib # ftp server
|
pip install pyftpdlib # ftp server
|
||||||
pip install partftpy # tftp server
|
pip install partftpy # tftp server
|
||||||
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
|
pip install impacket # smb server -- disable Windows Defender if you REALLY need this on windows
|
||||||
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
|
pip install Pillow pyheif-pillow-opener # thumbnails
|
||||||
pip install pyvips # faster thumbnails
|
pip install pyvips # faster thumbnails
|
||||||
pip install psutil # better cleanup of stuck metadata parsers on windows
|
pip install psutil # better cleanup of stuck metadata parsers on windows
|
||||||
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
|
pip install black==21.12b0 click==8.0.2 bandit pylint flake8 isort mypy # vscode tooling
|
||||||
@@ -389,6 +397,39 @@ python3 setup.py install --skip-build --prefix=/usr --root=$HOME/pe/copyparty
|
|||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## build from scratch
|
||||||
|
|
||||||
|
how the sausage is made:
|
||||||
|
|
||||||
|
to get started, first `cd` into the `scripts` folder
|
||||||
|
|
||||||
|
* the first step is the webdeps; they end up in `../copyparty/web/deps/` for example `../copyparty/web/deps/marked.js.gz` -- if you need to build the webdeps, run `make -C deps-docker`
|
||||||
|
* this needs rootless podman and the `podman-docker` compat-layer to pretend it's docker, although it *should* be possible to use rootful/rootless docker too
|
||||||
|
* if you don't have rootless podman/docker then `sudo make -C deps-docker` is fine too
|
||||||
|
* alternatively, you can entirely skip building the webdeps and instead extract the compiled webdeps from the latest github release with `./make-sfx.sh fast dl-wd`
|
||||||
|
|
||||||
|
* next, build `copyparty-sfx.py` by running `./make-sfx.sh gz fast`
|
||||||
|
* this is a dependency for most of the remaining steps, since they take the sfx as input
|
||||||
|
* removing `fast` makes it compress better
|
||||||
|
* removing `gz` too compresses even better, but startup gets slower
|
||||||
|
|
||||||
|
* if you want to build the `.pyz` standalone "binary", now run `./make-pyz.sh`
|
||||||
|
|
||||||
|
* if you want to build a pypi package, now run `./make-pypi-release.sh d`
|
||||||
|
|
||||||
|
* if you want to build a docker-image, you have two options:
|
||||||
|
* if you want to use podman to build all docker-images for all supported architectures, now run `(cd docker; ./make.sh hclean; ./make.sh hclean pull img)`
|
||||||
|
* if you want to use docker to build all docker-images for your native architecture, now run `sudo make -C docker`
|
||||||
|
* if you want to do something else, please take a look at `docker/make.sh` or `docker/Makefile` for inspiration
|
||||||
|
|
||||||
|
* if you want to build the windows exe, first grab some snacks and a beer, [you'll need it](https://github.com/9001/copyparty/tree/hovudstraum/scripts/pyinstaller)
|
||||||
|
|
||||||
|
the complete list of buildtime dependencies to do a build from scratch is as follows:
|
||||||
|
|
||||||
|
* on ubuntu-server, install podman or [docker](https://get.docker.com/), and then `sudo apt install make zip bzip2`
|
||||||
|
* because ubuntu is specifically what someone asked about :-p
|
||||||
|
|
||||||
|
|
||||||
## complete release
|
## complete release
|
||||||
|
|
||||||
also builds the sfx so skip the sfx section above
|
also builds the sfx so skip the sfx section above
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
version: '3'
|
|
||||||
services:
|
services:
|
||||||
|
|
||||||
copyparty:
|
copyparty:
|
||||||
@@ -11,9 +10,14 @@ services:
|
|||||||
- ./:/cfg:z
|
- ./:/cfg:z
|
||||||
- /path/to/your/fileshare/top/folder:/w:z
|
- /path/to/your/fileshare/top/folder:/w:z
|
||||||
|
|
||||||
|
# enabling mimalloc by replacing "NOPE" with "2" will make some stuff twice as fast, but everything will use twice as much ram:
|
||||||
|
environment:
|
||||||
|
LD_PRELOAD: /usr/lib/libmimalloc-secure.so.NOPE
|
||||||
|
|
||||||
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
|
# hide it from logs with "/._" so it matches the default --lf-url filter
|
||||||
|
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset=/._"]
|
||||||
interval: 1m
|
interval: 1m
|
||||||
timeout: 2s
|
timeout: 2s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|||||||
@@ -23,6 +23,9 @@ services:
|
|||||||
- 'traefik.http.routers.copyparty.tls=true'
|
- 'traefik.http.routers.copyparty.tls=true'
|
||||||
- 'traefik.http.routers.copyparty.middlewares=authelia@docker'
|
- 'traefik.http.routers.copyparty.middlewares=authelia@docker'
|
||||||
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
||||||
|
environment:
|
||||||
|
LD_PRELOAD: /usr/lib/libmimalloc-secure.so.NOPE
|
||||||
|
# enable mimalloc by replacing "NOPE" with "2" for a nice speed-boost (will use twice as much ram)
|
||||||
|
|
||||||
authelia:
|
authelia:
|
||||||
image: authelia/authelia:v4.38.0-beta3 # the config files in the authelia folder use the new syntax
|
image: authelia/authelia:v4.38.0-beta3 # the config files in the authelia folder use the new syntax
|
||||||
|
|||||||
@@ -22,13 +22,10 @@ services:
|
|||||||
- 'traefik.http.routers.fs.rule=Host(`fs.example.com`)'
|
- 'traefik.http.routers.fs.rule=Host(`fs.example.com`)'
|
||||||
- 'traefik.http.routers.fs.entrypoints=http'
|
- 'traefik.http.routers.fs.entrypoints=http'
|
||||||
#- 'traefik.http.routers.fs.middlewares=authelia@docker' # TODO: ???
|
#- 'traefik.http.routers.fs.middlewares=authelia@docker' # TODO: ???
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
|
|
||||||
interval: 1m
|
|
||||||
timeout: 2s
|
|
||||||
retries: 5
|
|
||||||
start_period: 15s
|
|
||||||
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
|
||||||
|
environment:
|
||||||
|
LD_PRELOAD: /usr/lib/libmimalloc-secure.so.NOPE
|
||||||
|
# enable mimalloc by replacing "NOPE" with "2" for a nice speed-boost (will use twice as much ram)
|
||||||
|
|
||||||
traefik:
|
traefik:
|
||||||
image: traefik:v2.11
|
image: traefik:v2.11
|
||||||
|
|||||||
@@ -13,6 +13,8 @@
|
|||||||
# because that is the data-volume in the docker containers,
|
# because that is the data-volume in the docker containers,
|
||||||
# because a deployment like this (with an IdP) is more commonly
|
# because a deployment like this (with an IdP) is more commonly
|
||||||
# seen in containerized environments -- but this is not required
|
# seen in containerized environments -- but this is not required
|
||||||
|
#
|
||||||
|
# the example group "su" (super-user) is the admins group
|
||||||
|
|
||||||
|
|
||||||
[global]
|
[global]
|
||||||
@@ -78,6 +80,18 @@
|
|||||||
rwmda: @${g}, @su # read-write-move-delete-admin for that group + the "su" group
|
rwmda: @${g}, @su # read-write-move-delete-admin for that group + the "su" group
|
||||||
|
|
||||||
|
|
||||||
|
[/sus/${u%+su}] # users which ARE members of group "su" gets /sus/username
|
||||||
|
/w/tank1/${u} # which will be "tank1/username" in the docker data volume
|
||||||
|
accs:
|
||||||
|
rwmda: ${u} # read-write-move-delete-admin for that username
|
||||||
|
|
||||||
|
|
||||||
|
[/m8s/${u%-su}] # users which are NOT members of group "su" gets /m8s/username
|
||||||
|
/w/tank2/${u} # which will be "tank2/username" in the docker data volume
|
||||||
|
accs:
|
||||||
|
rwmda: ${u} # read-write-move-delete-admin for that username
|
||||||
|
|
||||||
|
|
||||||
# and create some strategic volumes to prevent anyone from gaining
|
# and create some strategic volumes to prevent anyone from gaining
|
||||||
# unintended access to priv folders if the users/groups db is lost
|
# unintended access to priv folders if the users/groups db is lost
|
||||||
[/u]
|
[/u]
|
||||||
@@ -88,3 +102,14 @@
|
|||||||
/w/lounge
|
/w/lounge
|
||||||
accs:
|
accs:
|
||||||
rwmda: @su
|
rwmda: @su
|
||||||
|
[/sus]
|
||||||
|
/w/tank1
|
||||||
|
[/m8s]
|
||||||
|
/w/tank2
|
||||||
|
|
||||||
|
|
||||||
|
# some other things you can do:
|
||||||
|
# [/demo/${u%-su,%-fds}] # users which are NOT members of "su" or "fds"
|
||||||
|
# [/demo/${u%+su,%+fds}] # users which ARE members of BOTH "su" and "fds"
|
||||||
|
# [/demo/${g%-su}] # all groups except su
|
||||||
|
# [/demo/${g%-su,%-fds}] # all groups except su and fds
|
||||||
|
|||||||
@@ -33,12 +33,6 @@ if you are introducing a new ttf/woff font, don't forget to declare the font its
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
and because textboxes don't inherit fonts by default, you can force it like this:
|
|
||||||
|
|
||||||
```css
|
|
||||||
input[type=text], input[type=submit], input[type=button] { font-family: var(--font-main) }
|
|
||||||
```
|
|
||||||
|
|
||||||
and if you want to have a monospace font in the fancy markdown editor, do this:
|
and if you want to have a monospace font in the fancy markdown editor, do this:
|
||||||
|
|
||||||
```css
|
```css
|
||||||
|
|||||||
@@ -115,6 +115,16 @@ note that if you only want to share some folders inside your data volume, and no
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## updating
|
||||||
|
|
||||||
|
to update to a new copyparty version: `Container Manager` » `Images` » `Update available` » `Update`
|
||||||
|
|
||||||
|
* DSM checks for updates every 12h; you can force a check with `sudo /var/packages/ContainerManager/target/tool/image_upgradable_checker`
|
||||||
|
|
||||||
|
* there is no auto-update feature, and beware that watchtower does not support DSM
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## regarding ram usage
|
## regarding ram usage
|
||||||
|
|
||||||
the ram usage indicator in both `Docker` and `Container Manager` is misleading because it also counts the kernel disk cache which makes the number insanely high -- the synology resource monitor shows the correct values, usually less than 100 MiB
|
the ram usage indicator in both `Docker` and `Container Manager` is misleading because it also counts the kernel disk cache which makes the number insanely high -- the synology resource monitor shows the correct values, usually less than 100 MiB
|
||||||
|
|||||||
@@ -131,6 +131,7 @@ symbol legend,
|
|||||||
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||||
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | |
|
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | |
|
||||||
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ | |
|
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ | |
|
||||||
|
| runs on Risc-V | █ | | | █ | █ | █ | | • | | █ | | | |
|
||||||
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ | █ |
|
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ | █ |
|
||||||
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | | ╱ | █ |
|
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | | ╱ | █ |
|
||||||
| android app | ╱ | | | █ | █ | | | | | | | | |
|
| android app | ╱ | | | █ | █ | | | | | | | | |
|
||||||
@@ -160,13 +161,14 @@ symbol legend,
|
|||||||
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ |
|
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ |
|
||||||
| parallel uploads | █ | | | █ | █ | | • | | █ | █ | █ | | █ |
|
| parallel uploads | █ | | | █ | █ | | • | | █ | █ | █ | | █ |
|
||||||
| resumable uploads | █ | | █ | | | | | | █ | █ | █ | ╱ | |
|
| resumable uploads | █ | | █ | | | | | | █ | █ | █ | ╱ | |
|
||||||
| upload segmenting | █ | | | | | | | █ | █ | █ | █ | ╱ | █ |
|
| upload segmenting | █ | | | █ | | | | █ | █ | █ | █ | ╱ | █ |
|
||||||
| upload acceleration | █ | | | | | | | | █ | | █ | | |
|
| upload acceleration | █ | | | | | | | | █ | | █ | | |
|
||||||
| upload verification | █ | | | █ | █ | | | | █ | | | | |
|
| upload verification | █ | | | █ | █ | | | | █ | | | | |
|
||||||
| upload deduplication | █ | | | | █ | | | | █ | | | | |
|
| upload deduplication | █ | | | | █ | | | | █ | | | | |
|
||||||
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | ╱ | ╱ |
|
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | ╱ | ╱ |
|
||||||
| CTRL-V from device | █ | | | █ | | | | | | | | | |
|
| CTRL-V from device | █ | | | █ | | | | | | | | | |
|
||||||
| race the beam ("p2p") | █ | | | | | | | | | | | | |
|
| race the beam ("p2p") | █ | | | | | | | | | | | | |
|
||||||
|
| "tail -f" streaming | █ | | | | | | | | | | | | |
|
||||||
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ | |
|
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ | |
|
||||||
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ | ╱ | ╱ |
|
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ | ╱ | ╱ |
|
||||||
| ┗ max disk usage | █ | █ | █ | | █ | | | | █ | | | █ | █ |
|
| ┗ max disk usage | █ | █ | █ | | █ | | | | █ | | | █ | █ |
|
||||||
@@ -192,6 +194,8 @@ symbol legend,
|
|||||||
|
|
||||||
* `race the beam` = files can be downloaded while they're still uploading; downloaders are slowed down such that the uploader is always ahead
|
* `race the beam` = files can be downloaded while they're still uploading; downloaders are slowed down such that the uploader is always ahead
|
||||||
|
|
||||||
|
* `tail -f` = when viewing or downloading a logfile, the connection can remain open to keep showing new lines as they are added in real time
|
||||||
|
|
||||||
* `upload routing` = depending on filetype / contents / uploader etc., the file can be redirected to another location or otherwise transformed; mitigates limitations such as [sharex#3992](https://github.com/ShareX/ShareX/issues/3992)
|
* `upload routing` = depending on filetype / contents / uploader etc., the file can be redirected to another location or otherwise transformed; mitigates limitations such as [sharex#3992](https://github.com/ShareX/ShareX/issues/3992)
|
||||||
* copyparty example: [reloc-by-ext](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#before-upload)
|
* copyparty example: [reloc-by-ext](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks#before-upload)
|
||||||
|
|
||||||
@@ -484,7 +488,7 @@ symbol legend,
|
|||||||
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
|
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
|
||||||
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
||||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
* 🔵 uploads are segmented; no filesize limit, even on cloudflare
|
||||||
* ⚠️ uploading small files is slow; `4` files per sec (copyparty does `670`/sec, 160x faster)
|
* ⚠️ uploading small files is slow; `4` files per sec (copyparty does `670`/sec, 160x faster)
|
||||||
* ⚠️ no write-only / upload-only folders
|
* ⚠️ no write-only / upload-only folders
|
||||||
* ⚠️ http/webdav only; no ftp, zeroconf
|
* ⚠️ http/webdav only; no ftp, zeroconf
|
||||||
|
|||||||
8
flake.lock
generated
8
flake.lock
generated
@@ -17,16 +17,16 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1680334310,
|
"lastModified": 1748162331,
|
||||||
"narHash": "sha256-ISWz16oGxBhF7wqAxefMPwFag6SlsA9up8muV79V9ck=",
|
"narHash": "sha256-rqc2RKYTxP3tbjA+PB3VMRQNnjesrT0pEofXQTrMsS8=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "884e3b68be02ff9d61a042bc9bd9dd2a358f95da",
|
"rev": "7c43f080a7f28b2774f3b3f43234ca11661bf334",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"id": "nixpkgs",
|
"id": "nixpkgs",
|
||||||
"ref": "nixos-22.11",
|
"ref": "nixos-25.05",
|
||||||
"type": "indirect"
|
"type": "indirect"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
inputs = {
|
inputs = {
|
||||||
nixpkgs.url = "nixpkgs/nixos-22.11";
|
nixpkgs.url = "nixpkgs/nixos-25.05";
|
||||||
flake-utils.url = "github:numtide/flake-utils";
|
flake-utils.url = "github:numtide/flake-utils";
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -17,6 +17,9 @@
|
|||||||
let
|
let
|
||||||
pkgs = import nixpkgs {
|
pkgs = import nixpkgs {
|
||||||
inherit system;
|
inherit system;
|
||||||
|
config = {
|
||||||
|
allowAliases = false;
|
||||||
|
};
|
||||||
overlays = [ self.overlays.default ];
|
overlays = [ self.overlays.default ];
|
||||||
};
|
};
|
||||||
in {
|
in {
|
||||||
@@ -3,7 +3,7 @@ WORKDIR /z
 ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
     ver_hashwasm=4.12.0 \
     ver_marked=4.3.0 \
-    ver_dompf=3.2.4 \
+    ver_dompf=3.2.6 \
     ver_mde=2.18.0 \
     ver_codemirror=5.65.18 \
     ver_fontawesome=5.13.0 \

@@ -12,7 +12,7 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
 
 # versioncheck:
 # https://github.com/markedjs/marked/releases
-# https://github.com/Ionaru/easy-markdown-editor/tags
+# https://github.com/Ionaru/easy-markdown-editor/tags  # ignore 2.20.0
 # https://github.com/codemirror/codemirror5/releases
 # https://github.com/cure53/DOMPurify/releases
 # https://github.com/Daninet/hash-wasm/releases

@@ -8,12 +8,13 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
 ENV XDG_CONFIG_HOME=/cfg
 
 RUN apk --no-cache add !pyc \
-    tzdata wget \
-    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-pillow \
+    tzdata wget mimalloc2 mimalloc2-insecure \
+    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-openssl py3-pillow \
     ffmpeg
 
 COPY i/dist/copyparty-sfx.py innvikler.sh ./
-RUN ash innvikler.sh && rm innvikler.sh
+ADD base ./base
+RUN ash innvikler.sh ac
 
 WORKDIR /w
 EXPOSE 3923

@@ -11,17 +11,18 @@ COPY i/bin/mtag/install-deps.sh ./
 COPY i/bin/mtag/audio-bpm.py /mtag/
 COPY i/bin/mtag/audio-key.py /mtag/
 RUN apk add -U !pyc \
-    tzdata wget \
-    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-pillow \
+    tzdata wget mimalloc2 mimalloc2-insecure \
+    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-openssl py3-pillow \
     py3-pip py3-cffi \
     ffmpeg \
+    py3-magic \
     vips-jxl vips-heif vips-poppler vips-magick \
     py3-numpy fftw libsndfile \
     vamp-sdk vamp-sdk-libs \
     && apk add -t .bd \
     bash wget gcc g++ make cmake patchelf \
     python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
-    py3-wheel py3-numpy-dev \
+    py3-wheel py3-numpy-dev libffi-dev \
     vamp-sdk-dev \
     && rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
     && python3 -m pip install pyvips \

@@ -31,7 +32,8 @@ RUN apk add -U !pyc \
     && ln -s /root/vamp /root/.local /
 
 COPY i/dist/copyparty-sfx.py innvikler.sh ./
-RUN ash innvikler.sh && rm innvikler.sh
+ADD base ./base
+RUN ash innvikler.sh dj
 
 WORKDIR /w
 EXPOSE 3923
@@ -1,4 +1,7 @@
-FROM debian:12-slim
+FROM DO_NOT_USE_THIS_DOCKER_IMAGE
+# this image is an unmaintained experiment to see whether alpine was the correct choice (it was)
+
+#FROM debian:12-slim
 WORKDIR /z
 LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
     org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \

@@ -1,4 +1,7 @@
-FROM fedora:39
+FROM DO_NOT_USE_THIS_DOCKER_IMAGE
+# this image is an unmaintained experiment to see whether alpine was the correct choice (it was)
+
+#FROM fedora:39
 WORKDIR /z
 LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
     org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \

@@ -1,4 +1,7 @@
-FROM fedora:38
+FROM DO_NOT_USE_THIS_DOCKER_IMAGE
+# this image is an unmaintained experiment to see whether alpine was the correct choice (it was)
+
+#FROM fedora:38
 WORKDIR /z
 LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
     org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \

@@ -1,4 +1,7 @@
-FROM ubuntu:23.04
+FROM DO_NOT_USE_THIS_DOCKER_IMAGE
+# this image is an unmaintained experiment to see whether alpine was the correct choice (it was)
+
+#FROM ubuntu:23.04
 WORKDIR /z
 LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
     org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
@@ -8,11 +8,12 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
 ENV XDG_CONFIG_HOME=/cfg
 
 RUN apk --no-cache add !pyc \
-    tzdata wget \
-    py3-jinja2 py3-argon2-cffi py3-pillow py3-mutagen
+    tzdata wget mimalloc2 mimalloc2-insecure \
+    py3-jinja2 py3-argon2-cffi py3-openssl py3-pillow py3-mutagen
 
 COPY i/dist/copyparty-sfx.py innvikler.sh ./
-RUN ash innvikler.sh && rm innvikler.sh
+ADD base ./base
+RUN ash innvikler.sh im
 
 WORKDIR /w
 EXPOSE 3923

@@ -8,20 +8,22 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
 ENV XDG_CONFIG_HOME=/cfg
 
 RUN apk add -U !pyc \
-    tzdata wget \
-    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-pillow \
+    tzdata wget mimalloc2 mimalloc2-insecure \
+    py3-jinja2 py3-argon2-cffi py3-pyzmq py3-openssl py3-pillow \
     py3-pip py3-cffi \
     ffmpeg \
+    py3-magic \
     vips-jxl vips-heif vips-poppler vips-magick \
     && apk add -t .bd \
     bash wget gcc g++ make cmake patchelf \
-    python3-dev py3-wheel \
+    python3-dev py3-wheel libffi-dev \
     && rm -f /usr/lib/python3*/EXTERNALLY-MANAGED \
     && python3 -m pip install pyvips \
     && apk del py3-pip .bd
 
 COPY i/dist/copyparty-sfx.py innvikler.sh ./
-RUN ash innvikler.sh && rm innvikler.sh
+ADD base ./base
+RUN ash innvikler.sh iv
 
 WORKDIR /w
 EXPOSE 3923
@@ -11,7 +11,7 @@ RUN apk --no-cache add !pyc \
     py3-jinja2
 
 COPY i/dist/copyparty-sfx.py innvikler.sh ./
-RUN ash innvikler.sh && rm innvikler.sh
+RUN ash innvikler.sh min
 
 WORKDIR /w
 EXPOSE 3923

@@ -28,6 +28,14 @@ all:
 
 	docker image ls
 
+min:
+	rm -rf i
+	mkdir i
+	tar -cC../.. dist/copyparty-sfx.py bin/mtag | tar -xvCi
+
+	podman build --squash --pull=always -t copyparty/min:latest -f Dockerfile.min .
+	echo 'scale=1;'`podman save copyparty/min:latest | pigz -c | wc -c`/1024/1024 | bc
+
 push:
 	docker push copyparty/min
 	docker push copyparty/im
@@ -101,6 +101,14 @@ the following advice is best-effort and not guaranteed to be entirely correct
 
 * copyparty will generally create a `.hist` folder at the top of each volume, which contains the filesystem index, thumbnails and such. For performance reasons, but also just to keep things tidy, it might be convenient to store these inside the config folder instead. Add the line `hist: /cfg/hists/` inside the `[global]` section of your `copyparty.conf` to do this
 
+* if you want more performance, and you're OK with doubling the RAM usage, then consider enabling mimalloc **(maybe buggy)** with one of these:
+
+  * `-e LD_PRELOAD=/usr/lib/libmimalloc-secure.so.2` makes download-as-zip **3x** as fast, filesystem-indexing **1.5x** as fast, etc.
+
+  * `-e LD_PRELOAD=/usr/lib/libmimalloc-insecure.so.2` adds another 10% speed but makes it easier to exploit future vulnerabilities
+
+  * complete example: `podman run --rm -it -p 3923:3923 -v "$PWD:/w:z" -e LD_PRELOAD=/usr/lib/libmimalloc-secure.so.2 copyparty/ac -v /w::r`
+
 
 ## enabling the ftp server
 
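To make the two tips in that README hunk concrete, here is a minimal sketch combining them; the `hist:` key, the `LD_PRELOAD` path and the `/w` mount come straight from the advice above, while the volume block inside the config file and the `-c` flag are illustrative assumptions rather than part of this diff:

```sh
# assumed layout: share the current folder, keep config (and .hist data) in ./cfg
mkdir -p cfg
cat > cfg/copyparty.conf <<'EOF'
[global]
  hist: /cfg/hists/

[/]
  /w
  accs:
    r: *
EOF

# run the "ac" flavor with mimalloc preloaded (the "maybe buggy" speedup)
podman run --rm -it -p 3923:3923 \
  -v "$PWD:/w:z" -v "$PWD/cfg:/cfg:z" \
  -e LD_PRELOAD=/usr/lib/libmimalloc-secure.so.2 \
  copyparty/ac -c /cfg/copyparty.conf
```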
scripts/docker/base/Dockerfile.zlibng (new file, 5 lines)
@@ -0,0 +1,5 @@
+FROM alpine:latest
+WORKDIR /z
+
+RUN apk add py3-pip make gcc musl-dev python3-dev
+RUN pip wheel https://files.pythonhosted.org/packages/c4/a7/0b7673be5945071e99364a3ac1987b02fc1d416617e97f3e8816d275174e/zlib_ng-0.5.1.tar.gz

scripts/docker/base/Makefile (new file, 15 lines)
@@ -0,0 +1,15 @@
+self := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
+
+all:
+	# build zlib-ng from source so we know how the sausage was made
+	# (still only doing the archs which are officially supported/tested)
+
+	podman build --arch amd64 -t localhost/cpp-zlibng-amd64:latest -f Dockerfile.zlibng .
+	podman run --arch amd64 --rm --log-driver=none -i localhost/cpp-zlibng-amd64:latest tar -cC/z . | tar -xv
+
+	podman build --arch arm64 -t localhost/cpp-zlibng-amd64:latest -f Dockerfile.zlibng .
+	podman run --arch arm64 --rm --log-driver=none -i localhost/cpp-zlibng-amd64:latest tar -cC/z . | tar -xv
+
+sh:
+	@printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
+	docker run --rm -it --entrypoint /bin/ash `docker images -aq | head -n 1`
@@ -1,12 +1,15 @@
 #!/bin/ash
 set -ex
 
-# patch musl cve https://www.openwall.com/lists/musl/2025/02/13/1
-apk add -U grep
-grep -aobRE 'euckr[^\w]ksc5601[^\w]ksx1001[^\w]cp949[^\w]' /lib/ | awk -F: '$2>999{printf "%d %s\n",$2,$1}' | while read ofs fn
-do printf -- '-----\0-------\0-------\0-----\0' | dd bs=1 iflag=fullblock conv=notrunc seek=$ofs of=$fn; done 2>&1 |
-tee /dev/stderr | grep -E copied, | wc -l | grep '^2$'
-apk del grep
+# use zlib-ng if available
+f=/z/base/zlib_ng-0.5.1-cp312-cp312-linux_$(cat /etc/apk/arch).whl
+[ "$1" != min ] && [ -e $f ] && {
+	apk add -t .bd !pyc py3-pip
+	rm -f /usr/lib/python3*/EXTERNALLY-MANAGED
+	pip install $f
+	apk del .bd
+}
+rm -rf /z/base
 
 # cleanup for flavors with python build steps (dj/iv)
 rm -rf /var/cache/apk/* /root/.cache

@@ -29,6 +32,9 @@ rm -rf \
     /tmp/pe-* /z/copyparty-sfx.py \
     ensurepip pydoc_data turtle.py turtledemo lib2to3
 
+# speedhack
+sed -ri 's/os.environ.get\("PRTY_NO_IMPRESO"\)/"1"/' /usr/lib/python3.*/site-packages/copyparty/util.py
+
 # drop bytecode
 find / -xdev -name __pycache__ -print0 | xargs -0 rm -rf
 

@@ -47,7 +53,34 @@ find -name __pycache__ |
 cd /z
 python3 -m copyparty \
     --ign-ebind -p$((1024+RANDOM)),$((1024+RANDOM)),$((1024+RANDOM)) \
-    --no-crt -qi127.1 --exit=idx -e2dsa -e2ts
+    -v .::r --no-crt -qi127.1 --exit=idx -e2dsa -e2ts
+
+########################################################################
+# test download-as-tar.gz
+
+t=$(mktemp)
+python3 -m copyparty \
+    --ign-ebind -p$((1024+RANDOM)),$((1024+RANDOM)),$((1024+RANDOM)) \
+    -v .::r --no-crt -qi127.1 --wr-h-eps $t & pid=$!
+
+for n in $(seq 1 900); do sleep 0.2
+    v=$(awk '/^127/{print;n=1;exit}END{exit n-1}' $t) && break
+done
+[ -z "$v" ] && echo SNAAAAAKE && exit 1
+rm $t
+
+for n in $(seq 1 900); do sleep 0.2
+    wget -O- http://${v/ /:}/?tar=gz:1 >tf && break
+done
+tar -xzO top/innvikler.sh <tf | cmp innvikler.sh
+rm tf
+
+kill $pid; wait $pid
+
+########################################################################
 
 # output from -e2d
-rm -rf .hist
+rm -rf .hist /cfg/copyparty
+
+# goodbye
+exec rm innvikler.sh
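As a small aside, a sketch of what the new zlib-ng branch above evaluates to on a typical x86_64 Alpine container; the commands are verbatim from the hunk, and the only assumption is that `/etc/apk/arch` contains `x86_64` there (`aarch64` being the other case the base Makefile builds for):

```sh
# on x86_64 alpine, /etc/apk/arch holds "x86_64", so for every flavor except `min`
# the guarded block effectively runs:
f=/z/base/zlib_ng-0.5.1-cp312-cp312-linux_x86_64.whl
apk add -t .bd !pyc py3-pip
rm -f /usr/lib/python3*/EXTERNALLY-MANAGED
pip install $f
apk del .bd
```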
@@ -7,7 +7,7 @@ import subprocess as sp
 
 # to convert the copyparty --help to html, run this in xfce4-terminal @ 140x43:
 _ = r""""
-echo; for a in '' -bind -accounts -flags -handlers -hooks -urlform -exp -ls -dbd -pwhash -zm; do
+echo; for a in '' -bind -accounts -flags -handlers -hooks -urlform -exp -ls -dbd -chmod -pwhash -zm; do
 ./copyparty-sfx.py --help$a 2>/dev/null; printf '\n\n\n%0139d\n\n\n'; done # xfce4-terminal @ 140x43
 """
 # click [edit] => [select all]

@@ -23,7 +23,7 @@ exit 0
 
 
 # first open an infinitely wide console (this is why you own an ultrawide) and copypaste this into it:
-for a in '' -bind -accounts -flags -handlers -hooks -urlform -exp -ls -dbd -pwhash -zm; do
+for a in '' -bind -accounts -flags -handlers -hooks -urlform -exp -ls -dbd -chmod -pwhash -zm; do
 ./copyparty-sfx.py --help$a 2>/dev/null; printf '\n\n\n%0255d\n\n\n'; done
 
 # then copypaste all of the output by pressing ctrl-shift-a, ctrl-shift-c
@@ -237,6 +237,8 @@ necho() {
 	tar -zxf $f
 	mv partftpy-*/partftpy .
 	rm -rf partftpy-* partftpy/bin
+	#(cd partftpy && "$pybin" ../../scripts/strip_hints/a.py; rm uh)  # dont need the full thing, just this:
+	sed -ri 's/from typing import TYPE_CHECKING$/TYPE_CHECKING = False/' partftpy/TftpShared.py
 
 	necho collecting python-magic
 	v=0.4.27

@@ -535,6 +537,7 @@ find | grep -E '\.(js|html)$' | while IFS= read -r f; do
 done
 
 gzres() {
+	local pk=
 	[ $zopf ] && command -v zopfli && pk="zopfli --i$zopf"
 	[ $zopf ] && command -v pigz && pk="pigz -11 -I $zopf"
 	[ -z "$pk" ] && pk='gzip'

@@ -626,7 +629,6 @@ suf=
 [ $use_gz ] && {
 	sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
 	py=$py.t
-	suf=-gz
 }
 
 "$pybin" $py --sfx-make tar.bz2 $ver $ts
@@ -14,9 +14,10 @@ clean=--clean
|
|||||||
|
|
||||||
uname -s | grep WOW64 && m=64 || m=32
|
uname -s | grep WOW64 && m=64 || m=32
|
||||||
uname -s | grep NT-10 && w10=1 || w7=1
|
uname -s | grep NT-10 && w10=1 || w7=1
|
||||||
|
[ $w7 ] && export PRTY_NO_MAGIC=1
|
||||||
[ $w7 ] && [ -e up2k.sh ] && [ ! "$1" ] && ./up2k.sh
|
[ $w7 ] && [ -e up2k.sh ] && [ ! "$1" ] && ./up2k.sh
|
||||||
|
|
||||||
[ $w7 ] && pyv=37 || pyv=312
|
[ $w7 ] && pyv=37 || pyv=313
|
||||||
esuf=
|
esuf=
|
||||||
[ $w7 ] && [ $m = 32 ] && esuf=32
|
[ $w7 ] && [ $m = 32 ] && esuf=32
|
||||||
[ $w7 ] && [ $m = 64 ] && esuf=-winpe64
|
[ $w7 ] && [ $m = 64 ] && esuf=-winpe64
|
||||||
@@ -79,7 +80,6 @@ excl=(
|
|||||||
email.parser
|
email.parser
|
||||||
importlib.resources
|
importlib.resources
|
||||||
importlib_resources
|
importlib_resources
|
||||||
inspect
|
|
||||||
multiprocessing
|
multiprocessing
|
||||||
packaging
|
packaging
|
||||||
pdb
|
pdb
|
||||||
@@ -90,20 +90,26 @@ excl=(
|
|||||||
urllib.request
|
urllib.request
|
||||||
urllib.response
|
urllib.response
|
||||||
urllib.robotparser
|
urllib.robotparser
|
||||||
zipfile
|
|
||||||
)
|
)
|
||||||
[ $w10 ] && excl+=(
|
[ $w10 ] && excl+=(
|
||||||
|
_pyrepl
|
||||||
|
distutils
|
||||||
|
setuptools
|
||||||
|
PIL._avif
|
||||||
PIL.ImageQt
|
PIL.ImageQt
|
||||||
PIL.ImageShow
|
PIL.ImageShow
|
||||||
PIL.ImageTk
|
PIL.ImageTk
|
||||||
PIL.ImageWin
|
PIL.ImageWin
|
||||||
PIL.PdfParser
|
PIL.PdfParser
|
||||||
|
zipimport
|
||||||
) || excl+=(
|
) || excl+=(
|
||||||
|
inspect
|
||||||
PIL
|
PIL
|
||||||
PIL.ExifTags
|
PIL.ExifTags
|
||||||
PIL.Image
|
PIL.Image
|
||||||
PIL.ImageDraw
|
PIL.ImageDraw
|
||||||
PIL.ImageOps
|
PIL.ImageOps
|
||||||
|
zipfile
|
||||||
)
|
)
|
||||||
excl=( "${excl[@]/#/--exclude-module }" )
|
excl=( "${excl[@]/#/--exclude-module }" )
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ f117016b1e6a7d7e745db30d3e67f1acf7957c443a0dd301b6c5e10b8368f2aa4db6be9782d2d3f8
 17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
 b297ff66ec50cf5a1abcf07d6ac949644c5150ba094ffac974c5d27c81574c3e97ed814a47547f4b03a4c83ea0fb8f026433fca06a3f08e32742dc5c024f3d07 pywin32_ctypes-0.2.3-py3-none-any.whl
 085d39ef4426aa5f097fbc484595becc16e61ca23fc7da4d2a8bba540a3b82e789e390b176c7151bdc67d01735cce22b1562cdb2e31273225a2d3e275851a4ad setuptools-70.3.0-py3-none-any.whl
-360a141928f4a7ec18a994602cbb28bbf8b5cc7c077a06ac76b54b12fa769ed95ca0333a5cf728923a8e0baeb5cc4d5e73e5b3de2666beb05eb477d8ae719093 upx-4.2.4-win32.zip
+644931f8e1764e168c257c11c77b3d2ac5408397d97b0eef98168a058efe793d3ab6900dc2e9c54923a2bd906dd66bfbff8db6ff43418513e530a1bd501c6ccd upx-5.0.1-win32.zip
 # win7
 3253e86471e6f9fa85bfdb7684cd2f964ed6e35c6a4db87f81cca157c049bef43e66dfcae1e037b2fb904567b1e028aaeefe8983ba3255105df787406d2aa71e en_windows_7_professional_with_sp1_x86_dvd_u_677056.iso
 ab0db0283f61a5bbe44797d74546786bf41685175764a448d2e3bd629f292f1e7d829757b26be346b5044d78c9c1891736d93237cee4b1b6f5996a902c86d15f en_windows_7_professional_with_sp1_x64_dvd_u_676939.iso

@@ -23,11 +23,12 @@ ac96786e5d35882e0c5b724794329c9125c2b86ae7847f17acfc49f0d294312c6afc1c3f248655de
 # win10
 0a2cd4cadf0395f0374974cd2bc2407e5cc65c111275acdffb6ecc5a2026eee9e1bb3da528b35c7f0ff4b64563a74857d5c2149051e281cc09ebd0d1968be9aa en-us_windows_10_enterprise_ltsc_2021_x64_dvd_d289cf96.iso
 16cc0c58b5df6c7040893089f3eb29c074aed61d76dae6cd628d8a89a05f6223ac5d7f3f709a12417c147594a87a94cc808d1e04a6f1e407cc41f7c9f47790d1 virtio-win-0.1.248.iso
-18b9e8cfa682da51da1b682612652030bd7f10e4a1d5ea5220ab32bde734b0e6fe1c7dbd903ac37928c0171fd45d5ca602952054de40a4e55e9ed596279516b5 jinja2-3.1.5-py3-none-any.whl
-6df21f0da408a89f6504417c7cdf9aaafe4ed88cfa13e9b8fa8414f604c0401f885a04bbad0484dc51a29284af5d1548e33c6cc6bfb9896d9992c1b1074f332d MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl
+9a7f40edc6f9209a2acd23793f3cbd6213c94f36064048cb8bf6eb04f1bdb2c2fe991cb09f77fe8b13e5cd85c618ef23573e79813b2fef899ab2f290cd129779 jinja2-3.1.6-py3-none-any.whl
+00731cfdd9d5c12efef04a7161c90c1e5ed1dc4677aa88a1d4054aff836f3430df4da5262ed4289c21637358a9e10e5df16f76743cbf5a29bb3a44b146c19cf3 MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl
 8a6e2b13a2ec4ef914a5d62aad3db6464d45e525a82e07f6051ed10474eae959069e165dba011aefb8207cdfd55391d73d6f06362c7eb247b08763106709526e mutagen-1.47.0-py3-none-any.whl
-0203ec2551c4836696cfab0b2c9fff603352f03fa36e7476e2e1ca7ec57a3a0c24bd791fcd92f342bf817f0887854d9f072e0271c643de4b313d8c9569ba8813 packaging-24.1-py3-none-any.whl
-12d7921dc7dfd8a4b0ea0fa2bae8f1354fcdd59ece3d7f4e075aed631f9ba791dc142c70b1ccd1e6287c43139df1db26bd57a7a217c8da3a77326036495cdb57 pillow-11.1.0-cp312-cp312-win_amd64.whl
-f0463895e9aee97f31a2003323de235fed1b26289766dc0837261e3f4a594a31162b69e9adbb0e9a31e2e2d4b5f25c762ed1669553df7dc89a8ba4f85d297873 pyinstaller-6.11.1-py3-none-win_amd64.whl
-d550a0a14428386945533de2220c4c2e37c0c890fc51a600f626c6ca90a32d39572c121ec04c157ba3a8d6601cb021f8433d871b5c562a3d342c804fffec90c1 pyinstaller_hooks_contrib-2024.11-py3-none-any.whl
-17b64ff6744004a05d475c8f6de3e48286db4069afad4cae690f83b3555f8e35ceafb210eeba69a11e983d0da3001099de284b6696ed0f1bf9cd791938a7f2cd python-3.12.9-amd64.exe
+a726fb46cce24f781fc8b55a3e6dea0a884ebc3b2b400ea74aa02333699f4955a5dc1e2ec5927ac72f35a624401f3f3b442882ba1cc4cadaf9c88558b5b8bdae packaging-25.0-py3-none-any.whl
+3e39ea6e16b502d99a2e6544579095d0f7c6097761cd85135d5e929b9dec1b32e80669a846f94ee8c2cca9be2f5fe728625d09453988864c04e16bb8445c3f91 pillow-11.3.0-cp313-cp313-win_amd64.whl
+59fbbcae044f4ee73d203ac74b553b27bfad3e6b2f3fb290fd3f8774753c6b545176b6b3399c240b092d131d152290ce732750accd962dc1e48e930be85f5e53 pyinstaller-6.14.1-py3-none-win_amd64.whl
+fc6f3e144c5f5b662412de07cb8bf0c2eb3b3be21d19ec448aef3c4244d779b9ab8027fd67a4871e6e13823b248ea0f5a7a9241a53aef30f3b51a6d3cb5bdb3f pyinstaller_hooks_contrib-2025.5-py3-none-any.whl
+2c7a52e223b8186c21009d3fa5ed6a856d8eb4ef3b98f5d24c378c6a1afbfa1378bd7a51d6addc500e263d7989efb544c862bf920055e740f137c702dfd9d18b python-3.13.5-amd64.exe
+2a0420f7faaa33d2132b82895a8282688030e939db0225ad8abb95a47bdb87b45318f10985fc3cee271a9121441c1526caa363d7f2e4a4b18b1a674068766e87 setuptools-80.9.0-py3-none-any.whl
@@ -29,19 +29,19 @@ uname -s | grep NT-10 && w10=1 || {
 fns=(
 	altgraph-0.17.4-py2.py3-none-any.whl
 	pefile-2023.2.7-py3-none-any.whl
-	pywin32_ctypes-0.2.2-py3-none-any.whl
-	setuptools-70.3.0-py3-none-any.whl
-	upx-4.2.4-win32.zip
+	pywin32_ctypes-0.2.3-py3-none-any.whl
+	upx-5.0.1-win32.zip
 )
 [ $w10 ] && fns+=(
-	jinja2-3.1.4-py3-none-any.whl
-	MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl
+	jinja2-3.1.6-py3-none-any.whl
+	MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl
 	mutagen-1.47.0-py3-none-any.whl
-	packaging-24.1-py3-none-any.whl
-	pillow-11.1.0-cp312-cp312-win_amd64.whl
-	pyinstaller-6.10.0-py3-none-win_amd64.whl
-	pyinstaller_hooks_contrib-2024.8-py3-none-any.whl
-	python-3.12.9-amd64.exe
+	packaging-25.0-py3-none-any.whl
+	pillow-11.3.0-cp313-cp313-win_amd64.whl
+	pyinstaller-6.14.1-py3-none-win_amd64.whl
+	pyinstaller_hooks_contrib-2025.5-py3-none-any.whl
+	python-3.13.5-amd64.exe
+	setuptools-80.9.0-py3-none-any.whl
 )
 [ $w7 ] && fns+=(
 	future-1.0.0-py3-none-any.whl

@@ -49,6 +49,7 @@ fns=(
 	packaging-24.0-py3-none-any.whl
 	pip-24.0-py3-none-any.whl
 	pyinstaller_hooks_contrib-2023.8-py2.py3-none-any.whl
+	setuptools-70.3.0-py3-none-any.whl
 	typing_extensions-4.7.1-py3-none-any.whl
 	zipp-3.15.0-py3-none-any.whl
 )
@@ -80,7 +81,7 @@ close and reopen git-bash so python is in PATH
|
|||||||
|
|
||||||
===[ copy-paste into git-bash ]================================
|
===[ copy-paste into git-bash ]================================
|
||||||
uname -s | grep NT-10 && w10=1 || w7=1
|
uname -s | grep NT-10 && w10=1 || w7=1
|
||||||
[ $w7 ] && pyv=37 || pyv=312
|
[ $w7 ] && pyv=37 || pyv=313
|
||||||
appd=$(cygpath.exe "$APPDATA")
|
appd=$(cygpath.exe "$APPDATA")
|
||||||
cd ~/Downloads &&
|
cd ~/Downloads &&
|
||||||
yes | unzip upx-*-win32.zip &&
|
yes | unzip upx-*-win32.zip &&
|
||||||
|
|||||||
@@ -34,6 +34,7 @@ shift
|
|||||||
./make-sfx.sh "$@"
|
./make-sfx.sh "$@"
|
||||||
f=../dist/copyparty-sfx
|
f=../dist/copyparty-sfx
|
||||||
[ -e $f.py ] && s= || s=-gz
|
[ -e $f.py ] && s= || s=-gz
|
||||||
|
# TODO: the -gz suffix is gone, can drop all the $s stuff probably
|
||||||
|
|
||||||
$f$s.py --version >/dev/null
|
$f$s.py --version >/dev/null
|
||||||
|
|
||||||
|
@@ -94,6 +94,7 @@ copyparty/web/deps/prismd.css,
 copyparty/web/deps/scp.woff2,
 copyparty/web/deps/sha512.ac.js,
 copyparty/web/deps/sha512.hw.js,
+copyparty/web/idp.html,
 copyparty/web/iiam.gif,
 copyparty/web/md.css,
 copyparty/web/md.html,
@@ -413,6 +413,9 @@ def run_i(ld):
     for x in ld:
         sys.path.insert(0, x)
 
+    e = os.environ
+    e["PRTY_NO_IMPRESO"] = "1"
+
     from copyparty.__main__ import main as p
 
     p()

@@ -84,6 +84,9 @@ def uh2(fp):
         if " # !rm" in ln:
             continue
 
+        if ln.endswith("TYPE_CHECKING"):
+            ln = ln.replace("from typing import TYPE_CHECKING", "TYPE_CHECKING = False")
+
         lns.append(ln)
 
     cs = "\n".join(lns)
@@ -121,7 +121,7 @@ var tl_browser = {
 	"file-manager",
 	["G", "toggle list / grid view"],
 	["T", "toggle thumbnails / icons"],
-	["🡅 A/D", "thumbnail size"],
+	["⇧ A/D", "thumbnail size"],
 	["ctrl-K", "delete selected"],
 	["ctrl-X", "cut selection to clipboard"],
 	["ctrl-C", "copy selection to clipboard"],

@@ -131,9 +131,9 @@ var tl_browser = {
 
 	"file-list-sel",
 	["space", "toggle file selection"],
-	["🡑/🡓", "move selection cursor"],
-	["ctrl 🡑/🡓", "move cursor and viewport"],
-	["🡅 🡑/🡓", "select prev/next file"],
+	["↑/↓", "move selection cursor"],
+	["ctrl ↑/↓", "move cursor and viewport"],
+	["⇧ ↑/↓", "select prev/next file"],
 	["ctrl-A", "select all files / folders"],
 ], [
 	"navigation",

@@ -148,6 +148,7 @@ var tl_browser = {
 	["U/O", "skip 10sec back/fwd"],
 	["0..9", "jump to 0%..90%"],
 	["P", "play/pause (also initiates)"],
+	["S", "select playing song"],
 	["Y", "download song"],
 ], [
 	"image-viewer",

@@ -155,7 +156,8 @@ var tl_browser = {
 	["Home/End", "first/last pic"],
 	["F", "fullscreen"],
 	["R", "rotate clockwise"],
-	["🡅 R", "rotate ccw"],
+	["⇧ R", "rotate ccw"],
+	["S", "select pic"],
 	["Y", "download pic"],
 ], [
 	"video-player",
@@ -224,10 +226,13 @@ var tl_browser = {
 	"wt_pst": "paste a previously cut / copied selection$NHotkey: ctrl-V",
 	"wt_selall": "select all files$NHotkey: ctrl-A (when file focused)",
 	"wt_selinv": "invert selection",
+	"wt_zip1": "download this folder as archive",
 	"wt_selzip": "download selection as archive",
 	"wt_seldl": "download selection as separate files$NHotkey: Y",
 	"wt_npirc": "copy irc-formatted track info",
 	"wt_nptxt": "copy plaintext track info",
+	"wt_m3ua": "add to m3u playlist (click <code>📻copy</code> later)",
+	"wt_m3uc": "copy m3u playlist to clipboard",
 	"wt_grid": "toggle grid / list view$NHotkey: G",
 	"wt_prev": "previous track$NHotkey: J",
 	"wt_play": "play / pause$NHotkey: P",

@@ -235,7 +240,8 @@ var tl_browser = {
 
 	"ul_par": "parallel uploads:",
 	"ut_rand": "randomize filenames",
-	"ut_u2ts": "copy the last-modified timestamp$Nfrom your filesystem to the server",
+	"ut_u2ts": "copy the last-modified timestamp$Nfrom your filesystem to the server\">📅",
+	"ut_ow": "overwrite existing files on the server?$N🛡️: never (will generate a new filename instead)$N🕒: overwrite if server-file is older than yours$N♻️: always overwrite if the files are different",
 	"ut_mt": "continue hashing other files while uploading$N$Nmaybe disable if your CPU or HDD is a bottleneck",
 	"ut_ask": 'ask for confirmation before upload starts">💭',
 	"ut_pot": "improve upload speed on slow devices$Nby making the UI less complex",

@@ -306,6 +312,7 @@ var tl_browser = {
 	"ct_csel": 'use CTRL and SHIFT for file selection in grid-view">sel',
 	"ct_ihop": 'when the image viewer is closed, scroll down to the last viewed file">g⮯',
 	"ct_dots": 'show hidden files (if server permits)">dotfiles',
+	"ct_qdel": 'when deleting files, only ask for confirmation once">qdel',
 	"ct_dir1st": 'sort folders before files">📁 first',
 	"ct_nsort": 'natural sort (for filenames with leading digits)">nsort',
 	"ct_readme": 'show README.md in folder listings">📜 readme',

@@ -327,7 +334,9 @@ var tl_browser = {
 	"cut_nag": "OS notification when upload completes$N(only if the browser or tab is not active)",
 	"cut_sfx": "audible alert when upload completes$N(only if the browser or tab is not active)",
 
-	"cut_mt": "use multithreading to accelerate file hashing$N$Nthis uses web-workers and requires$Nmore RAM (up to 512 MiB extra)$N$N30% faster https, 4.5x faster http,$Nand 5.3x faster on android phones\">mt",
+	"cut_mt": "use multithreading to accelerate file hashing$N$Nthis uses web-workers and requires$Nmore RAM (up to 512 MiB extra)$N$Nmakes https 30% faster, http 4.5x faster\">mt",
+
+	"cut_wasm": "use wasm instead of the browser's built-in hasher; improves speed on chrome-based browsers but increases CPU load, and many older versions of chrome have bugs which makes the browser consume all RAM and crash if this is enabled\">wasm",
 
 	"cft_text": "favicon text (blank and refresh to disable)",
 	"cft_fg": "foreground color",

@@ -349,10 +358,13 @@ var tl_browser = {
 	"ml_pmode": "at end of folder...",
 	"ml_btns": "cmds",
 	"ml_tcode": "transcode",
+	"ml_tcode2": "transcode to",
 	"ml_tint": "tint",
 	"ml_eq": "audio equalizer",
 	"ml_drc": "dynamic range compressor",
 
+	"mt_loop": "loop/repeat one song\">🔁",
+	"mt_one": "stop after one song\">1️⃣",
 	"mt_shuf": "shuffle the songs in each folder\">🔀",
 	"mt_aplay": "autoplay if there is a song-ID in the link you clicked to access the server$N$Ndisabling this will also stop the page URL from being updated with song-IDs when playing music, to prevent autoplay if these settings are lost but the URL remains\">a▶",
 	"mt_preload": "start loading the next song near the end for gapless playback\">preload",

@@ -361,6 +373,7 @@ var tl_browser = {
 	"mt_fau": "on phones, prevent music from stopping if the next song doesn't preload fast enough (can make tags display glitchy)\">☕️",
 	"mt_waves": "waveform seekbar:$Nshow audio amplitude in the scrubber\">~s",
 	"mt_npclip": "show buttons for clipboarding the currently playing song\">/np",
+	"mt_m3u_c": "show buttons for clipboarding the$Nselected songs as m3u8 playlist entries\">📻",
 	"mt_octl": "os integration (media hotkeys / osd)\">os-ctl",
 	"mt_oseek": "allow seeking through os integration$N$Nnote: on some devices (iPhones),$Nthis replaces the next-song button\">seek",
 	"mt_oscv": "show album cover in osd\">art",
@@ -369,15 +382,25 @@ var tl_browser = {
 	"mt_uncache": "clear cache (try this if your browser cached$Na broken copy of a song so it refuses to play)\">uncache",
 	"mt_mloop": "loop the open folder\">🔁 loop",
 	"mt_mnext": "load the next folder and continue\">📂 next",
+	"mt_mstop": "stop playback\">⏸ stop",
 	"mt_cflac": "convert flac / wav to opus\">flac",
 	"mt_caac": "convert aac / m4a to opus\">aac",
 	"mt_coth": "convert all others (not mp3) to opus\">oth",
+	"mt_c2opus": "best choice for desktops, laptops, android\">opus",
+	"mt_c2owa": "opus-weba, for iOS 17.5 and newer\">owa",
+	"mt_c2caf": "opus-caf, for iOS 11 through 17\">caf",
+	"mt_c2mp3": "use this on very old devices\">mp3",
+	"mt_c2ok": "nice, good choice",
+	"mt_c2nd": "that's not the recommended output format for your device, but that's fine",
+	"mt_c2ng": "your device does not seem to support this output format, but let's try anyways",
+	"mt_xowa": "there are bugs in iOS preventing background playback using this format; please use caf or mp3 instead",
 	"mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting",
 	"mt_eq": "enables the equalizer and gain control;$N$Nboost <code>0</code> = standard 100% volume (unmodified)$N$Nwidth <code>1 </code> = standard stereo (unmodified)$Nwidth <code>0.5</code> = 50% left-right crossfeed$Nwidth <code>0 </code> = mono$N$Nboost <code>-0.8</code> & width <code>10</code> = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that",
 	"mt_drc": "enables the dynamic range compressor (volume flattener / brickwaller); will also enable EQ to balance the spaghetti, so set all EQ fields except for 'width' to 0 if you don't want it$N$Nlowers the volume of audio above THRESHOLD dB; for every RATIO dB past THRESHOLD there is 1 dB of output, so default values of tresh -24 and ratio 12 means it should never get louder than -22 dB and it is safe to increase the equalizer boost to 0.8, or even 1.8 with ATK 0 and a huge RLS like 90 (only works in firefox; RLS is max 1 in other browsers)$N$N(see wikipedia, they explain it much better)",
 
 	"mb_play": "play",
 	"mm_hashplay": "play this audio file?",
+	"mm_m3u": "press <code>Enter/OK</code> to Play\npress <code>ESC/Cancel</code> to Edit",
 	"mp_breq": "need firefox 82+ or chrome 73+ or iOS 15+",
 	"mm_bload": "now loading...",
 	"mm_bconv": "converting to {0}, please wait...",

@@ -390,6 +413,7 @@ var tl_browser = {
 	"mm_eunk": "Unknown Errol",
 	"mm_e404": "Could not play audio; error 404: File not found.",
 	"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
+	"mm_e500": "Could not play audio; error 500: Check server logs.",
 	"mm_e5xx": "Could not play audio; server error ",
 	"mm_nof": "not finding any more audio files nearby",
 	"mm_prescan": "Looking for music to play next...",

@@ -402,8 +426,10 @@ var tl_browser = {
 	"f_empty": 'this folder is empty',
 	"f_chide": 'this will hide the column «{0}»\n\nyou can unhide columns in the settings tab',
 	"f_bigtxt": "this file is {0} MiB large -- really view as text?",
+	"f_bigtxt2": "view just the end of the file instead? this will also enable following/tailing, showing newly added lines of text in real time",
 	"fbd_more": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_more">show {2}</a> or <a href="#" id="bd_all">show all</a></div>',
 	"fbd_all": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_all">show all</a></div>',
+	"f_anota": "only {0} of the {1} items were selected;\nto select the full folder, first scroll to the bottom",
 
 	"f_dls": 'the file links in the current folder have\nbeen changed into download links',
 

@@ -505,6 +531,15 @@ var tl_browser = {
 	"tvt_next": "show next document$NHotkey: K\">⬇ next",
 	"tvt_sel": "select file ( for cut / copy / delete / ... )$NHotkey: S\">sel",
 	"tvt_edit": "open file in text editor$NHotkey: E\">✏️ edit",
+	"tvt_tail": "monitor file for changes; show new lines in real time\">📡 follow",
+	"tvt_wrap": "word-wrap\">↵",
+	"tvt_atail": "lock scroll to bottom of page\">⚓",
+	"tvt_ctail": "decode terminal colors (ansi escape codes)\">🌈",
+	"tvt_ntail": "scrollback limit (how many bytes of text to keep loaded)",
+
+	"m3u_add1": "song added to m3u playlist",
+	"m3u_addn": "{0} songs added to m3u playlist",
+	"m3u_clip": "m3u playlist now copied to clipboard\n\nyou should create a new textfile named something.m3u and paste the playlist in that document; this will make it playable",
 
 	"gt_vau": "don't show videos, just play the audio\">🎧",
 	"gt_msel": "enable file selection; ctrl-click a file to override$N$N<em>when active: doubleclick a file / folder to open it</em>$N$NHotkey: S\">multiselect",

@@ -600,6 +635,7 @@ var tl_browser = {
 	"u_https3": "for better performance",
 	"u_ancient": 'your browser is impressively ancient -- maybe you should <a href="#" onclick="goto(\'bup\')">use bup instead</a>',
 	"u_nowork": "need firefox 53+ or chrome 57+ or iOS 11+",
+	"tail_2old": "need firefox 105+ or chrome 71+ or iOS 14.5+",
 	"u_nodrop": 'your browser is too old for drag-and-drop uploading',
 	"u_notdir": "that's not a folder!\n\nyour browser is too old,\nplease try dragdrop instead",
 	"u_uri": "to dragdrop images from other browser windows,\nplease drop it onto the big upload button",

@@ -613,8 +649,10 @@ var tl_browser = {
 	"u_ewrite": 'you do not have write-access to this folder',
 	"u_eread": 'you do not have read-access to this folder',
 	"u_enoi": 'file-search is not enabled in server config',
+	"u_enoow": "overwrite will not work here; need Delete-permission",
 	"u_badf": 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n\n',
 	"u_blankf": 'These {0} files (of {1} total) are blank / empty; upload them anyways?\n\n',
+	"u_applef": 'These {0} files (of {1} total) are probably undesirable;\nPress <code>OK/Enter</code> to SKIP the following files,\nPress <code>Cancel/ESC</code> to NOT exclude, and UPLOAD those as well:\n\n',
 	"u_just1": '\nMaybe it works better if you select just one file',
 	"u_ff_many": "if you're using <b>Linux / MacOS / Android,</b> then this amount of files <a href=\"https://bugzilla.mozilla.org/show_bug.cgi?id=1790500\" target=\"_blank\"><em>may</em> crash Firefox!</a>\nif that happens, please try again (or use Chrome).",
 	"u_up_life": "This upload will be deleted from the server\n{0} after it completes",
tests/res/idp/7.conf (new file, 46 lines)
@@ -0,0 +1,46 @@
+# -*- mode: yaml -*-
+# vim: ft=yaml:
+
+[global]
+  idp-h-usr: x-idp-user
+  idp-h-grp: x-idp-group
+
+[/u/${u}]
+  /u/${u}
+  accs:
+    r: *
+
+[/uya/${u%+ga}]
+  /uya/${u}
+  accs:
+    r: *
+
+[/uyab/${u%+ga,%+gb}]
+  /uyab/${u}
+  accs:
+    r: *
+
+[/una/${u%-ga}]
+  /una/${u}
+  accs:
+    r: *
+
+[/unab/${u%-ga,%-gb}]
+  /unab/${u}
+  accs:
+    r: *
+
+[/gya/${g%+ga}]
+  /gya/${g}
+  accs:
+    r: *
+
+[/gna/${g%-ga}]
+  /gna/${g}
+  accs:
+    r: *
+
+[/gnab/${g%-ga,%-gb}]
+  /gnab/${g}
+  accs:
+    r: *

tests/res/idp/8.conf (new file, 47 lines)
@@ -0,0 +1,47 @@
+# -*- mode: yaml -*-
+# vim: ft=yaml:
+
+[groups]
+  ga: iua, iuab, iuabc
+  gb: iuab, iuabc, iub, iubc
+  gc: iuabc, iubc, iuc
+
+[/u/${u}]
+  /u/${u}
+  accs:
+    r: *
+
+[/uya/${u%+ga}]
+  /uya/${u}
+  accs:
+    r: *
+
+[/uyab/${u%+ga,%+gb}]
+  /uyab/${u}
+  accs:
+    r: *
+
+[/una/${u%-ga}]
+  /una/${u}
+  accs:
+    r: *
+
+[/unab/${u%-ga,%-gb}]
+  /unab/${u}
+  accs:
+    r: *
+
+[/gya/${g%+ga}]
+  /gya/${g}
+  accs:
+    r: *
+
+[/gna/${g%-ga}]
+  /gna/${g}
+  accs:
+    r: *
+
+[/gnab/${g%-ga,%-gb}]
+  /gnab/${g}
+  accs:
+    r: *
Some files were not shown because too many files have changed in this diff.