Compare commits

...

84 Commits

Author SHA1 Message Date
ed  c01cad091e  v1.6.10  2023-03-20 21:56:31 +00:00
ed  eb349f339c  update foldersync / rclone docs  2023-03-20 21:54:08 +00:00
ed  24d8caaf3e  switch rclone to owncloud mode so it sends lastmod  2023-03-20 21:45:52 +00:00
ed  5ac2c20959  basic support for rclone sync  2023-03-20 21:17:53 +00:00
ed  bb72e6bf30  support propfind of files (not just dirs)  2023-03-20 20:58:51 +00:00
ed  d8142e866a  accept last-modified from owncloud webdav extension  2023-03-20 20:28:26 +00:00
ed  7b7979fd61  add sftpgo to comparison + update docs  2023-03-19 21:45:35 +00:00
ed  749616d09d  help iOS understand short audio files  2023-03-19 20:03:35 +00:00
ed  5485c6d7ca  prisonparty: FFmpeg runs faster with /dev/urandom  2023-03-19 18:32:35 +00:00
ed  b7aea38d77  add iOS uploader (mk.ii)  2023-03-18 18:38:37 +00:00
ed  0ecd9f99e6  update archpkg to 1.6.9  2023-03-16 22:34:09 +00:00
ed  ca04a00662  v1.6.9  2023-03-16 21:06:18 +00:00
ed  8a09601be8  url-param ?v disables index.html  2023-03-16 20:52:43 +00:00
ed  1fe0d4693e  fix logues bleeding into navpane  2023-03-16 20:23:01 +00:00
ed  bba8a3c6bc  fix truncated search results  2023-03-16 20:12:13 +00:00
ed  e3d7f0c7d5  add tooltip delay to android too  2023-03-16 19:48:44 +00:00
ed  be7bb71bbc  add option to show index.html instead of listing  2023-03-16 19:41:33 +00:00
ed  e0c4829ec6  verify covers against db instead of fs  2023-03-15 19:48:43 +00:00
ed  5af1575329  readme: ideas welcome w  2023-03-14 22:24:43 +00:00
ed  884f966b86  update archpkg to 1.6.8  2023-03-12 18:55:02 +00:00
ed  f6c6fbc223  fix exe builder  2023-03-12 18:54:16 +00:00
ed  b0cc396bca  v1.6.8  2023-03-12 16:10:07 +00:00
ed  ae463518f6  u2cli: send upload stats to server + fix py2.6 support  2023-03-11 21:39:56 +00:00
ed  2be2e9a0d8  index folder thumbs in db  2023-03-11 11:43:29 +00:00
ed  e405fddf74  specify that only up2k clients will resume uploads  2023-03-09 22:47:37 +00:00
ed  c269b0dd91  show an error (instead of crashing) if a pic is 404  2023-03-09 22:37:12 +00:00
ed  8c3211263a  keep scanning folders for more music to play  2023-03-09 22:26:41 +00:00
ed  bf04e7c089  update some docs  2023-03-09 22:11:39 +00:00
ed  c7c6e48b1a  didn't compress numbered logfiles  2023-03-09 21:59:59 +00:00
ed  974ca773be  just to be extra sure  2023-03-09 21:49:29 +00:00
ed  9270c2df19  evict basic-browser from crawlers  2023-03-09 21:35:07 +00:00
ed  b39ff92f34  u2cli: support long paths on win7  2023-03-08 22:27:13 +00:00
ed  7454167f78  add DCO PR template  2023-03-08 08:27:17 +01:00
ed  5ceb3a962f  build up2k.exe  2023-03-07 22:58:14 +00:00
ed  52bd5642da  update archpkg to 1.6.7  2023-03-05 20:20:15 +00:00
ed  c39c93725f  v1.6.7  2023-03-05 20:18:16 +00:00
ed  d00f0b9fa7  ftp: support filezilla mkdir  2023-03-05 20:18:02 +00:00
ed  01cfc70982  add example for webdav automount  2023-03-05 19:52:45 +00:00
ed  e6aec189bd  fix flickering toast on upload finish  2023-03-05 19:49:54 +00:00
ed  c98fff1647  fix chunkpost-handshake race (affects --no-dedup only); a handshake arriving in the middle of the final chunk could cause dupes to become empty -- worst case leading to loss of data  2023-03-05 19:45:50 +00:00
ed  0009e31bd3  heavy webworker load can park the main thread of a background chrome tab for 10sec; piggyback some pokes off postmessage  2023-03-02 22:35:32 +00:00
ed  db95e880b2  thats not how it works  2023-02-28 22:19:06 +00:00
ed  e69fea4a59  exe: update screenshots  2023-02-26 22:26:40 +00:00
ed  4360800a6e  update archpkg to 1.6.6  2023-02-26 22:11:56 +00:00
ed  b179e2b031  prisonparty: ignore unresolvable mount paths; allows startup even if some locations are missing, for example if a server rebooted and some disks aren't up yet  2023-02-26 22:11:15 +00:00
ed  ecdec75b4e  v1.6.6  2023-02-26 20:30:17 +00:00
ed  5cb2e33353  update readmes + fix typo  2023-02-26 19:22:54 +00:00
ed  43ff2e531a  add deadline for filling data into a reserved filename  2023-02-26 19:13:35 +00:00
ed  1c2c9db8f0  retain upload time (but not ip) on file reindex  2023-02-26 19:09:24 +00:00
ed  7ea183baef  let http thread handle upload verification plugins  2023-02-26 19:07:49 +00:00
ed  ab87fac6d8  db got the wrong lastmod when linking dupes  2023-02-26 18:52:04 +00:00
ed  1e3b7eee3b  dont rmdir volume top on cleanup  2023-02-26 18:28:37 +00:00
ed  4de028fc3b  let controlpanel rescan button override lack of e2dsa  2023-02-26 18:27:10 +00:00
ed  604e5dfaaf  improve error handling / messages  2023-02-26 18:26:13 +00:00
ed  05e0c2ec9e  add xiu (batching hook; runs on idle after uploads) + bunch of tweaks/fixes for hooks  2023-02-26 18:23:32 +00:00
ed  76bd005bdc  cgen fixes  2023-02-21 19:42:08 +00:00
ed  5effaed352  add reminder that SSDP launches IE by default  2023-02-21 19:38:35 +00:00
ed  cedaf4809f  add exe integrity selfcheck  2023-02-21 19:18:10 +00:00
ed  6deaf5c268  add jitter simlation  2023-02-20 21:34:30 +00:00
ed  9dc6a26472  webdav.bat and readme tweaks  2023-02-20 21:00:04 +00:00
ed  14ad5916fc  freebsd: fancy console listing for fetch  2023-02-19 22:14:21 +00:00
ed  1a46738649  raise edgecases (broken envs on windows)  2023-02-19 22:13:33 +00:00
ed  9e5e3b099a  add optional deps to quickstart section  2023-02-19 22:13:02 +00:00
ed  292ce75cc2  return to previous url after login  2023-02-19 19:58:15 +00:00
ed  ce7df7afd4  update platform support listing  2023-02-19 15:16:50 +00:00
ed  e28e793f81  whoops  2023-02-19 15:11:04 +00:00
ed  3e561976db  optimize docker build times (884 to 379 sec)  2023-02-19 14:19:35 +00:00
ed  273a4eb7d0  list supported platforms  2023-02-19 01:00:37 +00:00
ed  6175f85bb6  more docker images for arm, arm64, s390x  2023-02-19 00:50:07 +00:00
ed  a80579f63a  build docker for x32 aarch64 armhf ppc64 s390x  2023-02-18 23:04:55 +00:00
ed  96d6bcf26e  if non-TLS, show warning in the login form  2023-02-17 22:49:03 +00:00
ed  49e8df25ac  ie11: support back button  2023-02-17 22:21:13 +00:00
ed  6a05850f21  also undupe search hits from overlapping volumes  2023-02-17 20:48:57 +00:00
ed  5e7c3defe3  update pypi description + docker links  2023-02-16 19:56:57 +00:00
ed  6c0987d4d0  mention --daw  2023-02-15 17:51:20 +00:00
ed  6eba9feffe  condense uploads listing on view change  2023-02-14 21:58:15 +00:00
ed  8adfcf5950  win10-based copyparty64.exe  2023-02-14 21:50:14 +00:00
ed  36d6fa512a  mention upcoming libopenmpt availability  2023-02-13 06:57:47 +00:00
ed  79b6e9b393  update archpkg to 1.6.5  2023-02-12 15:38:03 +00:00
ed  dc2e2cbd4b  v1.6.5  2023-02-12 14:11:45 +00:00
ed  5c12dac30f  most ffmpeg builds dont support compressed modules  2023-02-12 14:02:43 +00:00
ed  641929191e  fix reading smb shares on windows  2023-02-12 13:59:34 +00:00
ed  617321631a  docker: add annotations  2023-02-11 21:10:28 +00:00
ed  ddc0c899f8  update archpkg to 1.6.4  2023-02-11 21:01:45 +00:00
70 changed files with 2700 additions and 688 deletions

2  .github/pull_request_template.md  (vendored, Normal file)

@@ -0,0 +1,2 @@
Please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/

2  .gitignore  (vendored)

@@ -32,3 +32,5 @@ contrib/package/arch/src/
 # state/logs
 up.*.txt
 .hist/
+scripts/docker/*.out
+scripts/docker/*.err

157  README.md

@@ -1,35 +1,21 @@
# 🎉 copyparty # 💾🎉 copyparty
* portable file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/) turn almost any device into a file server with resumable uploads/downloads using [*any*](#browser-support) web browser
* MIT-Licensed, 2019-05-26, ed @ irc.rizon.net
* server only needs Python (2 or 3), all dependencies optional
* 🔌 protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
* 📱 [android app](#android-app) // [iPhone shortcuts](#ios-shortcuts)
## summary 👉 **[Get started](#quickstart)!** or visit the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
turn your phone or raspi into a portable file server with resumable uploads/downloads using *any* web browser
* server only needs Python (`2.7` or `3.3+`), all dependencies optional
* browse/upload with [IE4](#browser-support) / netscape4.0 on win3.11 (heh)
* protocols: [http](#the-browser) // [ftp](#ftp-server) // [webdav](#webdav-server) // [smb/cifs](#smb-server)
try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running from a basement in finland
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) 📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer)
## get the app
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)
## readme toc ## readme toc
* top * top
* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set! * [quickstart](#quickstart) - just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
* [on servers](#on-servers) - you may also want these, especially on servers * [on servers](#on-servers) - you may also want these, especially on servers
* [on debian](#on-debian) - recommended additional steps on debian
* [features](#features) * [features](#features)
* [testimonials](#testimonials) - small collection of user feedback * [testimonials](#testimonials) - small collection of user feedback
* [motivations](#motivations) - project goals / philosophy * [motivations](#motivations) - project goals / philosophy
@@ -83,22 +69,25 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites * [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
* [browser support](#browser-support) - TLDR: yes * [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients * [client examples](#client-examples) - interact with copyparty using non-browser clients
* [folder sync](#folder-sync) - sync folders to/from copyparty
* [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem * [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem
* [android app](#android-app) - upload to copyparty with one tap
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [client-side](#client-side) - when uploading files * [client-side](#client-side) - when uploading files
* [security](#security) - some notes on hardening * [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected * [gotchas](#gotchas) - behavior that might be unexpected
* [cors](#cors) - cross-site request config * [cors](#cors) - cross-site request config
* [https](#https) - both HTTP and HTTPS are accepted
* [recovering from crashes](#recovering-from-crashes) * [recovering from crashes](#recovering-from-crashes)
* [client crashes](#client-crashes) * [client crashes](#client-crashes)
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads * [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
* [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api) * [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api)
* [dependencies](#dependencies) - mandatory deps * [dependencies](#dependencies) - mandatory deps
* [optional dependencies](#optional-dependencies) - install these to enable bonus features * [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff) * [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - the self-contained "binary" * [sfx](#sfx) - the self-contained "binary"
* [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe) * [copyparty.exe](#copypartyexe) - download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
* [install on android](#install-on-android) * [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports * [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md) * [devnotes](#devnotes) - for build instructions etc, see [./docs/devnotes.md](./docs/devnotes.md)
@@ -106,20 +95,35 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
## quickstart ## quickstart
download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set! just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** -- that's it! 🎉
* or install through pypi (python3 only): `python3 -m pip install --user -U copyparty` * or install through pypi (python3 only): `python3 -m pip install --user -U copyparty`
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead * or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
* or if you are on android, [install copyparty in termux](#install-on-android)
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too * or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
* docker has all deps built-in, so skip this step:
running the sfx without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes) enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
* **Alpine:** `apk add py3-pillow ffmpeg`
* **Debian:** `apt install python3-pil ffmpeg`
* **Fedora:** `dnf install python3-pillow ffmpeg`
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
* **MacOS:** `port install py-Pillow ffmpeg`
* **MacOS** (alternative): `brew install pillow ffmpeg`
* **Windows:** `python -m pip install --user -U Pillow`
* install python and ffmpeg manually; do not use `winget` or `Microsoft Store` (it breaks $PATH)
* copyparty.exe comes with `Pillow` and only needs `ffmpeg`
* see [optional dependencies](#optional-dependencies) to enable even more features
running copyparty without arguments (for example doubleclicking it on Windows) will give everyone read/write access to the current folder; you may want [accounts and volumes](#accounts-and-volumes)
some recommended options: some recommended options:
* `-e2dsa` enables general [file indexing](#file-indexing) * `-e2dsa` enables general [file indexing](#file-indexing)
* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies) to enable thumbnails and more * `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen)
* `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar` * `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
* replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else * replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et, up`G`et) * see [accounts and volumes](#accounts-and-volumes) (or `--help-accounts`) for the syntax and other permissions
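for reference, here is a hypothetical way of combining the options above into a single launch command (the volume path, the `foo` account and the `bar` password are the same placeholder values as in the list above):

```bash
# illustrative sketch only -- the flags are the ones documented above
python3 copyparty-sfx.py -e2dsa -e2ts \
  -v /mnt/music:/music:r:rw,foo -a foo:bar
```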
### on servers ### on servers
@@ -128,6 +132,7 @@ you may also want these, especially on servers:
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security) * [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
* [contrib/rc/copyparty](contrib/rc/copyparty) to run copyparty on FreeBSD
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https) * [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer: and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
@@ -139,18 +144,6 @@ firewall-cmd --reload
``` ```
(1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp) (1900:ssdp, 3921:ftp, 3923:http/https, 3945:smb, 3990:ftps, 5353:mdns, 12000:passive-ftp)
### on debian
recommended additional steps on debian which enable audio metadata and thumbnails (from images and videos):
* as root, run the following:
`apt install python3 python3-pip python3-dev ffmpeg`
* then, as the user which will be running copyparty (so hopefully not root), run this:
`python3 -m pip install --user -U Pillow pillow-avif-plugin`
(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
## features ## features
@@ -164,11 +157,15 @@ recommended additional steps on debian which enable audio metadata and thumbnai
* ☑ [smb/cifs server](#smb-server) * ☑ [smb/cifs server](#smb-server)
* ☑ [qr-code](#qr-code) for quick access * ☑ [qr-code](#qr-code) for quick access
* ☑ [upnp / zeroconf / mdns / ssdp](#zeroconf) * ☑ [upnp / zeroconf / mdns / ssdp](#zeroconf)
* ☑ [event hooks](#event-hooks) / script runner
* ☑ [reverse-proxy support](https://github.com/9001/copyparty#reverse-proxy)
* upload * upload
* ☑ basic: plain multipart, ie6 support * ☑ basic: plain multipart, ie6 support
* ☑ [up2k](#uploading): js, resumable, multithreaded * ☑ [up2k](#uploading): js, resumable, multithreaded
* unaffected by cloudflare's max-upload-size (100 MiB) * unaffected by cloudflare's max-upload-size (100 MiB)
* ☑ stash: simple PUT filedropper * ☑ stash: simple PUT filedropper
* ☑ filename randomizer
* ☑ write-only folders
* ☑ [unpost](#unpost): undo/delete accidental uploads * ☑ [unpost](#unpost): undo/delete accidental uploads
* ☑ [self-destruct](#self-destruct) (specified server-side or client-side) * ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
* ☑ symlink/discard existing files (content-matching) * ☑ symlink/discard existing files (content-matching)
@@ -192,10 +189,15 @@ recommended additional steps on debian which enable audio metadata and thumbnai
* ☑ [locate files by contents](#file-search) * ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size * ☑ search by name/path/date/size
* ☑ [search by ID3-tags etc.](#searching) * ☑ [search by ID3-tags etc.](#searching)
* client support
* ☑ [folder sync](#folder-sync)
* ☑ [curl-friendly](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png)
* markdown * markdown
* ☑ [viewer](#markdown-viewer) * ☑ [viewer](#markdown-viewer)
* ☑ editor (sure why not) * ☑ editor (sure why not)
PS: something missing? post any crazy ideas you've got as a [feature request](https://github.com/9001/copyparty/issues/new?assignees=9001&labels=enhancement&template=feature_request.md) or [discussion](https://github.com/9001/copyparty/discussions/new?category=ideas) 🤙
## testimonials ## testimonials
@@ -239,6 +241,9 @@ browser-specific:
server-os-specific: server-os-specific:
* RHEL8 / Rocky8: you can run copyparty using `/usr/libexec/platform-python` * RHEL8 / Rocky8: you can run copyparty using `/usr/libexec/platform-python`
server notes:
* pypy is supported but regular cpython is faster if you enable the database
# bugs # bugs
@@ -264,7 +269,7 @@ server-os-specific:
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11) * iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day... * "future" because `AudioContext` can't maintain a stable playback speed in the current iOS version (15.7), maybe one day...
* Windows: folders cannot be accessed if the name ends with `.` * Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug * python or windows bug
@@ -512,11 +517,14 @@ up2k has several advantages:
* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
* the last-modified timestamp of the file is preserved * the last-modified timestamp of the file is preserved
> it is perfectly safe to restart / upgrade copyparty while someone is uploading to it!
> all known up2k clients will resume just fine 💪
see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d) see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a.ocv.me/pub/demo/pics-vids/#gf-0f6f5c0d)
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png) ![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png) **protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.js](contrib/plugins/minimal-up2k.js) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion **protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress -- also, the `[🔔]` and/or `[🔊]` switches enable visible and/or audible notifications on upload completion
@@ -764,6 +772,8 @@ general usage:
on macos, connect from finder: on macos, connect from finder:
* [Go] -> [Connect to Server...] -> http://192.168.123.1:3923/ * [Go] -> [Connect to Server...] -> http://192.168.123.1:3923/
in order to grant full write-access to webdav clients, the volflag `daw` must be set and the account must also have delete-access (otherwise the client won't be allowed to replace the contents of existing files, which is how webdav works)
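as a rough illustration of what such a volume definition could look like (hypothetical paths, account `ed` and password `wark`; assuming permission letters can be combined as described in the accounts section):

```bash
# sketch: give user ed read/write/delete on the "dav" volume and set the daw volflag
# so webdav clients are allowed to replace the contents of existing files
python3 copyparty-sfx.py -a ed:wark -v srv/dav:dav:rwd,ed:c,daw
```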
### connecting to webdav from windows ### connecting to webdav from windows
@@ -1083,13 +1093,15 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
## reverse-proxy ## reverse-proxy
running copyparty next to other websites hosted on an existing webserver such as nginx or apache running copyparty next to other websites hosted on an existing webserver such as nginx, caddy, or apache
you can either: you can either:
* give copyparty its own domain or subdomain (recommended) * give copyparty its own domain or subdomain (recommended)
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs * or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below * if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
example webserver configs: example webserver configs:
* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain * [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
@@ -1167,7 +1179,7 @@ interact with copyparty using non-browser clients
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923` * `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) * python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, folder sync, autoresume of aborted/broken uploads * file uploads, file-search, [folder sync](#folder-sync), autoresume of aborted/broken uploads
* can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py) * can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py)
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy) * see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
@@ -1188,6 +1200,15 @@ you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, ur
NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
## folder sync
sync folders to/from copyparty
the commandline uploader [up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy) with `--dr` is the best way to sync a folder to copyparty; verifies checksums and does files in parallel, and deletes unexpected files on the server after upload has finished which makes file-renames really cheap (it'll rename serverside and skip uploading)
alternatively there is [rclone](./docs/rclone.md) which allows for bidirectional sync and is *way* more flexible (stream files straight from sftp/s3/gcs to copyparty for instance), although syncing to copyparty is about 5x slower than up2k.py if you have many small files in particular
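for illustration, a one-way sync with up2k.py could look something like this (the server URL, password and local path are placeholders; check `up2k.py --help` for the exact flags in your copy):

```bash
# sketch: mirror ~/Music into the /music volume; --dr deletes files on the
# server that no longer exist locally, -a supplies the account password
python3 up2k.py -a hunter2 --dr https://example.com:3923/music/ ~/Music/
```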
## mount as drive ## mount as drive
a remote copyparty server as a local filesystem; go to the control-panel and click `connect` to see a list of commands to do that a remote copyparty server as a local filesystem; go to the control-panel and click `connect` to see a list of commands to do that
@@ -1206,6 +1227,27 @@ alternatively, some alternatives roughly sorted by speed (unreproducible benchma
most clients will fail to mount the root of a copyparty server unless there is a root volume (so you get the admin-panel instead of a browser when accessing it) -- in that case, mount a specific volume instead most clients will fail to mount the root of a copyparty server unless there is a root volume (so you get the admin-panel instead of a browser when accessing it) -- in that case, mount a specific volume instead
# android app
upload to copyparty with one tap
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet
if you want to run the copyparty server on your android device, see [install on android](#install-on-android)
# iOS shortcuts
there is no iPhone app, but the following shortcuts are almost as good:
* [upload to copyparty](https://www.icloud.com/shortcuts/41e98dd985cb4d3bb433222bc1e9e770) ([offline](https://github.com/9001/copyparty/raw/hovudstraum/contrib/ios/upload-to-copyparty.shortcut)) ([png](https://user-images.githubusercontent.com/241032/226118053-78623554-b0ed-482e-98e4-6d57ada58ea4.png)) based on the [original](https://www.icloud.com/shortcuts/ab415d5b4de3467b9ce6f151b439a5d7) by [Daedren](https://github.com/Daedren) (thx!)
* can strip exif, upload files, pics, vids, links, clipboard
* can download links and rehost the target file on copyparty (see first comment inside the shortcut)
* pics become lowres if you share from gallery to shortcut, so better to launch the shortcut and pick stuff from there
# performance # performance
defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
@@ -1301,6 +1343,13 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf` cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
## https
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
copyparty doesn't speak HTTP/2 or QUIC, so using a reverse proxy would solve that as well
# recovering from crashes # recovering from crashes
## client crashes ## client crashes
@@ -1357,12 +1406,6 @@ enable [smb](#smb-server) support (**not** recommended):
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips` `pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
## install recommended deps
```
python -m pip install --user -U jinja2 mutagen Pillow
```
## optional gpl stuff ## optional gpl stuff
some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag) some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)
@@ -1374,20 +1417,24 @@ these are standalone programs and will never be imported / evaluated by copypart
the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](#./docs/devnotes.md#sfx-repack) you can reduce the sfx size by repacking it; see [./docs/devnotes.md#sfx-repack](./docs/devnotes.md#sfx-repack)
## copyparty.exe ## copyparty.exe
download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) or [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe) download [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) (win8+) or [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) (win7+)
![copyparty-exe-fs8](https://user-images.githubusercontent.com/241032/194707422-cb7f66c9-41a2-4cb9-8dbc-2ab866cd4338.png) ![copyparty-exe-fs8](https://user-images.githubusercontent.com/241032/221445946-1e328e56-8c5b-44a9-8b9f-dee84d942535.png)
can be convenient on old machines where installing python is problematic, however is **not recommended** and should be considered a last resort -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead can be convenient on machines where installing python is problematic, however is **not recommended** -- if possible, please use **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** instead
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is compatible with 32bit windows7, which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and will definitely become a security hazard at some point * [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) runs on win8 or newer, was compiled on win10, does thumbnails + media tags, and is *currently* safe to use, but any future python/expat/pillow CVEs can only be remedied by downloading a newer version of the exe
* [copyparty64.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty64.exe) is identical except 64bit so it [works in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) * on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145), on win10 it just works
* dangerous: [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is compatible with [windows7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png), which means it uses an ancient copy of python (3.7.9) which cannot be upgraded and should never be exposed to the internet (LAN is fine)
* dangerous and deprecated: [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) lets you [run copyparty in WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) and is otherwise completely useless
meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date meanwhile [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead relies on your system python which gives better performance and will stay safe as long as you keep your python install up-to-date


@@ -2,15 +2,24 @@ standalone programs which are executed by copyparty when an event happens (uploa
 these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
+run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad)
 > **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
 # after upload
 * [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
+* [notify2.py](notify2.py) uses the json API to show more context
 * [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
 * [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
+# upload batches
+these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
+* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
+* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root
 # before upload
 * [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
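as a rough sketch of how small a hook can be (not one of the bundled examples; the log path is arbitrary), an `--xau` hook without the `j` flag just receives the uploaded file's path as its only argument:

```bash
#!/bin/bash
# hypothetical xau hook: append a timestamp + the uploaded file's path to a logfile
# enable it with  --xau bin/hooks/log-upload.sh  or as a volflag: c,xau=bin/hooks/log-upload.sh
printf '%s %s\n' "$(date -u +%Y-%m-%dT%H:%M:%SZ)" "$1" >> /tmp/copyparty-uploads.log
```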

68  bin/hooks/notify2.py  (Executable file)

@@ -0,0 +1,68 @@
#!/usr/bin/env python3
import json
import os
import sys
import subprocess as sp
from datetime import datetime
from plyer import notification
_ = r"""
same as notify.py but with additional info (uploader, ...)
and also supports --xm (notify on 📟 message)
example usages; either as global config (all volumes) or as volflag:
--xm f,j,bin/hooks/notify2.py
--xau f,j,bin/hooks/notify2.py
-v srv/inc:inc:c,xm=f,j,bin/hooks/notify2.py
-v srv/inc:inc:c,xau=f,j,bin/hooks/notify2.py
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
parameters explained,
xau = execute after upload
f = fork so it doesn't block uploads
j = provide json instead of filepath list
"""
try:
from copyparty.util import humansize
except:
def humansize(n):
return n
def main():
inf = json.loads(sys.argv[1])
fp = inf["ap"]
sz = humansize(inf["sz"])
dp, fn = os.path.split(fp)
mt = datetime.utcfromtimestamp(inf["mt"]).strftime("%Y-%m-%d %H:%M:%S")
msg = f"{fn} ({sz})\n📁 {dp}"
title = "File received"
icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
if inf.get("txt"):
msg = inf["txt"]
title = "Message received"
icon = "mail-unread-symbolic" if sys.platform == "linux" else ""
msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"
if "com.termux" in sys.executable:
sp.run(["termux-notification", "-t", title, "-c", msg])
return
notification.notify(
title=title,
message=msg,
app_icon=icon,
timeout=10,
)
if __name__ == "__main__":
main()

103  bin/hooks/xiu-sha.py  (Executable file)

@@ -0,0 +1,103 @@
#!/usr/bin/env python3
import hashlib
import json
import sys
from datetime import datetime
_ = r"""
this hook will produce a single sha512 file which
covers all recent uploads (plus metadata comments)
use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)
example usage as global config:
--xiu i5,j,bin/hooks/xiu-sha.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:c,xiu=i5,j,bin/hooks/xiu-sha.py
parameters explained,
xiu = execute after uploads...
i5 = ...after volume has been idle for 5sec
j = provide json instead of filepath list
note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p
def humantime(ts):
return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
def find_files_root(inf):
di = 9000
for f1, f2 in zip(inf, inf[1:]):
p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
di = min(len(p1), len(p2), di)
di = next((i for i in range(di) if p1[i] != p2[i]), di)
return di + 1
def find_vol_root(inf):
return len(inf[0]["ap"][: -len(inf[0]["vp"])])
def main():
zb = sys.stdin.buffer.read()
zs = zb.decode("utf-8", "replace")
inf = json.loads(zs)
# root directory (where to put the sha512 file);
# di = find_files_root(inf) # next to the file closest to volume root
di = find_vol_root(inf) # top of the entire volume
ret = []
total_sz = 0
for md in inf:
ap = md["ap"]
rp = ap[di:]
total_sz += md["sz"]
fsize = "{:,}".format(md["sz"])
mtime = humantime(md["mt"])
up_ts = humantime(md["at"])
h = hashlib.sha512()
with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
while True:
buf = f.read(512 * 1024)
if not buf:
break
h.update(buf)
cksum = h.hexdigest()
meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
ret.append("# {}\n{} *{}".format(meta, cksum, rp))
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
ret.append("")
ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
with open(fsenc(fp), "wb") as f:
f.write("\n".join(ret).encode("utf-8", "replace"))
print("wrote checksums to {}".format(fp))
if __name__ == "__main__":
main()

45  bin/hooks/xiu.py  (Executable file)

@@ -0,0 +1,45 @@
#!/usr/bin/env python3
import json
import sys
_ = r"""
this hook prints absolute filepaths + total size
use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)
example usage as global config:
--xiu i1,j,bin/hooks/xiu.py
example usage as a volflag (per-volume config):
-v srv/inc:inc:c,xiu=i1,j,bin/hooks/xiu.py
parameters explained,
xiu = execute after uploads...
i1 = ...after volume has been idle for 1sec
j = provide json instead of filepath list
note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""
def main():
zb = sys.stdin.buffer.read()
zs = zb.decode("utf-8", "replace")
inf = json.loads(zs)
total_sz = 0
for upload in inf:
sz = upload["sz"]
total_sz += sz
print("{:9} {}".format(sz, upload["ap"]))
print("{} files, {} bytes total".format(len(inf), total_sz))
if __name__ == "__main__":
main()


@@ -31,7 +31,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
 *alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
 * from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
-* from pypy: `keyfinder vamp`
+* from pip: `keyfinder vamp`
 # usage from copyparty


@@ -225,7 +225,7 @@ install_vamp() {
 $pybin -m pip install --user vamp
 cd "$td"
-echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
+echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
 printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
 (dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
 sha512sum -c <(


@@ -4,8 +4,9 @@ set -e
 # runs copyparty (or any other program really) in a chroot
 #
 # assumption: these directories, and everything within, are owned by root
-sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives )
+sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
+[ -e $v ] && sysdirs+=($v)
+done
 # error-handler
 help() { cat <<'EOF'
@@ -38,7 +39,7 @@ while true; do
 v="$1"; shift
 [ "$v" = -- ] && break  # end of volumes
 [ "$#" -eq 0 ] && break  # invalid usage
-vols+=( "$(realpath "$v")" )
+vols+=( "$(realpath "$v" || echo "$v")" )
 done
 pybin="$1"; shift
 pybin="$(command -v "$pybin")"
@@ -82,7 +83,7 @@ jail="${jail%/}"
 printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
 while IFS= read -r v; do
 [ -e "$v" ] || {
-# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
+printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
 continue
 }
 i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
@@ -117,6 +118,15 @@ mkdir -p "$jail/tmp"
 chmod 777 "$jail/tmp"
+
+# create a dev
+(cd $jail; mkdir -p dev; cd dev
+[ -e null ] || mknod -m 666 null c 1 3
+[ -e zero ] || mknod -m 666 zero c 1 5
+[ -e random ] || mknod -m 444 random c 1 8
+[ -e urandom ] || mknod -m 444 urandom c 1 9
+)
+
 # run copyparty
 export HOME=$(getent passwd $uid | cut -d: -f6)
 export USER=$(getent passwd $uid | cut -d: -f1)


@@ -1,9 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
S_VERSION = "1.5"
S_BUILD_DT = "2023-03-12"
""" """
up2k.py: upload to copyparty up2k.py: upload to copyparty
2023-01-13, v1.2, ed <irc.rizon.net>, MIT-Licensed 2021, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests - dependencies: requests
@@ -24,6 +27,8 @@ import platform
import threading import threading
import datetime import datetime
EXE = sys.executable.endswith("exe")
try: try:
import argparse import argparse
except: except:
@@ -34,7 +39,9 @@ except:
try: try:
import requests import requests
except ImportError: except ImportError:
if sys.version_info > (2, 7): if EXE:
raise
elif sys.version_info > (2, 7):
m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n" m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n"
else: else:
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7" m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
@@ -245,7 +252,13 @@ def eprint(*a, **ka):
def flushing_print(*a, **ka): def flushing_print(*a, **ka):
try:
_print(*a, **ka) _print(*a, **ka)
except:
v = " ".join(str(x) for x in a)
v = v.encode("ascii", "replace").decode("ascii")
_print(v, **ka)
if "flush" not in ka: if "flush" not in ka:
sys.stdout.flush() sys.stdout.flush()
@@ -372,6 +385,23 @@ def walkdir(err, top, seen):
def walkdirs(err, tops): def walkdirs(err, tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]""" """recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii") sep = "{0}".format(os.sep).encode("ascii")
if not VT100:
za = []
for td in tops:
try:
ap = os.path.abspath(os.path.realpath(td))
if td[-1:] in (b"\\", b"/"):
ap += sep
except:
# maybe cpython #88013 (ok)
ap = td
za.append(ap)
za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za]
za = [x.replace(b"/", b"\\") for x in za]
tops = za
for top in tops: for top in tops:
isdir = os.path.isdir(top) isdir = os.path.isdir(top)
if top[-1:] == sep: if top[-1:] == sep:
@@ -520,7 +550,11 @@ def handshake(ar, file, search):
except Exception as ex: except Exception as ex:
em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip() em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
if sc == 422 or "<pre>partial upload exists at a different" in txt: if (
sc == 422
or "<pre>partial upload exists at a different" in txt
or "<pre>source file busy; please try again" in txt
):
file.recheck = True file.recheck = True
return [], False return [], False
elif sc == 409 or "<pre>upload rejected, file already exists" in txt: elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
@@ -552,8 +586,8 @@ def handshake(ar, file, search):
return r["hash"], r["sprs"] return r["hash"], r["sprs"]
def upload(file, cid, pw): def upload(file, cid, pw, stats):
# type: (File, str, str) -> None # type: (File, str, str, str) -> None
"""upload one specific chunk, `cid` (a chunk-hash)""" """upload one specific chunk, `cid` (a chunk-hash)"""
headers = { headers = {
@@ -561,6 +595,10 @@ def upload(file, cid, pw):
"X-Up2k-Wark": file.wark, "X-Up2k-Wark": file.wark,
"Content-Type": "application/octet-stream", "Content-Type": "application/octet-stream",
} }
if stats:
headers["X-Up2k-Stat"] = stats
if pw: if pw:
headers["Cookie"] = "=".join(["cppwd", pw]) headers["Cookie"] = "=".join(["cppwd", pw])
@@ -629,6 +667,8 @@ class Ctl(object):
req_ses.verify = ar.te req_ses.verify = ar.te
self.filegen = walkdirs([], ar.files) self.filegen = walkdirs([], ar.files)
self.recheck = [] # type: list[File]
if ar.safe: if ar.safe:
self._safe() self._safe()
else: else:
@@ -647,11 +687,11 @@ class Ctl(object):
self.t0 = time.time() self.t0 = time.time()
self.t0_up = None self.t0_up = None
self.spd = None self.spd = None
self.eta = "99:99:99"
self.mutex = threading.Lock() self.mutex = threading.Lock()
self.q_handshake = Queue() # type: Queue[File] self.q_handshake = Queue() # type: Queue[File]
self.q_upload = Queue() # type: Queue[tuple[File, str]] self.q_upload = Queue() # type: Queue[tuple[File, str]]
self.recheck = [] # type: list[File]
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int] self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int] self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
@@ -693,7 +733,8 @@ class Ctl(object):
ncs = len(hs) ncs = len(hs)
for nc, cid in enumerate(hs): for nc, cid in enumerate(hs):
print(" {0} up {1}".format(ncs - nc, cid)) print(" {0} up {1}".format(ncs - nc, cid))
upload(file, cid, self.ar.a) stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf)
upload(file, cid, self.ar.a, stats)
print(" ok!") print(" ok!")
if file.recheck: if file.recheck:
@@ -768,12 +809,12 @@ class Ctl(object):
eta = (self.nbytes - self.up_b) / (spd + 1) eta = (self.nbytes - self.up_b) / (spd + 1)
spd = humansize(spd) spd = humansize(spd)
eta = str(datetime.timedelta(seconds=int(eta))) self.eta = str(datetime.timedelta(seconds=int(eta)))
sleft = humansize(self.nbytes - self.up_b) sleft = humansize(self.nbytes - self.up_b)
nleft = self.nfiles - self.up_f nleft = self.nfiles - self.up_f
tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r" tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft) t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft)
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail)) eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
if not self.recheck: if not self.recheck:
@@ -811,7 +852,7 @@ class Ctl(object):
zb += quotep(rd.replace(b"\\", b"/")) zb += quotep(rd.replace(b"\\", b"/"))
r = req_ses.get(zb + b"?ls&dots", headers=headers) r = req_ses.get(zb + b"?ls&dots", headers=headers)
if not r: if not r:
raise Exception("HTTP {}".format(r.status_code)) raise Exception("HTTP {0}".format(r.status_code))
j = r.json() j = r.json()
for f in j["dirs"] + j["files"]: for f in j["dirs"] + j["files"]:
@@ -886,6 +927,9 @@ class Ctl(object):
self.handshaker_busy += 1 self.handshaker_busy += 1
upath = file.abs.decode("utf-8", "replace") upath = file.abs.decode("utf-8", "replace")
if not VT100:
upath = upath[4:]
hs, sprs = handshake(self.ar, file, search) hs, sprs = handshake(self.ar, file, search)
if search: if search:
if hs: if hs:
@@ -951,9 +995,20 @@ class Ctl(object):
self.uploader_busy += 1 self.uploader_busy += 1
self.t0_up = self.t0_up or time.time() self.t0_up = self.t0_up or time.time()
zs = "{0}/{1}/{2}/{3} {4}/{5} {6}"
stats = zs.format(
self.up_f,
len(self.recheck),
self.uploader_busy,
self.nfiles - self.up_f,
int(self.nbytes / (1024 * 1024)),
int((self.nbytes - self.up_b) / (1024 * 1024)),
self.eta,
)
file, cid = task file, cid = task
try: try:
upload(file, cid, self.ar.a) upload(file, cid, self.ar.a, stats)
except: except:
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8])) eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
# handshake will fix it # handshake will fix it
@@ -989,8 +1044,13 @@ def main():
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2 cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
if "--version" in sys.argv:
print(ver)
return
# fmt: off # fmt: off
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog=""" ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog="""
NOTE: NOTE:
source file/folder selection uses rsync syntax, meaning that: source file/folder selection uses rsync syntax, meaning that:
"foo" uploads the entire folder to URL/foo/ "foo" uploads the entire folder to URL/foo/
@@ -1003,6 +1063,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath") ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)") ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible") ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--version", action="store_true", help="show version and exit")
ap = app.add_argument_group("compatibility") ap = app.add_argument_group("compatibility")
ap.add_argument("--cls", action="store_true", help="clear screen before start") ap.add_argument("--cls", action="store_true", help="clear screen before start")
@@ -1026,7 +1087,16 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-td", action="store_true", help="disable certificate check") ap.add_argument("-td", action="store_true", help="disable certificate check")
# fmt: on # fmt: on
try:
ar = app.parse_args() ar = app.parse_args()
finally:
if EXE and not sys.argv[1:]:
print("*** hit enter to exit ***")
try:
input()
except:
pass
if ar.drd: if ar.drd:
ar.dr = True ar.dr = True
@@ -1040,7 +1110,7 @@ source file/folder selection uses rsync syntax, meaning that:
ar.files = [ ar.files = [
os.path.abspath(os.path.realpath(x.encode("utf-8"))) os.path.abspath(os.path.realpath(x.encode("utf-8")))
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8") + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
for x in ar.files for x in ar.files
] ]
@@ -1050,7 +1120,7 @@ source file/folder selection uses rsync syntax, meaning that:
if ar.a and ar.a.startswith("$"): if ar.a and ar.a.startswith("$"):
fn = ar.a[1:] fn = ar.a[1:]
print("reading password from file [{}]".format(fn)) print("reading password from file [{0}]".format(fn))
with open(fn, "rb") as f: with open(fn, "rb") as f:
ar.a = f.read().decode("utf-8").strip() ar.a = f.read().decode("utf-8").strip()

View File

@@ -3,7 +3,7 @@
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>🎉 redirect</title> <title>💾🎉 redirect</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta http-equiv="X-UA-Compatible" content="IE=edge">
<style> <style>

Binary file not shown.

View File

@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe> # Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty pkgname=copyparty
pkgver="1.6.3" pkgver="1.6.9"
pkgrel=1 pkgrel=1
pkgdesc="Portable file sharing hub" pkgdesc="Portable file sharing hub"
arch=("any") arch=("any")
@@ -26,12 +26,12 @@ source=("${url}/releases/download/v${pkgver}/${pkgname}-sfx.py"
"https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE" "https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE"
) )
backup=("etc/${pkgname}.d/init" ) backup=("etc/${pkgname}.d/init" )
sha256sums=("56c02d43a0e6c18d71295268674454b4c6f5ff2ccef30fb95f81d58d2d1e260d" sha256sums=("64f3b6a7120b3e1c1167e6aa7c0f023c39abb18e50525013b97467326a2f73ab"
"b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2" "b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2"
"f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc" "f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc"
"c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff" "c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff"
"dba701de9fd584405917e923ea1e59dbb249b96ef23bad479cf4e42740b774c8" "dba701de9fd584405917e923ea1e59dbb249b96ef23bad479cf4e42740b774c8"
"0530459e6fbd57f770c374e960d2eb07a4e8c082c0007fb754454e45c0af57c6" "23054bb206153a1ed34038accaf490b8068f9c856e423c2f2595b148b40c0a0c"
"cb2ce3d6277bf2f5a82ecf336cc44963bc6490bcf496ffbd75fc9e21abaa75f3" "cb2ce3d6277bf2f5a82ecf336cc44963bc6490bcf496ffbd75fc9e21abaa75f3"
) )

View File

@@ -3,8 +3,6 @@ rem removes the 47.6 MiB filesize limit when downloading from webdav
rem + optionally allows/enables password-auth over plaintext http rem + optionally allows/enables password-auth over plaintext http
rem + optionally helps disable wpad, removing the 10sec latency rem + optionally helps disable wpad, removing the 10sec latency
setlocal enabledelayedexpansion
net session >nul 2>&1 net session >nul 2>&1
if %errorlevel% neq 0 ( if %errorlevel% neq 0 (
echo sorry, you must run this as administrator echo sorry, you must run this as administrator
@@ -20,30 +18,26 @@ echo OK;
echo allow webdav basic-auth over plaintext http? echo allow webdav basic-auth over plaintext http?
echo Y: login works, but the password will be visible in wireshark etc echo Y: login works, but the password will be visible in wireshark etc
echo N: login will NOT work unless you use https and valid certificates echo N: login will NOT work unless you use https and valid certificates
set c=. choice
set /p "c=(Y/N): " if %errorlevel% equ 1 (
echo( reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f
if /i not "!c!"=="y" goto :g1 rem default is 1 (require tls)
reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f )
rem default is 1 (require tls)
:g1
echo( echo(
echo OK; echo OK;
echo do you want to disable wpad? echo do you want to disable wpad?
echo can give a HUGE speed boost depending on network settings echo can give a HUGE speed boost depending on network settings
set c=. choice
set /p "c=(Y/N): " if %errorlevel% equ 1 (
echo( echo(
if /i not "!c!"=="y" goto :g2 echo i'm about to open the [Connections] tab in [Internet Properties] for you;
echo( echo please click [LAN settings] and disable [Automatically detect settings]
echo i'm about to open the [Connections] tab in [Internet Properties] for you; echo(
echo please click [LAN settings] and disable [Automatically detect settings] pause
echo( control inetcpl.cpl,,4
pause )
control inetcpl.cpl,,4
:g2
net stop webclient net stop webclient
net start webclient net start webclient
echo( echo(

View File

@@ -34,6 +34,8 @@ ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"]
MACOS = platform.system() == "Darwin" MACOS = platform.system() == "Darwin"
EXE = bool(getattr(sys, "frozen", False))
try: try:
CORES = len(os.sched_getaffinity(0)) CORES = len(os.sched_getaffinity(0))
except: except:

View File

@@ -23,7 +23,7 @@ import traceback
import uuid import uuid
from textwrap import dedent from textwrap import dedent
from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode from .__init__ import ANYWIN, CORES, EXE, PY2, VT100, WINDOWS, E, EnvParams, unicode
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
from .cfg import flagcats, onedash from .cfg import flagcats, onedash
@@ -36,7 +36,6 @@ from .util import (
UNPLICATIONS, UNPLICATIONS,
align_tab, align_tab,
ansi_re, ansi_re,
is_exe,
min_ex, min_ex,
py_desc, py_desc,
pybin, pybin,
@@ -512,6 +511,7 @@ def get_sects():
execute a command (a program or script) before or after various events; execute a command (a program or script) before or after various events;
\033[36mxbu\033[35m executes CMD before a file upload starts \033[36mxbu\033[35m executes CMD before a file upload starts
\033[36mxau\033[35m executes CMD after a file upload finishes \033[36mxau\033[35m executes CMD after a file upload finishes
\033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
\033[36mxbr\033[35m executes CMD before a file rename/move \033[36mxbr\033[35m executes CMD before a file rename/move
\033[36mxar\033[35m executes CMD after a file rename/move \033[36mxar\033[35m executes CMD after a file rename/move
\033[36mxbd\033[35m executes CMD before a file delete \033[36mxbd\033[35m executes CMD before a file delete
@@ -533,6 +533,7 @@ def get_sects():
\033[36mj\033[35m provides json with info as 1st arg instead of filepath \033[36mj\033[35m provides json with info as 1st arg instead of filepath
\033[36mwN\033[35m waits N sec after command has been started before continuing \033[36mwN\033[35m waits N sec after command has been started before continuing
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned \033[36mtN\033[35m sets an N sec timeout before the command is abandoned
\033[36miN\033[35m xiu only: volume must be idle for N sec (default = 5)
\033[36mkt\033[35m kills the entire process tree on timeout (default), \033[36mkt\033[35m kills the entire process tree on timeout (default),
\033[36mkm\033[35m kills just the main process \033[36mkm\033[35m kills just the main process
@@ -543,6 +544,14 @@ def get_sects():
\033[36mc2\033[35m show only stdout \033[36mc2\033[35m show only stdout
\033[36mc3\033[35m mute all process output \033[36mc3\033[35m mute all process output
\033[0m \033[0m
each hook is executed once for each event, except for \033[36mxiu\033[0m
which builds up a backlog of uploads, running the hook just once
as soon as the volume has been idle for iN seconds (5 by default)
\033[36mxiu\033[0m is also unique in that it will pass the metadata to the
executed program on STDIN instead of as argv arguments, and
it also includes the wark (file-id/hash) as a json property
except for \033[36mxm\033[0m, only one hook / one action can run at a time, except for \033[36mxm\033[0m, only one hook / one action can run at a time,
so it's recommended to use the \033[36mf\033[0m flag unless you really need so it's recommended to use the \033[36mf\033[0m flag unless you really need
to wait for the hook to finish before continuing (without \033[36mf\033[0m to wait for the hook to finish before continuing (without \033[36mf\033[0m
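Editor's note: a minimal sketch of what an xiu hook could look like, based only on the description above. The exact STDIN framing (one JSON object per line vs. a single JSON document) and every field name other than wark are assumptions, not taken from this diff.

    #!/usr/bin/env python3
    # hypothetical xiu hook -- a sketch, not copyparty's bundled format:
    # reads the batched upload metadata from STDIN (assumed here to be
    # one JSON object per line) and prints the wark of each upload
    import json
    import sys

    def main():
        for ln in sys.stdin:
            ln = ln.strip()
            if not ln:
                continue
            ev = json.loads(ln)
            # "wark" (file-id/hash) is promised by the help text above;
            # any other field accessed here would be an assumption
            print("idle-batch upload:", ev.get("wark"))

    if __name__ == "__main__":
        main()

Registering it would presumably combine the iN flag with the command, e.g. something along the lines of --xiu i300,j,/path/to/hook.py; check --help-hooks for the authoritative syntax.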
@@ -652,6 +661,7 @@ def add_upload(ap):
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip") ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled") ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600") ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)") ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)") ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
@@ -684,6 +694,7 @@ def add_network(ap):
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes") ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds") ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds") ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC")
def add_tls(ap): def add_tls(ap):
@@ -747,7 +758,7 @@ def add_ftp(ap):
def add_webdav(ap): def add_webdav(ap):
ap2 = ap.add_argument_group('WebDAV options') ap2 = ap.add_argument_group('WebDAV options')
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)") ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this") ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)") ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
@@ -769,6 +780,7 @@ def add_hooks(ap):
ap2 = ap.add_argument_group('event hooks (see --help-hooks)') ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts") ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes") ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle")
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename") ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename") ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete") ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
@@ -802,7 +814,7 @@ def add_safety(ap, fk_salt):
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih") ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r") ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]") ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter") ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter") ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles") ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile") ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
@@ -862,7 +874,7 @@ def add_thumbnail(ap):
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; case-insensitive if -e2d")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips # https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:' # ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
@@ -870,7 +882,7 @@ def add_thumbnail(ap):
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips") ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg") ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg") ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itr,itz,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3gz,s3m,s3r,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmr,xmz,xpk", help="audio formats to decode using ffmpeg") ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,m4a,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,tak,tta,ulaw,wav,wma,wv,xm,xpk", help="audio formats to decode using ffmpeg")
def add_transcoding(ap): def add_transcoding(ap):
@@ -934,6 +946,7 @@ def add_ui(ap, retry):
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages") ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext") ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)") ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents") ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
@@ -1067,7 +1080,7 @@ def main(argv: Optional[list[str]] = None) -> None:
showlic() showlic()
sys.exit(0) sys.exit(0)
if is_exe: if EXE:
print("pybin: {}\n".format(pybin), end="") print("pybin: {}\n".format(pybin), end="")
ensure_locale() ensure_locale()

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (1, 6, 4) VERSION = (1, 6, 10)
CODENAME = "cors k" CODENAME = "cors k"
BUILD_DT = (2023, 2, 11) BUILD_DT = (2023, 3, 20)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -934,7 +934,7 @@ class AuthSrv(object):
is_list: bool, is_list: bool,
) -> None: ) -> None:
desc = flagdescs.get(name, "?").replace("\n", " ") desc = flagdescs.get(name, "?").replace("\n", " ")
if name not in ["mtp", "xbu", "xau", "xbr", "xar", "xbd", "xad", "xm"]: if name not in "mtp xbu xau xiu xbr xar xbd xad xm".split():
if value is True: if value is True:
t = "└─add volflag [{}] = {} ({})" t = "└─add volflag [{}] = {} ({})"
else: else:
@@ -1303,7 +1303,7 @@ class AuthSrv(object):
vol.flags["mth"] = self.args.mth vol.flags["mth"] = self.args.mth
# append additive args from argv to volflags # append additive args from argv to volflags
hooks = "xbu xau xbr xar xbd xad xm".split() hooks = "xbu xau xiu xbr xar xbd xad xm".split()
for name in ["mtp"] + hooks: for name in ["mtp"] + hooks:
self._read_volflag(vol.flags, name, getattr(self.args, name), True) self._read_volflag(vol.flags, name, getattr(self.args, name), True)
@@ -1363,11 +1363,20 @@ class AuthSrv(object):
if k in ints: if k in ints:
vol.flags[k] = int(vol.flags[k]) vol.flags[k] = int(vol.flags[k])
if "lifetime" in vol.flags and "e2d" not in vol.flags: if "e2d" not in vol.flags:
if "lifetime" in vol.flags:
t = 'removing lifetime config from volume "/{}" because e2d is disabled' t = 'removing lifetime config from volume "/{}" because e2d is disabled'
self.log(t.format(vol.vpath), 1) self.log(t.format(vol.vpath), 1)
del vol.flags["lifetime"] del vol.flags["lifetime"]
needs_e2d = [x for x in hooks if x != "xm"]
drop = [x for x in needs_e2d if vol.flags.get(x)]
if drop:
t = 'removing [{}] from volume "/{}" because e2d is disabled'
self.log(t.format(", ".join(drop), vol.vpath), 1)
for x in drop:
vol.flags.pop(x)
if vol.flags.get("neversymlink") and not vol.flags.get("hardlink"): if vol.flags.get("neversymlink") and not vol.flags.get("hardlink"):
vol.flags["copydupes"] = True vol.flags["copydupes"] = True
@@ -1624,8 +1633,8 @@ class AuthSrv(object):
] ]
csv = set("i p".split()) csv = set("i p".split())
lst = set("c ihead mtm mtp xad xar xau xbd xbr xbu xm".split()) lst = set("c ihead mtm mtp xad xar xau xiu xbd xbr xbu xm".split())
askip = set("a v c vc cgen".split()) askip = set("a v c vc cgen theme".split())
# keymap from argv to vflag # keymap from argv to vflag
amap = vf_bmap() amap = vf_bmap()
@@ -1722,6 +1731,11 @@ class AuthSrv(object):
elif v is True: elif v is True:
trues.append(k) trues.append(k)
elif v is not False: elif v is not False:
try:
v = v.pattern
except:
pass
vals.append("{}: {}".format(k, v)) vals.append("{}: {}".format(k, v))
pops = [] pops = []
for k1, k2 in IMPLICATIONS: for k1, k2 in IMPLICATIONS:

View File

@@ -123,6 +123,7 @@ flagcats = {
"event hooks\n(better explained in --help-hooks)": { "event hooks\n(better explained in --help-hooks)": {
"xbu=CMD": "execute CMD before a file upload starts", "xbu=CMD": "execute CMD before a file upload starts",
"xau=CMD": "execute CMD after a file upload finishes", "xau=CMD": "execute CMD after a file upload finishes",
"xiu=CMD": "execute CMD after all uploads finish and volume is idle",
"xbr=CMD": "execute CMD before a file rename/move", "xbr=CMD": "execute CMD before a file rename/move",
"xar=CMD": "execute CMD after a file rename/move", "xar=CMD": "execute CMD after a file rename/move",
"xbd=CMD": "execute CMD before a file delete", "xbd=CMD": "execute CMD before a file delete",

View File

@@ -15,6 +15,7 @@ from pyftpdlib.servers import FTPServer
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
from .bos import bos from .bos import bos
from .authsrv import VFS
from .util import ( from .util import (
Daemon, Daemon,
Pebkac, Pebkac,
@@ -23,6 +24,7 @@ from .util import (
ipnorm, ipnorm,
pybin, pybin,
relchk, relchk,
runhook,
sanitize_fn, sanitize_fn,
vjoin, vjoin,
) )
@@ -125,6 +127,10 @@ class FtpFs(AbstractedFS):
self.listdirinfo = self.listdir self.listdirinfo = self.listdir
self.chdir(".") self.chdir(".")
def die(self, msg):
self.h.die(msg)
raise Exception()
def v2a( def v2a(
self, self,
vpath: str, vpath: str,
@@ -132,23 +138,23 @@ class FtpFs(AbstractedFS):
w: bool = False, w: bool = False,
m: bool = False, m: bool = False,
d: bool = False, d: bool = False,
) -> str: ) -> tuple[str, VFS, str]:
try: try:
vpath = vpath.replace("\\", "/").lstrip("/") vpath = vpath.replace("\\", "/").lstrip("/")
rd, fn = os.path.split(vpath) rd, fn = os.path.split(vpath)
if ANYWIN and relchk(rd): if ANYWIN and relchk(rd):
logging.warning("malicious vpath: %s", vpath) logging.warning("malicious vpath: %s", vpath)
raise FilesystemError("unsupported characters in filepath") self.die("Unsupported characters in filepath")
fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"]) fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
vpath = vjoin(rd, fn) vpath = vjoin(rd, fn)
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d) vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
if not vfs.realpath: if not vfs.realpath:
raise FilesystemError("no filesystem mounted at this path") self.die("No filesystem mounted at this path")
return os.path.join(vfs.realpath, rem) return os.path.join(vfs.realpath, rem), vfs, rem
except Pebkac as ex: except Pebkac as ex:
raise FilesystemError(str(ex)) self.die(str(ex))
def rv2a( def rv2a(
self, self,
@@ -157,7 +163,7 @@ class FtpFs(AbstractedFS):
w: bool = False, w: bool = False,
m: bool = False, m: bool = False,
d: bool = False, d: bool = False,
) -> str: ) -> tuple[str, VFS, str]:
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d) return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
def ftp2fs(self, ftppath: str) -> str: def ftp2fs(self, ftppath: str) -> str:
@@ -171,7 +177,7 @@ class FtpFs(AbstractedFS):
def validpath(self, path: str) -> bool: def validpath(self, path: str) -> bool:
if "/.hist/" in path: if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"): if "/up2k." in path or path.endswith("/dir.txt"):
raise FilesystemError("access to this file is forbidden") self.die("Access to this file is forbidden")
return True return True
@@ -179,7 +185,7 @@ class FtpFs(AbstractedFS):
r = "r" in mode r = "r" in mode
w = "w" in mode or "a" in mode or "+" in mode w = "w" in mode or "a" in mode or "+" in mode
ap = self.rv2a(filename, r, w) ap = self.rv2a(filename, r, w)[0]
if w: if w:
try: try:
st = bos.stat(ap) st = bos.stat(ap)
@@ -188,7 +194,7 @@ class FtpFs(AbstractedFS):
td = 0 td = 0
if td < -1 or td > self.args.ftp_wt: if td < -1 or td > self.args.ftp_wt:
raise FilesystemError("cannot open existing file for writing") self.die("Cannot open existing file for writing")
self.validpath(ap) self.validpath(ap)
return open(fsenc(ap), mode) return open(fsenc(ap), mode)
@@ -199,7 +205,7 @@ class FtpFs(AbstractedFS):
ap = vfs.canonical(rem) ap = vfs.canonical(rem)
if not bos.path.isdir(ap): if not bos.path.isdir(ap):
# returning 550 is library-default and suitable # returning 550 is library-default and suitable
raise FilesystemError("Failed to change directory") self.die("Failed to change directory")
self.cwd = nwd self.cwd = nwd
( (
@@ -212,8 +218,8 @@ class FtpFs(AbstractedFS):
) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username) ) = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
def mkdir(self, path: str) -> None: def mkdir(self, path: str) -> None:
ap = self.rv2a(path, w=True) ap = self.rv2a(path, w=True)[0]
bos.mkdir(ap) bos.makedirs(ap) # filezilla expects this
def listdir(self, path: str) -> list[str]: def listdir(self, path: str) -> list[str]:
vpath = join(self.cwd, path).lstrip("/") vpath = join(self.cwd, path).lstrip("/")
@@ -244,43 +250,42 @@ class FtpFs(AbstractedFS):
return list(sorted(list(r.keys()))) return list(sorted(list(r.keys())))
def rmdir(self, path: str) -> None: def rmdir(self, path: str) -> None:
ap = self.rv2a(path, d=True) ap = self.rv2a(path, d=True)[0]
bos.rmdir(ap) bos.rmdir(ap)
def remove(self, path: str) -> None: def remove(self, path: str) -> None:
if self.args.no_del: if self.args.no_del:
raise FilesystemError("the delete feature is disabled in server config") self.die("The delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/") vp = join(self.cwd, path).lstrip("/")
try: try:
self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp], []) self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [])
except Exception as ex: except Exception as ex:
raise FilesystemError(str(ex)) self.die(str(ex))
def rename(self, src: str, dst: str) -> None: def rename(self, src: str, dst: str) -> None:
if not self.can_move: if not self.can_move:
raise FilesystemError("not allowed for user " + self.h.username) self.die("Not allowed for user " + self.h.username)
if self.args.no_mv: if self.args.no_mv:
t = "the rename/move feature is disabled in server config" self.die("The rename/move feature is disabled in server config")
raise FilesystemError(t)
svp = join(self.cwd, src).lstrip("/") svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/") dvp = join(self.cwd, dst).lstrip("/")
try: try:
self.hub.up2k.handle_mv(self.uname, svp, dvp) self.hub.up2k.handle_mv(self.uname, svp, dvp)
except Exception as ex: except Exception as ex:
raise FilesystemError(str(ex)) self.die(str(ex))
def chmod(self, path: str, mode: str) -> None: def chmod(self, path: str, mode: str) -> None:
pass pass
def stat(self, path: str) -> os.stat_result: def stat(self, path: str) -> os.stat_result:
try: try:
ap = self.rv2a(path, r=True) ap = self.rv2a(path, r=True)[0]
return bos.stat(ap) return bos.stat(ap)
except: except:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
st = bos.stat(ap) st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode): if not stat.S_ISDIR(st.st_mode):
raise raise
@@ -288,11 +293,11 @@ class FtpFs(AbstractedFS):
return st return st
def utime(self, path: str, timeval: float) -> None: def utime(self, path: str, timeval: float) -> None:
ap = self.rv2a(path, w=True) ap = self.rv2a(path, w=True)[0]
return bos.utime(ap, (timeval, timeval)) return bos.utime(ap, (timeval, timeval))
def lstat(self, path: str) -> os.stat_result: def lstat(self, path: str) -> os.stat_result:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
return bos.stat(ap) return bos.stat(ap)
def isfile(self, path: str) -> bool: def isfile(self, path: str) -> bool:
@@ -303,7 +308,7 @@ class FtpFs(AbstractedFS):
return False # expected for mojibake in ftp_SIZE() return False # expected for mojibake in ftp_SIZE()
def islink(self, path: str) -> bool: def islink(self, path: str) -> bool:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
return bos.path.islink(ap) return bos.path.islink(ap)
def isdir(self, path: str) -> bool: def isdir(self, path: str) -> bool:
@@ -314,18 +319,18 @@ class FtpFs(AbstractedFS):
return True return True
def getsize(self, path: str) -> int: def getsize(self, path: str) -> int:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
return bos.path.getsize(ap) return bos.path.getsize(ap)
def getmtime(self, path: str) -> float: def getmtime(self, path: str) -> float:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
return bos.path.getmtime(ap) return bos.path.getmtime(ap)
def realpath(self, path: str) -> str: def realpath(self, path: str) -> str:
return path return path
def lexists(self, path: str) -> bool: def lexists(self, path: str) -> bool:
ap = self.rv2a(path) ap = self.rv2a(path)[0]
return bos.path.lexists(ap) return bos.path.lexists(ap)
def get_user_by_uid(self, uid: int) -> str: def get_user_by_uid(self, uid: int) -> str:
@@ -349,17 +354,40 @@ class FtpHandler(FTPHandler):
else: else:
super(FtpHandler, self).__init__(conn, server, ioloop) super(FtpHandler, self).__init__(conn, server, ioloop)
cip = self.remote_ip
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip
# abspath->vpath mapping to resolve log_transfer paths # abspath->vpath mapping to resolve log_transfer paths
self.vfs_map: dict[str, str] = {} self.vfs_map: dict[str, str] = {}
# reduce non-debug logging # reduce non-debug logging
self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")] self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")]
def die(self, msg):
self.respond("550 {}".format(msg))
raise FilesystemError(msg)
def ftp_STOR(self, file: str, mode: str = "w") -> Any: def ftp_STOR(self, file: str, mode: str = "w") -> Any:
# Optional[str] # Optional[str]
vp = join(self.fs.cwd, file).lstrip("/") vp = join(self.fs.cwd, file).lstrip("/")
ap = self.fs.v2a(vp) ap, vfs, rem = self.fs.v2a(vp)
self.vfs_map[ap] = vp self.vfs_map[ap] = vp
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
None,
xbu,
ap,
vfs.canonical(rem),
"",
self.username,
0,
0,
self.cli_ip,
0,
"",
):
self.die("Upload blocked by xbu server config")
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap)) # print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
ret = FTPHandler.ftp_STOR(self, file, mode) ret = FTPHandler.ftp_STOR(self, file, mode)
# print("ftp_STOR: {} {} OK".format(vp, mode)) # print("ftp_STOR: {} {} OK".format(vp, mode))
@@ -384,11 +412,13 @@ class FtpHandler(FTPHandler):
vfs, rem = vfs.get_dbv(rem) vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file( self.hub.up2k.hash_file(
vfs.realpath, vfs.realpath,
vfs.vpath,
vfs.flags, vfs.flags,
rem, rem,
fn, fn,
self.remote_ip, self.cli_ip,
time.time(), time.time(),
self.username,
) )
return FTPHandler.log_transfer( return FTPHandler.log_transfer(

View File

@@ -10,6 +10,7 @@ import gzip
import itertools import itertools
import json import json
import os import os
import random
import re import re
import stat import stat
import string import string
@@ -343,6 +344,8 @@ class HttpCli(object):
if self.args.rsp_slp: if self.args.rsp_slp:
time.sleep(self.args.rsp_slp) time.sleep(self.args.rsp_slp)
if self.args.rsp_jtr:
time.sleep(random.random() * self.args.rsp_jtr)
zso = self.headers.get("cookie") zso = self.headers.get("cookie")
if zso: if zso:
@@ -775,8 +778,8 @@ class HttpCli(object):
if "k304" in self.uparam: if "k304" in self.uparam:
return self.set_k304() return self.set_k304()
if "am_js" in self.uparam: if "setck" in self.uparam:
return self.set_am_js() return self.setck()
if "reset" in self.uparam: if "reset" in self.uparam:
return self.set_cfg_reset() return self.set_cfg_reset()
@@ -862,7 +865,17 @@ class HttpCli(object):
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False, err=401) vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False, err=401)
depth = self.headers.get("depth", "infinity").lower() depth = self.headers.get("depth", "infinity").lower()
if depth == "infinity": try:
topdir = {"vp": "", "st": bos.stat(vn.canonical(rem))}
except OSError as ex:
if ex.errno != errno.ENOENT:
raise
raise Pebkac(404)
if not stat.S_ISDIR(topdir["st"].st_mode):
fgen = []
elif depth == "infinity":
if not self.args.dav_inf: if not self.args.dav_inf:
self.log("client wants --dav-inf", 3) self.log("client wants --dav-inf", 3)
zb = b'<?xml version="1.0" encoding="utf-8"?>\n<D:error xmlns:D="DAV:"><D:propfind-finite-depth/></D:error>' zb = b'<?xml version="1.0" encoding="utf-8"?>\n<D:error xmlns:D="DAV:"><D:propfind-finite-depth/></D:error>'
@@ -901,13 +914,6 @@ class HttpCli(object):
t2 = " or 'infinity'" if self.args.dav_inf else "" t2 = " or 'infinity'" if self.args.dav_inf else ""
raise Pebkac(412, t.format(depth, t2)) raise Pebkac(412, t.format(depth, t2))
try:
topdir = {"vp": "", "st": os.stat(vn.canonical(rem))}
except OSError as ex:
if ex.errno != errno.ENOENT:
raise
raise Pebkac(404)
fgen = itertools.chain([topdir], fgen) # type: ignore fgen = itertools.chain([topdir], fgen) # type: ignore
vtop = vjoin(self.args.R, vjoin(vn.vpath, rem)) vtop = vjoin(self.args.R, vjoin(vn.vpath, rem))
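Editor's note: the reordering above (stat the target before branching on depth, and fall back to an empty fgen when the target is not a directory) is what lets PROPFIND answer for single files, not just folders. A quick way to poke at it, sketched with the same requests library the upload client in this diff already uses; host, port, credentials and path are placeholders:

    import requests

    # PROPFIND a single file; "Depth: 0" asks for properties of the target itself
    url = "http://127.0.0.1:3923/music/song.flac"    # hypothetical server + file
    r = requests.request("PROPFIND", url,
                         headers={"Depth": "0"},
                         auth=("ed", "hunter2"))     # placeholder credentials
    print(r.status_code)   # 207 Multi-Status when the file exists and is readable
    print(r.text[:300])    # a DAV:multistatus document describing just that file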
@@ -1269,9 +1275,10 @@ class HttpCli(object):
self.vpath, self.vpath,
self.host, self.host,
self.uname, self.uname,
self.ip,
time.time(), time.time(),
len(xm), len(xm),
self.ip,
time.time(),
plain, plain,
) )
@@ -1412,31 +1419,35 @@ class HttpCli(object):
self.vpath, self.vpath,
self.host, self.host,
self.uname, self.uname,
self.ip,
at, at,
remains, remains,
self.ip,
at,
"", "",
): ):
t = "upload denied by xbu" t = "upload blocked by xbu server config"
self.log(t, 1) self.log(t, 1)
raise Pebkac(403, t) raise Pebkac(403, t)
if is_put and not self.args.no_dav: if is_put and not (self.args.no_dav or self.args.nw) and bos.path.exists(path):
# allow overwrite if... # allow overwrite if...
# * volflag 'daw' is set # * volflag 'daw' is set, or client is definitely webdav
# * and account has delete-access # * and account has delete-access
# or... # or...
# * file exists and is empty # * file exists, is empty, sufficiently new
# * and there is no .PARTIAL # * and there is no .PARTIAL
tnam = fn + ".PARTIAL" tnam = fn + ".PARTIAL"
if self.args.dotpart: if self.args.dotpart:
tnam = "." + tnam tnam = "." + tnam
if (vfs.flags.get("daw") and self.can_delete) or ( if (
self.can_delete
and (vfs.flags.get("daw") or "x-oc-mtime" in self.headers)
) or (
not bos.path.exists(os.path.join(fdir, tnam)) not bos.path.exists(os.path.join(fdir, tnam))
and bos.path.exists(path)
and not bos.path.getsize(path) and not bos.path.getsize(path)
and bos.path.getmtime(path) >= time.time() - self.args.blank_wt
): ):
# small toctou, but better than clobbering a hardlink # small toctou, but better than clobbering a hardlink
bos.unlink(path) bos.unlink(path)
@@ -1458,6 +1469,16 @@ class HttpCli(object):
if self.args.nw: if self.args.nw:
return post_sz, sha_hex, sha_b64, remains, path, "" return post_sz, sha_hex, sha_b64, remains, path, ""
at = mt = time.time() - lifetime
cli_mt = self.headers.get("x-oc-mtime")
if cli_mt:
try:
mt = int(cli_mt)
times = (int(time.time()), mt)
bos.utime(path, times, False)
except:
pass
if nameless and "magic" in vfs.flags: if nameless and "magic" in vfs.flags:
try: try:
ext = self.conn.hsrv.magician.ext(path) ext = self.conn.hsrv.magician.ext(path)
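Editor's note: the block added above accepts the owncloud webdav extension header mentioned in the commit list: when a client sends x-oc-mtime (a unix timestamp in seconds, as implied by the int() parse), the uploaded file's last-modified time is set to it via bos.utime. A hedged client-side illustration, again using requests; URL, filename and credentials are placeholders:

    import os
    import requests

    src = "song.flac"                                  # hypothetical local file
    url = "http://127.0.0.1:3923/music/song.flac"      # hypothetical destination

    with open(src, "rb") as f:
        r = requests.put(
            url,
            data=f,
            headers={"X-OC-Mtime": str(int(os.path.getmtime(src)))},
            auth=("ed", "hunter2"),                    # placeholder credentials
        )
    print(r.status_code)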
@@ -1480,7 +1501,6 @@ class HttpCli(object):
fn = fn2 fn = fn2
path = path2 path = path2
at = time.time() - lifetime
if xau and not runhook( if xau and not runhook(
self.log, self.log,
xau, xau,
@@ -1488,12 +1508,13 @@ class HttpCli(object):
self.vpath, self.vpath,
self.host, self.host,
self.uname, self.uname,
mt,
post_sz,
self.ip, self.ip,
at, at,
post_sz,
"", "",
): ):
t = "upload denied by xau" t = "upload blocked by xau server config"
self.log(t, 1) self.log(t, 1)
os.unlink(path) os.unlink(path)
raise Pebkac(403, t) raise Pebkac(403, t)
@@ -1502,11 +1523,14 @@ class HttpCli(object):
self.conn.hsrv.broker.say( self.conn.hsrv.broker.say(
"up2k.hash_file", "up2k.hash_file",
vfs.realpath, vfs.realpath,
vfs.vpath,
vfs.flags, vfs.flags,
rem, rem,
fn, fn,
self.ip, self.ip,
at, at,
self.uname,
True,
) )
vsuf = "" vsuf = ""
@@ -1704,7 +1728,7 @@ class HttpCli(object):
except: except:
raise Pebkac(500, min_ex()) raise Pebkac(500, min_ex())
x = self.conn.hsrv.broker.ask("up2k.handle_json", body) x = self.conn.hsrv.broker.ask("up2k.handle_json", body, self.u2fh.aps)
ret = x.get() ret = x.get()
if self.is_vproxied: if self.is_vproxied:
if "purl" in ret: if "purl" in ret:
@@ -1747,12 +1771,13 @@ class HttpCli(object):
hits = idx.fsearch(vols, body) hits = idx.fsearch(vols, body)
msg: Any = repr(hits) msg: Any = repr(hits)
taglist: list[str] = [] taglist: list[str] = []
trunc = False
else: else:
# search by query params # search by query params
q = body["q"] q = body["q"]
n = body.get("n", self.args.srch_hits) n = body.get("n", self.args.srch_hits)
self.log("qj: {} |{}|".format(q, n)) self.log("qj: {} |{}|".format(q, n))
hits, taglist = idx.search(vols, q, n) hits, taglist, trunc = idx.search(vols, q, n)
msg = len(hits) msg = len(hits)
idx.p_end = time.time() idx.p_end = time.time()
@@ -1772,7 +1797,8 @@ class HttpCli(object):
for hit in hits: for hit in hits:
hit["rp"] = self.args.RS + hit["rp"] hit["rp"] = self.args.RS + hit["rp"]
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8") rj = {"hits": hits, "tag_order": order, "trunc": trunc}
r = json.dumps(rj).encode("utf-8")
self.reply(r, mime="application/json") self.reply(r, mime="application/json")
return True return True
@@ -1874,17 +1900,10 @@ class HttpCli(object):
with self.mutex: with self.mutex:
self.u2fh.close(path) self.u2fh.close(path)
# windows cant rename open files if not num_left and not self.args.nw:
if ANYWIN and path != fin_path and not self.args.nw: self.conn.hsrv.broker.ask(
self.conn.hsrv.broker.ask("up2k.finish_upload", ptop, wark).get() "up2k.finish_upload", ptop, wark, self.u2fh.aps
).get()
if not ANYWIN and not num_left:
times = (int(time.time()), int(lastmod))
self.log("no more chunks, setting times {}".format(times))
try:
bos.utime(fin_path, times)
except:
self.log("failed to utime ({}, {})".format(fin_path, times))
cinf = self.headers.get("x-up2k-stat", "") cinf = self.headers.get("x-up2k-stat", "")
@@ -2099,12 +2118,13 @@ class HttpCli(object):
self.vpath, self.vpath,
self.host, self.host,
self.uname, self.uname,
self.ip,
at, at,
0, 0,
self.ip,
at,
"", "",
): ):
t = "upload denied by xbu" t = "upload blocked by xbu server config"
self.log(t, 1) self.log(t, 1)
raise Pebkac(403, t) raise Pebkac(403, t)
@@ -2158,12 +2178,13 @@ class HttpCli(object):
self.vpath, self.vpath,
self.host, self.host,
self.uname, self.uname,
self.ip,
at, at,
sz, sz,
self.ip,
at,
"", "",
): ):
t = "upload denied by xau" t = "upload blocked by xau server config"
self.log(t, 1) self.log(t, 1)
os.unlink(abspath) os.unlink(abspath)
raise Pebkac(403, t) raise Pebkac(403, t)
@@ -2172,11 +2193,14 @@ class HttpCli(object):
self.conn.hsrv.broker.say( self.conn.hsrv.broker.say(
"up2k.hash_file", "up2k.hash_file",
dbv.realpath, dbv.realpath,
vfs.vpath,
dbv.flags, dbv.flags,
vrem, vrem,
fname, fname,
self.ip, self.ip,
at, at,
self.uname,
True,
) )
self.conn.nbyte += sz self.conn.nbyte += sz
@@ -2833,7 +2857,7 @@ class HttpCli(object):
} }
fmt = self.uparam.get("ls", "") fmt = self.uparam.get("ls", "")
if not fmt and self.ua.startswith("curl/"): if not fmt and (self.ua.startswith("curl/") or self.ua.startswith("fetch")):
fmt = "v" fmt = "v"
if fmt in ["v", "t", "txt"]: if fmt in ["v", "t", "txt"]:
@@ -2877,6 +2901,7 @@ class HttpCli(object):
url_suf=suf, url_suf=suf,
k304=self.k304(), k304=self.k304(),
ver=S_VERSION if self.args.ver else "", ver=S_VERSION if self.args.ver else "",
ahttps="" if self.is_https else "https://" + self.host + self.req,
) )
self.reply(html.encode("utf-8")) self.reply(html.encode("utf-8"))
return True return True
@@ -2887,15 +2912,16 @@ class HttpCli(object):
self.redirect("", "?h#cc") self.redirect("", "?h#cc")
return True return True
def set_am_js(self) -> bool: def setck(self) -> bool:
v = "n" if self.uparam["am_js"] == "n" else "y" k, v = self.uparam["setck"].split("=", 1)
ck = gencookie("js", v, self.args.R, False, 86400 * 299) t = None if v == "" else 86400 * 299
ck = gencookie(k, v, self.args.R, False, t)
self.out_headerlist.append(("Set-Cookie", ck)) self.out_headerlist.append(("Set-Cookie", ck))
self.reply(b"promoted\n") self.reply(b"o7\n")
return True return True
def set_cfg_reset(self) -> bool: def set_cfg_reset(self) -> bool:
for k in ("k304", "js", "cppwd", "cppws"): for k in ("k304", "js", "idxh", "cppwd", "cppws"):
cookie = gencookie(k, "x", self.args.R, False, None) cookie = gencookie(k, "x", self.args.R, False, None)
self.out_headerlist.append(("Set-Cookie", cookie)) self.out_headerlist.append(("Set-Cookie", cookie))
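Editor's note: the old ?am_js handler is generalized into ?setck=k=v, which sets an arbitrary UI cookie; an empty value clears it again (the t = None branch), and idxh joins the cookies wiped by ?reset. A small sketch of driving it from a script; host and port are placeholders:

    import requests

    base = "http://127.0.0.1:3923"      # placeholder host/port
    s = requests.Session()

    # prefer index.html over directory listings for this session (see --ih / idxh)
    s.get(base + "/?setck=idxh=y")

    # clear the preference again; an empty value expires the cookie
    s.get(base + "/?setck=idxh=")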
@@ -2926,7 +2952,7 @@ class HttpCli(object):
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True) vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
args = [self.asrv.vfs.all_vols, [vn.vpath], False] args = [self.asrv.vfs.all_vols, [vn.vpath], False, True]
x = self.conn.hsrv.broker.ask("up2k.rescan", *args) x = self.conn.hsrv.broker.ask("up2k.rescan", *args)
err = x.get() err = x.get()
@@ -3239,21 +3265,45 @@ class HttpCli(object):
): ):
raise Pebkac(403) raise Pebkac(403)
e2d = "e2d" in vn.flags
e2t = "e2t" in vn.flags
self.html_head = vn.flags.get("html_head", "") self.html_head = vn.flags.get("html_head", "")
if vn.flags.get("norobots"): if vn.flags.get("norobots") or "b" in self.uparam:
self.out_headers["X-Robots-Tag"] = "noindex, nofollow" self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
else: else:
self.out_headers.pop("X-Robots-Tag", None) self.out_headers.pop("X-Robots-Tag", None)
is_dir = stat.S_ISDIR(st.st_mode) is_dir = stat.S_ISDIR(st.st_mode)
icur = None
if e2t or (e2d and is_dir):
idx = self.conn.get_u2idx()
icur = idx.get_cur(dbv.realpath)
if self.can_read: if self.can_read:
th_fmt = self.uparam.get("th") th_fmt = self.uparam.get("th")
if th_fmt is not None: if th_fmt is not None:
if is_dir: if is_dir:
for fn in self.args.th_covers.split(","): vrem = vrem.rstrip("/")
if icur and vrem:
q = "select fn from cv where rd=? and dn=?"
crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem)
# no mojibake support:
try:
cfn = icur.execute(q, (crd, cdn)).fetchone()
if cfn:
fn = cfn[0]
fp = os.path.join(abspath, fn) fp = os.path.join(abspath, fn)
if bos.path.exists(fp): if bos.path.exists(fp):
vrem = "{}/{}".format(vrem.rstrip("/"), fn).strip("/") vrem = "{}/{}".format(vrem, fn).strip("/")
is_dir = False
except:
pass
else:
for fn in self.args.th_covers:
fp = os.path.join(abspath, fn)
if bos.path.exists(fp):
vrem = "{}/{}".format(vrem, fn).strip("/")
is_dir = False is_dir = False
break break
@@ -3329,7 +3379,7 @@ class HttpCli(object):
is_ls = "ls" in self.uparam is_ls = "ls" in self.uparam
is_js = self.args.force_js or self.cookies.get("js") == "y" is_js = self.args.force_js or self.cookies.get("js") == "y"
if not is_ls and self.ua.startswith("curl/"): if not is_ls and (self.ua.startswith("curl/") or self.ua.startswith("fetch")):
self.uparam["ls"] = "v" self.uparam["ls"] = "v"
is_ls = True is_ls = True
@@ -3362,8 +3412,8 @@ class HttpCli(object):
"taglist": [], "taglist": [],
"srvinf": srv_infot, "srvinf": srv_infot,
"acct": self.uname, "acct": self.uname,
"idx": ("e2d" in vn.flags), "idx": e2d,
"itag": ("e2t" in vn.flags), "itag": e2t,
"lifetime": vn.flags.get("lifetime") or 0, "lifetime": vn.flags.get("lifetime") or 0,
"frand": bool(vn.flags.get("rand")), "frand": bool(vn.flags.get("rand")),
"perms": perms, "perms": perms,
@@ -3382,8 +3432,8 @@ class HttpCli(object):
"taglist": [], "taglist": [],
"def_hcols": [], "def_hcols": [],
"have_emp": self.args.emp, "have_emp": self.args.emp,
"have_up2k_idx": ("e2d" in vn.flags), "have_up2k_idx": e2d,
"have_tags_idx": ("e2t" in vn.flags), "have_tags_idx": e2t,
"have_acode": (not self.args.no_acode), "have_acode": (not self.args.no_acode),
"have_mv": (not self.args.no_mv), "have_mv": (not self.args.no_mv),
"have_del": (not self.args.no_del), "have_del": (not self.args.no_del),
@@ -3395,11 +3445,12 @@ class HttpCli(object):
"url_suf": url_suf, "url_suf": url_suf,
"logues": logues, "logues": logues,
"readme": readme, "readme": readme,
"title": html_escape(self.vpath, crlf=True) or "🎉", "title": html_escape(self.vpath, crlf=True) or "💾🎉",
"srv_info": srv_infot, "srv_info": srv_infot,
"dtheme": self.args.theme, "dtheme": self.args.theme,
"themes": self.args.themes, "themes": self.args.themes,
"turbolvl": self.args.turbo, "turbolvl": self.args.turbo,
"idxh": int(self.args.ih),
"u2sort": self.args.u2sort, "u2sort": self.args.u2sort,
} }
@@ -3459,11 +3510,6 @@ class HttpCli(object):
if not self.args.ed or "dots" not in self.uparam: if not self.args.ed or "dots" not in self.uparam:
ls_names = exclude_dotfiles(ls_names) ls_names = exclude_dotfiles(ls_names)
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
icur = idx.get_cur(dbv.realpath)
add_fk = vn.flags.get("fk") add_fk = vn.flags.get("fk")
dirs = [] dirs = []
@@ -3538,6 +3584,20 @@ class HttpCli(object):
files.append(item) files.append(item)
item["rd"] = rem item["rd"] = rem
if (
self.cookies.get("idxh") == "y"
and "ls" not in self.uparam
and "v" not in self.uparam
):
idx_html = set(["index.htm", "index.html"])
for item in files:
if item["name"] in idx_html:
# do full resolve in case of shadowed file
vp = vjoin(self.vpath.split("?")[0], item["name"])
vn, rem = self.asrv.vfs.get(vp, self.uname, True, False)
ap = vn.canonical(rem)
return self.tx_file(ap) # is no-cache
tagset: set[str] = set() tagset: set[str] = set()
for fe in files: for fe in files:
fn = fe["name"] fn = fe["name"]
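Editor's note: with the idxh cookie (or the --ih default added earlier in this diff) set, a folder containing index.htm or index.html is answered with that file instead of the listing; the guards on the "ls" and "v" url-params keep the normal views reachable. A short sketch continuing the placeholder setup above; "/www/" is a hypothetical volume holding an index.html:

    import requests

    base = "http://127.0.0.1:3923"      # placeholder host/port
    s = requests.Session()
    s.get(base + "/?setck=idxh=y")

    print(s.get(base + "/www/").text[:80])             # the folder's index.html itself
    print("files" in s.get(base + "/www/?ls").json())  # ?ls bypasses it -> JSON listing
    print(s.get(base + "/www/?v").status_code)         # ?v forces the regular file browser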

View File

@@ -11,9 +11,19 @@ import time
import queue import queue
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams
try:
MNFE = ModuleNotFoundError
except:
MNFE = ImportError
try: try:
import jinja2 import jinja2
except ImportError: except MNFE:
if EXE:
raise
print( print(
"""\033[1;31m """\033[1;31m
you do not have jinja2 installed,\033[33m you do not have jinja2 installed,\033[33m
@@ -28,7 +38,6 @@ except ImportError:
) )
sys.exit(1) sys.exit(1)
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, EnvParams
from .bos import bos from .bos import bos
from .httpconn import HttpConn from .httpconn import HttpConn
from .util import ( from .util import (

View File

@@ -8,13 +8,12 @@ import shutil
import subprocess as sp import subprocess as sp
import sys import sys
from .__init__ import PY2, WINDOWS, E, unicode from .__init__ import EXE, PY2, WINDOWS, E, unicode
from .bos import bos from .bos import bos
from .util import ( from .util import (
FFMPEG_URL, FFMPEG_URL,
REKOBO_LKEY, REKOBO_LKEY,
fsenc, fsenc,
is_exe,
min_ex, min_ex,
pybin, pybin,
retchk, retchk,
@@ -270,7 +269,9 @@ class MTag(object):
self.args = args self.args = args
self.usable = True self.usable = True
self.prefer_mt = not args.no_mtag_ff self.prefer_mt = not args.no_mtag_ff
self.backend = "ffprobe" if args.no_mutagen else "mutagen" self.backend = (
"ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen"
)
self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff
mappings = args.mtm mappings = args.mtm
or_ffprobe = " or FFprobe" or_ffprobe = " or FFprobe"
@@ -296,7 +297,7 @@ class MTag(object):
self.log(msg, c=3) self.log(msg, c=3)
if not self.usable: if not self.usable:
if is_exe: if EXE:
t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: " t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: "
self.log(t + FFMPEG_URL) self.log(t + FFMPEG_URL)
return return
@@ -472,7 +473,10 @@ class MTag(object):
self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90") self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90")
if not md.info.length and not md.info.codec: if not md.info.length and not md.info.codec:
raise Exception() raise Exception()
except: except Exception as ex:
if self.args.mtag_v:
self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
return self.get_ffprobe(abspath) if self.can_ffprobe else {} return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = bos.path.getsize(abspath) sz = bos.path.getsize(abspath)
@@ -535,7 +539,7 @@ class MTag(object):
env = os.environ.copy() env = os.environ.copy()
try: try:
if is_exe: if EXE:
raise Exception() raise Exception()
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
@@ -543,7 +547,7 @@ class MTag(object):
pypath = str(os.pathsep.join(zsl)) pypath = str(os.pathsep.join(zsl))
env["PYTHONPATH"] = pypath env["PYTHONPATH"] = pypath
except: except:
if not E.ox and not is_exe: if not E.ox and not EXE:
raise raise
ret: dict[str, Any] = {} ret: dict[str, Any] = {}

View File

@@ -9,13 +9,13 @@ import sys
import time import time
from types import SimpleNamespace from types import SimpleNamespace
from .__init__ import ANYWIN, TYPE_CHECKING from .__init__ import ANYWIN, EXE, TYPE_CHECKING
from .authsrv import LEELOO_DALLAS, VFS from .authsrv import LEELOO_DALLAS, VFS
from .bos import bos from .bos import bos
from .util import Daemon, is_exe, min_ex, pybin from .util import Daemon, min_ex, pybin, runhook
if True: # pylint: disable=using-constant-test if True: # pylint: disable=using-constant-test
from typing import Any from typing import Any, Union
if TYPE_CHECKING: if TYPE_CHECKING:
from .svchub import SvcHub from .svchub import SvcHub
@@ -42,7 +42,7 @@ class SMB(object):
from impacket import smbserver from impacket import smbserver
from impacket.ntlm import compute_lmhash, compute_nthash from impacket.ntlm import compute_lmhash, compute_nthash
except ImportError: except ImportError:
if is_exe: if EXE:
print("copyparty.exe cannot do SMB") print("copyparty.exe cannot do SMB")
sys.exit(1) sys.exit(1)
@@ -113,6 +113,9 @@ class SMB(object):
self.stop = srv.stop self.stop = srv.stop
self.log("smb", "listening @ {}:{}".format(ip, port)) self.log("smb", "listening @ {}:{}".format(ip, port))
def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
self.log("smb", msg, c)
def start(self) -> None: def start(self) -> None:
Daemon(self.srv.start) Daemon(self.srv.start)
@@ -169,9 +172,16 @@ class SMB(object):
             yeet("blocked write (no --smbw): " + vpath)

         vfs, ap = self._v2a("open", vpath, *a)
-        if wr and not vfs.axs.uwrite:
-            yeet("blocked write (no-write-acc): " + vpath)
+        if wr:
+            if not vfs.axs.uwrite:
+                yeet("blocked write (no-write-acc): " + vpath)
+
+            xbu = vfs.flags.get("xbu")
+            if xbu and not runhook(
+                self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
+            ):
+                yeet("blocked by xbu server config: " + vpath)

         ret = bos.open(ap, flags, *a, mode=chmod, **ka)
         if wr:
             now = time.time()
@@ -198,11 +208,13 @@ class SMB(object):
vfs, rem = vfs.get_dbv(rem) vfs, rem = vfs.get_dbv(rem)
self.hub.up2k.hash_file( self.hub.up2k.hash_file(
vfs.realpath, vfs.realpath,
vfs.vpath,
vfs.flags, vfs.flags,
rem, rem,
fn, fn,
"1.7.6.2", "1.7.6.2",
time.time(), time.time(),
"",
) )
def _rename(self, vp1: str, vp2: str) -> None: def _rename(self, vp1: str, vp2: str) -> None:
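
The _open change above gates SMB writes twice: first on the volume's write ACL, then on the optional xbu (execute-before-upload) hook, so server-side hooks now apply to SMB clients too. A rough standalone sketch of that double gate, using a plain callable where copyparty calls runhook:

    import os
    from typing import Callable, Optional

    def guarded_open(
        abspath: str,
        flags: int,
        can_write: bool,
        xbu_hook: Optional[Callable[[str], bool]] = None,
    ) -> int:
        # only writes are gated; reads pass straight through to the OS
        wr = flags & (os.O_WRONLY | os.O_RDWR | os.O_APPEND)
        if wr:
            if not can_write:
                raise PermissionError("blocked write (no-write-acc): " + abspath)
            if xbu_hook and not xbu_hook(abspath):
                raise PermissionError("blocked by xbu server config: " + abspath)
        return os.open(abspath, flags, 0o644)

    if __name__ == "__main__":
        deny_tmp = lambda ap: not ap.endswith(".tmp")
        fd = guarded_open("demo.txt", os.O_WRONLY | os.O_CREAT, True, deny_tmp)
        os.close(fd)
        os.unlink("demo.txt")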

View File

@@ -28,7 +28,7 @@ if True: # pylint: disable=using-constant-test
import typing import typing
from typing import Any, Optional, Union from typing import Any, Optional, Union
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, VT100, EnvParams, unicode
from .authsrv import AuthSrv from .authsrv import AuthSrv
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv from .tcpsrv import TcpSrv
@@ -43,7 +43,6 @@ from .util import (
HMaccas, HMaccas,
alltrace, alltrace,
ansi_re, ansi_re,
is_exe,
min_ex, min_ex,
mp, mp,
pybin, pybin,
@@ -150,13 +149,10 @@ class SvcHub(object):
             self.log("root", t.format(args.j))
         if not args.no_fpool and args.j != 1:
-            t = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
-            if ANYWIN:
-                t = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
-                args.no_fpool = True
-            self.log("root", t, c=3)
+            t = "WARNING: ignoring --use-fpool because multithreading (-j{}) is enabled"
+            self.log("root", t.format(args.j), c=3)
+            args.no_fpool = True
         bri = "zy"[args.theme % 2 :][:1]
         ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
         args.theme = "{0}{1} {0} {1}".format(ch, bri)
@@ -212,7 +208,7 @@ class SvcHub(object):
want_ff = True want_ff = True
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg" msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
msg = msg.format(" " * 37, os.path.basename(pybin)) msg = msg.format(" " * 37, os.path.basename(pybin))
if is_exe: if EXE:
msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails" msg = "copyparty.exe cannot use Pillow or pyvips; need ffprobe.exe and ffmpeg.exe to create thumbnails"
self.log("thumb", msg, c=3) self.log("thumb", msg, c=3)
@@ -349,6 +345,11 @@ class SvcHub(object):
al.RS = R + "/" if R else "" al.RS = R + "/" if R else ""
al.SRS = "/" + R + "/" if R else "/" al.SRS = "/" + R + "/" if R else "/"
if al.rsp_jtr:
al.rsp_slp = 0.000001
al.th_covers = set(al.th_covers.split(","))
return True return True
def _setlimits(self) -> None: def _setlimits(self) -> None:
@@ -403,6 +404,7 @@ class SvcHub(object):
def _setup_logfile(self, printed: str) -> None: def _setup_logfile(self, printed: str) -> None:
base_fn = fn = sel_fn = self._logname() base_fn = fn = sel_fn = self._logname()
do_xz = fn.lower().endswith(".xz")
if fn != self.args.lo: if fn != self.args.lo:
ctr = 0 ctr = 0
# yup this is a race; if started sufficiently concurrently, two # yup this is a race; if started sufficiently concurrently, two
@@ -414,7 +416,7 @@ class SvcHub(object):
             fn = sel_fn
         try:
-            if fn.lower().endswith(".xz"):
+            if do_xz:
                 import lzma
                 lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
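
The _setup_logfile tweak only hoists the ".xz" check into do_xz before the collision counter can rename the file; the lzma handling itself is unchanged. For reference, a live logfile written through lzma with preset 0 (favoring throughput over compression ratio) looks roughly like this:

    import lzma
    import time

    fn = "copyparty-demo.log.xz"
    do_xz = fn.lower().endswith(".xz")

    if do_xz:
        # preset=0 keeps compression cheap enough for an append-only log
        lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
    else:
        lh = open(fn, "wt", encoding="utf-8", errors="replace")

    lh.write("{:.3f} logfile opened\n".format(time.time()))
    lh.close()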

View File

@@ -135,7 +135,7 @@ class ThumbSrv(object):
             msg = "cannot create audio/video thumbnails because some of the required programs are not available: "
             msg += ", ".join(missing)
             self.log(msg, c=3)
-            if ANYWIN:
+            if ANYWIN and self.args.no_acode:
                 self.log("download FFmpeg to fix it:\033[0m " + FFMPEG_URL, 3)
         if self.args.th_clean:
@@ -561,12 +561,19 @@ class ThumbSrv(object):
         if "ac" not in ret:
             raise Exception("not audio")

+        try:
+            dur = ret[".dur"][1]
+        except:
+            dur = 0
+
         src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
         want_caf = tpath.endswith(".caf")
         tmp_opus = tpath
         if want_caf:
             tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
+        caf_src = abspath if src_opus else tmp_opus

         if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
             # fmt: off
             cmd = [
@@ -584,7 +591,32 @@ class ThumbSrv(object):
             # fmt: on
             self._run_ff(cmd)

-        if want_caf:
+        # iOS fails to play some "insufficiently complex" files
+        # (average file shorter than 8 seconds), so of course we
+        # fix that by mixing in some inaudible pink noise :^)
+        # 6.3 sec seems like the cutoff so lets do 7, and
+        # 7 sec of psyqui-musou.opus @ 3:50 is 174 KiB
+        if want_caf and (dur < 20 or bos.path.getsize(caf_src) < 256 * 1024):
+            # fmt: off
+            cmd = [
+                b"ffmpeg",
+                b"-nostdin",
+                b"-v", b"error",
+                b"-hide_banner",
+                b"-i", fsenc(abspath),
+                b"-filter_complex", b"anoisesrc=a=0.001:d=7:c=pink,asplit[l][r]; [l][r]amerge[s]; [0:a:0][s]amix",
+                b"-map_metadata", b"-1",
+                b"-ac", b"2",
+                b"-c:a", b"libopus",
+                b"-b:a", b"128k",
+                b"-f", b"caf",
+                fsenc(tpath)
+            ]
+            # fmt: on
+            self._run_ff(cmd)
+
+        elif want_caf:
+            # simple remux should be safe
             # fmt: off
             cmd = [
                 b"ffmpeg",

View File

@@ -120,10 +120,10 @@ class U2idx(object):
     def search(
         self, vols: list[tuple[str, str, dict[str, Any]]], uq: str, lim: int
-    ) -> tuple[list[dict[str, Any]], list[str]]:
+    ) -> tuple[list[dict[str, Any]], list[str], bool]:
         """search by query params"""
         if not HAVE_SQLITE3:
-            return [], []
+            return [], [], False

         q = ""
         v: Union[str, int] = ""
@@ -275,7 +275,7 @@ class U2idx(object):
have_up: bool, have_up: bool,
have_mt: bool, have_mt: bool,
lim: int, lim: int,
) -> tuple[list[dict[str, Any]], list[str]]: ) -> tuple[list[dict[str, Any]], list[str], bool]:
done_flag: list[bool] = [] done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format( self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident time.time(), threading.current_thread().ident
@@ -293,6 +293,7 @@ class U2idx(object):
self.log("qs: {!r} {!r}".format(uq, uv)) self.log("qs: {!r} {!r}".format(uq, uv))
ret = [] ret = []
seen_rps: set[str] = set()
lim = min(lim, int(self.args.srch_hits)) lim = min(lim, int(self.args.srch_hits))
taglist = {} taglist = {}
for (vtop, ptop, flags) in vols: for (vtop, ptop, flags) in vols:
@@ -315,9 +316,6 @@ class U2idx(object):
c = cur.execute(uq, tuple(vuv)) c = cur.execute(uq, tuple(vuv))
for hit in c: for hit in c:
w, ts, sz, rd, fn, ip, at = hit[:7] w, ts, sz, rd, fn, ip, at = hit[:7]
lim -= 1
if lim < 0:
break
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn) rd, fn = s3dec(rd, fn)
@@ -326,6 +324,9 @@ class U2idx(object):
if not dots and "/." in ("/" + rp): if not dots and "/." in ("/" + rp):
continue continue
if rp in seen_rps:
continue
if not fk: if not fk:
suf = "" suf = ""
else: else:
@@ -342,6 +343,11 @@ class U2idx(object):
)[:fk] )[:fk]
) )
lim -= 1
if lim < 0:
break
seen_rps.add(rp)
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]}) sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
for hit in sret: for hit in sret:
@@ -361,17 +367,9 @@ class U2idx(object):
         done_flag.append(True)
         self.active_id = ""

-        # undupe hits from multiple metadata keys
-        if len(ret) > 1:
-            ret = [ret[0]] + [
-                y
-                for x, y in zip(ret[:-1], ret[1:])
-                if x["rp"].split("?")[0] != y["rp"].split("?")[0]
-            ]
-
         ret.sort(key=itemgetter("rp"))
-        return ret, list(taglist.keys())
+        return ret, list(taglist.keys()), lim < 0
def terminator(self, identifier: str, done_flag: list[bool]) -> None: def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout): for _ in range(self.timeout):
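
The search changes above replace the old post-hoc sort-and-undupe with an inline seen-set: the hit limit is only spent on results that survive all filters, and the third return value tells the caller whether the listing was clipped. The same pattern as a standalone function:

    def dedupe_hits(hits: list, lim: int):
        """keep at most `lim` unique hits (by path, ignoring ?filekey); report clipping"""
        seen_rps = set()
        ret = []
        for hit in hits:
            rp = hit["rp"].split("?")[0]
            if rp in seen_rps:
                continue  # same file found through another metadata key / volume
            lim -= 1
            if lim < 0:
                break
            seen_rps.add(rp)
            ret.append(hit)
        ret.sort(key=lambda x: x["rp"])
        return ret, lim < 0

    if __name__ == "__main__":
        rows = [{"rp": "a/x.flac"}, {"rp": "a/x.flac?k=abc"}, {"rp": "b/y.mp3"}]
        print(dedupe_hits(rows, 2))  # two unique hits, not clipped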

View File

@@ -48,6 +48,7 @@ from .util import (
rmdirs, rmdirs,
rmdirs_up, rmdirs_up,
runhook, runhook,
runihook,
s2hms, s2hms,
s3dec, s3dec,
s3enc, s3enc,
@@ -72,6 +73,9 @@ if True: # pylint: disable=using-constant-test
if TYPE_CHECKING: if TYPE_CHECKING:
from .svchub import SvcHub from .svchub import SvcHub
zs = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
CV_EXTS = set(zs.split(","))
class Dbw(object): class Dbw(object):
def __init__(self, c: "sqlite3.Cursor", n: int, t: float) -> None: def __init__(self, c: "sqlite3.Cursor", n: int, t: float) -> None:
@@ -122,6 +126,8 @@ class Up2k(object):
self.flags: dict[str, dict[str, Any]] = {} self.flags: dict[str, dict[str, Any]] = {}
self.droppable: dict[str, list[str]] = {} self.droppable: dict[str, list[str]] = {}
self.volstate: dict[str, str] = {} self.volstate: dict[str, str] = {}
self.vol_act: dict[str, float] = {}
self.busy_aps: set[str] = set()
self.dupesched: dict[str, list[tuple[str, str, float]]] = {} self.dupesched: dict[str, list[tuple[str, str, float]]] = {}
self.snap_persist_interval = 300 # persist unfinished index every 5 min self.snap_persist_interval = 300 # persist unfinished index every 5 min
self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity
@@ -131,13 +137,17 @@ class Up2k(object):
self.entags: dict[str, set[str]] = {} self.entags: dict[str, set[str]] = {}
self.mtp_parsers: dict[str, dict[str, MParser]] = {} self.mtp_parsers: dict[str, dict[str, MParser]] = {}
self.pending_tags: list[tuple[set[str], str, str, dict[str, Any]]] = [] self.pending_tags: list[tuple[set[str], str, str, dict[str, Any]]] = []
self.hashq: Queue[tuple[str, str, str, str, float]] = Queue() self.hashq: Queue[tuple[str, str, str, str, str, float, str, bool]] = Queue()
self.tagq: Queue[tuple[str, str, str, str, str, float]] = Queue() self.tagq: Queue[tuple[str, str, str, str, str, float]] = Queue()
self.tag_event = threading.Condition() self.tag_event = threading.Condition()
self.n_hashq = 0 self.n_hashq = 0
self.n_tagq = 0 self.n_tagq = 0
self.mpool_used = False self.mpool_used = False
self.xiu_ptn = re.compile(r"(?:^|,)i([0-9]+)")
self.xiu_busy = False # currently running hook
self.xiu_asleep = True # needs rescan_cond poke to schedule self
self.cur: dict[str, "sqlite3.Cursor"] = {} self.cur: dict[str, "sqlite3.Cursor"] = {}
self.mem_cur = None self.mem_cur = None
self.sqlite_ver = None self.sqlite_ver = None
@@ -155,12 +165,6 @@ class Up2k(object):
t = "could not initialize sqlite3, will use in-memory registry only" t = "could not initialize sqlite3, will use in-memory registry only"
self.log(t, 3) self.log(t, 3)
if ANYWIN:
# usually fails to set lastmod too quickly
self.lastmod_q: list[tuple[str, int, tuple[int, int], bool]] = []
self.lastmod_q2 = self.lastmod_q[:]
Daemon(self._lastmodder, "up2k-lastmod")
self.fstab = Fstab(self.log_func) self.fstab = Fstab(self.log_func)
self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey
@@ -182,11 +186,11 @@ class Up2k(object):
self.gid += 1 self.gid += 1
self.log("reload #{} initiated".format(self.gid)) self.log("reload #{} initiated".format(self.gid))
all_vols = self.asrv.vfs.all_vols all_vols = self.asrv.vfs.all_vols
self.rescan(all_vols, list(all_vols.keys()), True) self.rescan(all_vols, list(all_vols.keys()), True, False)
def deferred_init(self) -> None: def deferred_init(self) -> None:
all_vols = self.asrv.vfs.all_vols all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols, []) have_e2d = self.init_indexes(all_vols, [], False)
if self.stop: if self.stop:
# up-mt consistency not guaranteed if init is interrupted; # up-mt consistency not guaranteed if init is interrupted;
@@ -257,11 +261,13 @@ class Up2k(object):
} }
return json.dumps(ret, indent=4) return json.dumps(ret, indent=4)
def rescan(self, all_vols: dict[str, VFS], scan_vols: list[str], wait: bool) -> str: def rescan(
self, all_vols: dict[str, VFS], scan_vols: list[str], wait: bool, fscan: bool
) -> str:
if not wait and self.pp: if not wait and self.pp:
return "cannot initiate; scan is already in progress" return "cannot initiate; scan is already in progress"
args = (all_vols, scan_vols) args = (all_vols, scan_vols, fscan)
Daemon( Daemon(
self.init_indexes, self.init_indexes,
"up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"), "up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"),
@@ -291,7 +297,7 @@ class Up2k(object):
cooldown = now + 1 cooldown = now + 1
continue continue
cooldown = now + 5 cooldown = now + 3
# self.log("SR", 5) # self.log("SR", 5)
if self.args.no_lifetime: if self.args.no_lifetime:
@@ -300,6 +306,8 @@ class Up2k(object):
# important; not deferred by db_act # important; not deferred by db_act
timeout = self._check_lifetimes() timeout = self._check_lifetimes()
timeout = min(timeout, now + self._check_xiu())
with self.mutex: with self.mutex:
for vp, vol in sorted(self.asrv.vfs.all_vols.items()): for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
maxage = vol.flags.get("scan") maxage = vol.flags.get("scan")
@@ -334,7 +342,7 @@ class Up2k(object):
if vols: if vols:
cooldown = now + 10 cooldown = now + 10
err = self.rescan(self.asrv.vfs.all_vols, vols, False) err = self.rescan(self.asrv.vfs.all_vols, vols, False, False)
if err: if err:
for v in vols: for v in vols:
self.need_rescan.add(v) self.need_rescan.add(v)
@@ -394,6 +402,83 @@ class Up2k(object):
return timeout return timeout
def _check_xiu(self) -> float:
if self.xiu_busy:
return 2
ret = 9001
for _, vol in sorted(self.asrv.vfs.all_vols.items()):
rp = vol.realpath
cur = self.cur.get(rp)
if not cur:
continue
with self.mutex:
q = "select distinct c from iu"
cds = cur.execute(q).fetchall()
if not cds:
continue
run_cds: list[int] = []
for cd in sorted([x[0] for x in cds]):
delta = cd - (time.time() - self.vol_act[rp])
if delta > 0:
ret = min(ret, delta)
break
run_cds.append(cd)
if run_cds:
self.xiu_busy = True
Daemon(self._run_xius, "xiu", (vol, run_cds))
return 2
return ret
def _run_xius(self, vol: VFS, cds: list[int]):
for cd in cds:
self._run_xiu(vol, cd)
self.xiu_busy = False
self.xiu_asleep = True
def _run_xiu(self, vol: VFS, cd: int):
rp = vol.realpath
cur = self.cur[rp]
# t0 = time.time()
with self.mutex:
q = "select w,rd,fn from iu where c={} limit 80386"
wrfs = cur.execute(q.format(cd)).fetchall()
if not wrfs:
return
# dont wanna rebox so use format instead of prepared
q = "delete from iu where w=? and +rd=? and +fn=? and +c={}"
cur.executemany(q.format(cd), wrfs)
cur.connection.commit()
q = "select * from up where substr(w,1,16)=? and +rd=? and +fn=?"
ups = []
for wrf in wrfs:
up = cur.execute(q, wrf).fetchone()
if up:
ups.append(up)
# t1 = time.time()
# self.log("mapped {} warks in {:.3f} sec".format(len(wrfs), t1 - t0))
# "mapped 10989 warks in 0.126 sec"
cmds = self.flags[rp]["xiu"]
for cmd in cmds:
m = self.xiu_ptn.search(cmd)
ccd = int(m.group(1)) if m else 5
if ccd != cd:
continue
self.log("xiu: {}# {}".format(len(wrfs), cmd))
runihook(self.log, cmd, vol, ups)
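
The xiu scheduler above keys everything on a per-hook cooldown embedded in the volflag (the iN prefix matched by xiu_ptn): hooks sharing a cooldown are batched, and a batch only fires once the volume has been idle that long. A small sketch of the flag parsing and grouping; the hook strings are invented examples:

    import re

    XIU_PTN = re.compile(r"(?:^|,)i([0-9]+)")

    def group_by_cooldown(cmds: list) -> dict:
        # bucket hook commandlines by their iN cooldown, defaulting to 5 seconds
        buckets = {}
        for cmd in cmds:
            m = XIU_PTN.search(cmd)
            cd = int(m.group(1)) if m else 5
            buckets.setdefault(cd, []).append(cmd)
        return buckets

    if __name__ == "__main__":
        hooks = ["f,i30,bin/hooks/reindex.py", "j,c,bin/hooks/notify.py"]
        print(group_by_cooldown(hooks))  # {30: [...], 5: [...]}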
def _vis_job_progress(self, job: dict[str, Any]) -> str: def _vis_job_progress(self, job: dict[str, Any]) -> str:
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"])) perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
path = djoin(job["ptop"], job["prel"], job["name"]) path = djoin(job["ptop"], job["prel"], job["name"])
@@ -417,7 +502,9 @@ class Up2k(object):
return True, ret return True, ret
def init_indexes(self, all_vols: dict[str, VFS], scan_vols: list[str]) -> bool: def init_indexes(
self, all_vols: dict[str, VFS], scan_vols: list[str], fscan: bool
) -> bool:
gid = self.gid gid = self.gid
while self.pp and gid == self.gid: while self.pp and gid == self.gid:
time.sleep(0.1) time.sleep(0.1)
@@ -503,7 +590,7 @@ class Up2k(object):
if "e2d" in vol.flags: if "e2d" in vol.flags:
have_e2d = True have_e2d = True
if "e2ds" in vol.flags: if "e2ds" in vol.flags or fscan:
self.volstate[vol.vpath] = "busy (hashing files)" self.volstate[vol.vpath] = "busy (hashing files)"
_, vac = self._build_file_index(vol, list(all_vols.values())) _, vac = self._build_file_index(vol, list(all_vols.values()))
if vac: if vac:
@@ -710,6 +797,7 @@ class Up2k(object):
self.log("\n".join(ta)) self.log("\n".join(ta))
self.flags[ptop] = flags self.flags[ptop] = flags
self.vol_act[ptop] = 0.0
self.registry[ptop] = reg self.registry[ptop] = reg
self.droppable[ptop] = drp or [] self.droppable[ptop] = drp or []
self.regdrop(ptop, "") self.regdrop(ptop, "")
@@ -860,6 +948,7 @@ class Up2k(object):
unreg: list[str] = [] unreg: list[str] = []
files: list[tuple[int, int, str]] = [] files: list[tuple[int, int, str]] = []
fat32 = True fat32 = True
cv = ""
assert self.pp and self.mem_cur assert self.pp and self.mem_cur
self.pp.msg = "a{} {}".format(self.pp.n, cdir) self.pp.msg = "a{} {}".format(self.pp.n, cdir)
@@ -922,6 +1011,12 @@ class Up2k(object):
continue continue
files.append((sz, lmod, iname)) files.append((sz, lmod, iname))
liname = iname.lower()
if sz and (
iname in self.args.th_covers
or (not cv and liname.rsplit(".", 1)[-1] in CV_EXTS)
):
cv = iname
# folder of 1000 files = ~1 MiB RAM best-case (tiny filenames); # folder of 1000 files = ~1 MiB RAM best-case (tiny filenames);
# free up stuff we're done with before dhashing # free up stuff we're done with before dhashing
@@ -934,6 +1029,7 @@ class Up2k(object):
zh = hashlib.sha1() zh = hashlib.sha1()
_ = [zh.update(str(x).encode("utf-8", "replace")) for x in files] _ = [zh.update(str(x).encode("utf-8", "replace")) for x in files]
zh.update(cv.encode("utf-8", "replace"))
zh.update(spack(b"<d", cst.st_mtime)) zh.update(spack(b"<d", cst.st_mtime))
dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii") dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
sql = "select d from dh where d = ? and h = ?" sql = "select d from dh where d = ? and h = ?"
@@ -947,6 +1043,18 @@ class Up2k(object):
if c.fetchone(): if c.fetchone():
return ret return ret
if cv and rd:
# mojibake not supported (for performance / simplicity):
try:
q = "select * from cv where rd=? and dn=? and +fn=?"
crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
if not db.c.execute(q, (crd, cdn, cv)).fetchone():
db.c.execute("delete from cv where rd=? and dn=?", (crd, cdn))
db.c.execute("insert into cv values (?,?,?)", (crd, cdn, cv))
db.n += 1
except Exception as ex:
self.log("cover {}/{} failed: {}".format(rd, cv, ex), 6)
seen_files = set([x[2] for x in files]) # for dropcheck seen_files = set([x[2] for x in files]) # for dropcheck
for sz, lmod, fn in files: for sz, lmod, fn in files:
if self.stop: if self.stop:
@@ -958,7 +1066,7 @@ class Up2k(object):
if fn: # diff-golf if fn: # diff-golf
sql = "select w, mt, sz from up where rd = ? and fn = ?" sql = "select w, mt, sz, at from up where rd = ? and fn = ?"
try: try:
c = db.c.execute(sql, (rd, fn)) c = db.c.execute(sql, (rd, fn))
except: except:
@@ -967,7 +1075,7 @@ class Up2k(object):
in_db = list(c.fetchall()) in_db = list(c.fetchall())
if in_db: if in_db:
self.pp.n -= 1 self.pp.n -= 1
dw, dts, dsz = in_db[0] dw, dts, dsz, at = in_db[0]
if len(in_db) > 1: if len(in_db) > 1:
t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}" t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
rep_db = "\n".join([repr(x) for x in in_db]) rep_db = "\n".join([repr(x) for x in in_db])
@@ -988,6 +1096,8 @@ class Up2k(object):
ret += 1 ret += 1
db.n += 1 db.n += 1
in_db = [] in_db = []
else:
at = 0
self.pp.msg = "a{} {}".format(self.pp.n, abspath) self.pp.msg = "a{} {}".format(self.pp.n, abspath)
@@ -1010,7 +1120,8 @@ class Up2k(object):
wark = up2k_wark_from_hashlist(self.salt, sz, hashes) wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
self.db_add(db.c, wark, rd, fn, lmod, sz, "", 0) # skip upload hooks by not providing vflags
self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, "", "", "", at)
db.n += 1 db.n += 1
ret += 1 ret += 1
td = time.time() - db.t td = time.time() - db.t
@@ -1140,6 +1251,22 @@ class Up2k(object):
if n_rm2: if n_rm2:
self.log("forgetting {} shadowed deleted files".format(n_rm2)) self.log("forgetting {} shadowed deleted files".format(n_rm2))
c2.connection.commit()
# then covers
n_rm3 = 0
qu = "select 1 from up where rd=? and +fn=? limit 1"
q = "delete from cv where rd=? and dn=? and +fn=?"
for crd, cdn, fn in cur.execute("select * from cv"):
urd = vjoin(crd, cdn)
if not c2.execute(qu, (urd, fn)).fetchone():
c2.execute(q, (crd, cdn, fn))
n_rm3 += 1
if n_rm3:
self.log("forgetting {} deleted covers".format(n_rm3))
c2.connection.commit()
c2.close() c2.close()
return n_rm + n_rm2 return n_rm + n_rm2
@@ -1292,6 +1419,7 @@ class Up2k(object):
cur, _ = reg cur, _ = reg
self._set_tagscan(cur, True) self._set_tagscan(cur, True)
cur.execute("delete from dh") cur.execute("delete from dh")
cur.execute("delete from cv")
cur.connection.commit() cur.connection.commit()
def _set_tagscan(self, cur: "sqlite3.Cursor", need: bool) -> bool: def _set_tagscan(self, cur: "sqlite3.Cursor", need: bool) -> bool:
@@ -1872,6 +2000,8 @@ class Up2k(object):
if ver == DB_VER: if ver == DB_VER:
try: try:
self._add_cv_tab(cur)
self._add_xiu_tab(cur)
self._add_dhash_tab(cur) self._add_dhash_tab(cur)
except: except:
pass pass
@@ -1965,6 +2095,8 @@ class Up2k(object):
cur.execute(cmd) cur.execute(cmd)
self._add_dhash_tab(cur) self._add_dhash_tab(cur)
self._add_xiu_tab(cur)
self._add_cv_tab(cur)
self.log("created DB at {}".format(db_path)) self.log("created DB at {}".format(db_path))
return cur return cur
@@ -1990,12 +2122,57 @@ class Up2k(object):
cur.connection.commit() cur.connection.commit()
def _add_xiu_tab(self, cur: "sqlite3.Cursor") -> None:
# v5a -> v5b
# store rd+fn rather than warks to support nohash vols
try:
cur.execute("select ws, rd, fn from iu limit 1").fetchone()
return
except:
pass
try:
cur.execute("drop table iu")
except:
pass
for cmd in [
r"create table iu (c int, w text, rd text, fn text)",
r"create index iu_c on iu(c)",
r"create index iu_w on iu(w)",
]:
cur.execute(cmd)
cur.connection.commit()
def _add_cv_tab(self, cur: "sqlite3.Cursor") -> None:
# v5b -> v5c
try:
cur.execute("select rd, dn, fn from cv limit 1").fetchone()
return
except:
pass
for cmd in [
r"create table cv (rd text, dn text, fn text)",
r"create index cv_i on cv(rd, dn)",
]:
cur.execute(cmd)
try:
cur.execute("delete from dh")
except:
pass
cur.connection.commit()
def _job_volchk(self, cj: dict[str, Any]) -> None: def _job_volchk(self, cj: dict[str, Any]) -> None:
if not self.register_vpath(cj["ptop"], cj["vcfg"]): if not self.register_vpath(cj["ptop"], cj["vcfg"]):
if cj["ptop"] not in self.registry: if cj["ptop"] not in self.registry:
raise Pebkac(410, "location unavailable") raise Pebkac(410, "location unavailable")
def handle_json(self, cj: dict[str, Any]) -> dict[str, Any]: def handle_json(self, cj: dict[str, Any], busy_aps: set[str]) -> dict[str, Any]:
self.busy_aps = busy_aps
try: try:
# bit expensive; 3.9=10x 3.11=2x # bit expensive; 3.9=10x 3.11=2x
if self.mutex.acquire(timeout=10): if self.mutex.acquire(timeout=10):
@@ -2009,11 +2186,12 @@ class Up2k(object):
with self.mutex: with self.mutex:
self._job_volchk(cj) self._job_volchk(cj)
ptop = cj["ptop"]
cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"]) cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"])
cj["poke"] = now = self.db_act = time.time() cj["poke"] = now = self.db_act = self.vol_act[ptop] = time.time()
wark = self._get_wark(cj) wark = self._get_wark(cj)
job = None job = None
pdir = djoin(cj["ptop"], cj["prel"]) pdir = djoin(ptop, cj["prel"])
try: try:
dev = bos.stat(pdir).st_dev dev = bos.stat(pdir).st_dev
except: except:
@@ -2024,7 +2202,6 @@ class Up2k(object):
sprs = self.fstab.get(pdir) != "ng" sprs = self.fstab.get(pdir) != "ng"
with self.mutex: with self.mutex:
ptop = cj["ptop"]
jcur = self.cur.get(ptop) jcur = self.cur.get(ptop)
reg = self.registry[ptop] reg = self.registry[ptop]
vfs = self.asrv.vfs.all_vols[cj["vtop"]] vfs = self.asrv.vfs.all_vols[cj["vtop"]]
@@ -2161,7 +2338,7 @@ class Up2k(object):
raise Pebkac(422, err) raise Pebkac(422, err)
elif "nodupe" in self.flags[cj["ptop"]]: elif "nodupe" in vfs.flags:
self.log("dupe-reject:\n {0}\n {1}".format(src, dst)) self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n" err = "upload rejected, file already exists:\n"
err += "/" + quotep(vsrc) + " " err += "/" + quotep(vsrc) + " "
@@ -2169,9 +2346,27 @@ class Up2k(object):
else: else:
# symlink to the client-provided name, # symlink to the client-provided name,
# returning the previous upload info # returning the previous upload info
psrc = src + ".PARTIAL"
if self.args.dotpart:
m = re.match(r"(.*[\\/])(.*)", psrc)
if m: # always true but...
zs1, zs2 = m.groups()
psrc = zs1 + "." + zs2
if (
src in self.busy_aps
or psrc in self.busy_aps
or (wark in reg and "done" not in reg[wark])
):
raise Pebkac(
422, "source file busy; please try again later"
)
job = deepcopy(job) job = deepcopy(job)
for k in "ptop vtop prel addr".split(): job["wark"] = wark
job[k] = cj[k] job["at"] = cj.get("at") or time.time()
for k in "lmod ptop vtop prel host user addr".split():
job[k] = cj.get(k) or ""
pdir = djoin(cj["ptop"], cj["prel"]) pdir = djoin(cj["ptop"], cj["prel"])
if rand: if rand:
@@ -2182,6 +2377,24 @@ class Up2k(object):
job["name"] = self._untaken(pdir, cj, now) job["name"] = self._untaken(pdir, cj, now)
dst = djoin(job["ptop"], job["prel"], job["name"]) dst = djoin(job["ptop"], job["prel"], job["name"])
xbu = vfs.flags.get("xbu")
if xbu and not runhook(
self.log,
xbu, # type: ignore
dst,
job["vtop"],
job["host"],
job["user"],
job["lmod"],
job["size"],
job["addr"],
job["at"],
"",
):
t = "upload blocked by xbu server config: {}".format(dst)
self.log(t, 1)
raise Pebkac(403, t)
if not self.args.nw: if not self.args.nw:
try: try:
dvf = self.flags[job["ptop"]] dvf = self.flags[job["ptop"]]
@@ -2192,10 +2405,10 @@ class Up2k(object):
                     if not n4g:
                         raise

-            if cur:
-                a = [job[x] for x in "prel name lmod size addr".split()]
-                a += [job.get("at") or time.time()]
-                self.db_add(cur, wark, *a)
+            if cur and not self.args.nw:
+                zs = "prel name lmod size ptop vtop wark host user addr at"
+                a = [job[x] for x in zs.split()]
+                self.db_add(cur, vfs.flags, *a)
                 cur.connection.commit()
if not job: if not job:
@@ -2269,6 +2482,7 @@ class Up2k(object):
if ( if (
not ret["hash"] not ret["hash"]
and "fk" in vfs.flags and "fk" in vfs.flags
and not self.args.nw
and (cj["user"] in vfs.axs.uread or cj["user"] in vfs.axs.upget) and (cj["user"] in vfs.axs.uread or cj["user"] in vfs.axs.upget)
): ):
ap = absreal(djoin(job["ptop"], job["prel"], job["name"])) ap = absreal(djoin(job["ptop"], job["prel"], job["name"]))
@@ -2366,16 +2580,13 @@ class Up2k(object):
if lmod and (not linked or SYMTIME): if lmod and (not linked or SYMTIME):
times = (int(time.time()), int(lmod)) times = (int(time.time()), int(lmod))
if ANYWIN:
self.lastmod_q.append((dst, 0, times, False))
else:
bos.utime(dst, times, False) bos.utime(dst, times, False)
def handle_chunk( def handle_chunk(
self, ptop: str, wark: str, chash: str self, ptop: str, wark: str, chash: str
) -> tuple[int, list[int], str, float, bool]: ) -> tuple[int, list[int], str, float, bool]:
with self.mutex: with self.mutex:
self.db_act = time.time() self.db_act = self.vol_act[ptop] = time.time()
job = self.registry[ptop].get(wark) job = self.registry[ptop].get(wark)
if not job: if not job:
known = " ".join([x for x in self.registry[ptop].keys()]) known = " ".join([x for x in self.registry[ptop].keys()])
@@ -2426,7 +2637,7 @@ class Up2k(object):
def confirm_chunk(self, ptop: str, wark: str, chash: str) -> tuple[int, str]: def confirm_chunk(self, ptop: str, wark: str, chash: str) -> tuple[int, str]:
with self.mutex: with self.mutex:
self.db_act = time.time() self.db_act = self.vol_act[ptop] = time.time()
try: try:
job = self.registry[ptop][wark] job = self.registry[ptop][wark]
pdir = djoin(job["ptop"], job["prel"]) pdir = djoin(job["ptop"], job["prel"])
@@ -2450,18 +2661,14 @@ class Up2k(object):
self.regdrop(ptop, wark) self.regdrop(ptop, wark)
return ret, dst return ret, dst
# windows cant rename open files
if not ANYWIN or src == dst:
self._finish_upload(ptop, wark)
return ret, dst return ret, dst
def finish_upload(self, ptop: str, wark: str) -> None: def finish_upload(self, ptop: str, wark: str, busy_aps: set[str]) -> None:
self.busy_aps = busy_aps
with self.mutex: with self.mutex:
self._finish_upload(ptop, wark) self._finish_upload(ptop, wark)
def _finish_upload(self, ptop: str, wark: str) -> None: def _finish_upload(self, ptop: str, wark: str) -> None:
self.db_act = time.time()
try: try:
job = self.registry[ptop][wark] job = self.registry[ptop][wark]
pdir = djoin(job["ptop"], job["prel"]) pdir = djoin(job["ptop"], job["prel"])
@@ -2470,40 +2677,29 @@ class Up2k(object):
except Exception as ex: except Exception as ex:
raise Pebkac(500, "finish_upload, wark, " + repr(ex)) raise Pebkac(500, "finish_upload, wark, " + repr(ex))
if job["need"]:
t = "finish_upload {} with remaining chunks {}"
raise Pebkac(500, t.format(wark, job["need"]))
# self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4) # self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
atomic_move(src, dst) atomic_move(src, dst)
upt = job.get("at") or time.time() upt = job.get("at") or time.time()
xau = self.flags[ptop].get("xau") vflags = self.flags[ptop]
if xau and not runhook(
self.log,
xau,
dst,
djoin(job["vtop"], job["prel"], job["name"]),
job["host"],
job["user"],
job["addr"],
upt,
job["size"],
"",
):
t = "upload blocked by xau"
self.log(t, 1)
bos.unlink(dst)
self.registry[ptop].pop(wark, None)
raise Pebkac(403, t)
times = (int(time.time()), int(job["lmod"])) times = (int(time.time()), int(job["lmod"]))
if ANYWIN: self.log(
z1 = (dst, job["size"], times, job["sprs"]) "no more chunks, setting times {} ({}) on {}".format(
self.lastmod_q.append(z1) times, bos.path.getsize(dst), dst
elif not job["hash"]: )
)
try: try:
bos.utime(dst, times) bos.utime(dst, times)
except: except:
pass self.log("failed to utime ({}, {})".format(dst, times))
z2 = [job[x] for x in "ptop wark prel name lmod size addr".split()] zs = "prel name lmod size ptop vtop wark host user addr"
z2 = [job[x] for x in zs.split()]
wake_sr = False wake_sr = False
try: try:
flt = job["life"] flt = job["life"]
@@ -2518,9 +2714,10 @@ class Up2k(object):
pass pass
z2 += [upt] z2 += [upt]
if self.idx_wark(*z2): if self.idx_wark(vflags, *z2):
del self.registry[ptop][wark] del self.registry[ptop][wark]
else: else:
self.registry[ptop][wark]["done"] = 1
self.regdrop(ptop, wark) self.regdrop(ptop, wark)
if wake_sr: if wake_sr:
@@ -2540,7 +2737,7 @@ class Up2k(object):
self._symlink(dst, d2, self.flags[ptop], lmod=lmod) self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
if cur: if cur:
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, *z2[-4:]) self.db_add(cur, vflags, rd, fn, lmod, *z2[3:])
if cur: if cur:
cur.connection.commit() cur.connection.commit()
@@ -2562,22 +2759,43 @@ class Up2k(object):
def idx_wark( def idx_wark(
self, self,
ptop: str, vflags: dict[str, Any],
wark: str,
rd: str, rd: str,
fn: str, fn: str,
lmod: float, lmod: float,
sz: int, sz: int,
ptop: str,
vtop: str,
wark: str,
host: str,
usr: str,
ip: str, ip: str,
at: float, at: float,
skip_xau: bool = False,
) -> bool: ) -> bool:
cur = self.cur.get(ptop) cur = self.cur.get(ptop)
if not cur: if not cur:
return False return False
self.db_act = self.vol_act[ptop] = time.time()
try: try:
self.db_rm(cur, rd, fn) self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, lmod, sz, ip, at) self.db_add(
cur,
vflags,
rd,
fn,
lmod,
sz,
ptop,
vtop,
wark,
host,
usr,
ip,
at,
skip_xau,
)
cur.connection.commit() cur.connection.commit()
except Exception as ex: except Exception as ex:
x = self.register_vpath(ptop, {}) x = self.register_vpath(ptop, {})
@@ -2602,13 +2820,19 @@ class Up2k(object):
def db_add( def db_add(
self, self,
db: "sqlite3.Cursor", db: "sqlite3.Cursor",
wark: str, vflags: dict[str, Any],
rd: str, rd: str,
fn: str, fn: str,
ts: float, ts: float,
sz: int, sz: int,
ptop: str,
vtop: str,
wark: str,
host: str,
usr: str,
ip: str, ip: str,
at: float, at: float,
skip_xau: bool = False,
) -> None: ) -> None:
sql = "insert into up values (?,?,?,?,?,?,?)" sql = "insert into up values (?,?,?,?,?,?,?)"
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0)) v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
@@ -2620,6 +2844,59 @@ class Up2k(object):
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0)) v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
db.execute(sql, v) db.execute(sql, v)
xau = False if skip_xau else vflags.get("xau")
dst = djoin(ptop, rd, fn)
if xau and not runhook(
self.log,
xau,
dst,
djoin(vtop, rd, fn),
host,
usr,
int(ts),
sz,
ip,
at or time.time(),
"",
):
t = "upload blocked by xau server config"
self.log(t, 1)
bos.unlink(dst)
self.registry[ptop].pop(wark, None)
raise Pebkac(403, t)
xiu = vflags.get("xiu")
if xiu:
cds: set[int] = set()
for cmd in xiu:
m = self.xiu_ptn.search(cmd)
cds.add(int(m.group(1)) if m else 5)
q = "insert into iu values (?,?,?,?)"
for cd in cds:
# one for each unique cooldown duration
try:
db.execute(q, (cd, wark[:16], rd, fn))
except:
assert self.mem_cur
rd, fn = s3enc(self.mem_cur, rd, fn)
db.execute(q, (cd, wark[:16], rd, fn))
if self.xiu_asleep:
self.xiu_asleep = False
with self.rescan_cond:
self.rescan_cond.notify_all()
if rd and sz and fn.lower() in self.args.th_covers:
# wasteful; db_add will re-index actual covers
# but that won't catch existing files
crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
try:
db.execute("delete from cv where rd=? and dn=?", (crd, cdn))
db.execute("insert into cv values (?,?,?)", (crd, cdn, fn))
except:
pass
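
With this refactor, db_add owns the xau (execute-after-upload) hook, so every path that indexes a finished file runs it, and a refusing hook deletes the file and aborts the request. Stripped of the database work, the veto pattern is approximately:

    import os
    from typing import Callable, Optional

    def index_upload(abspath: str, xau: Optional[Callable[[str], bool]] = None) -> None:
        # (the real code inserts into the up/iu/cv tables here)
        if xau and not xau(abspath):
            os.unlink(abspath)  # hook said no: drop the upload again
            raise PermissionError("upload blocked by xau server config")

    if __name__ == "__main__":
        with open("quarantine-me.bin", "wb") as f:
            f.write(b"\x00" * 16)
        try:
            index_upload("quarantine-me.bin", xau=lambda ap: not ap.endswith(".bin"))
        except PermissionError as ex:
            print("rejected:", ex)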
def handle_rm(self, uname: str, ip: str, vpaths: list[str], lim: list[int]) -> str: def handle_rm(self, uname: str, ip: str, vpaths: list[str], lim: list[int]) -> str:
n_files = 0 n_files = 0
ok = {} ok = {}
@@ -2677,6 +2954,7 @@ class Up2k(object):
ptop = vn.realpath ptop = vn.realpath
atop = vn.canonical(rem, False) atop = vn.canonical(rem, False)
self.vol_act[ptop] = self.db_act
adir, fn = os.path.split(atop) adir, fn = os.path.split(atop)
try: try:
st = bos.lstat(atop) st = bos.lstat(atop)
@@ -2710,18 +2988,31 @@ class Up2k(object):
self.log("hit delete limit of {} files".format(lim[1]), 3) self.log("hit delete limit of {} files".format(lim[1]), 3)
break break
n_files += 1
abspath = djoin(adir, fn) abspath = djoin(adir, fn)
volpath = "{}/{}".format(vrem, fn).strip("/") volpath = "{}/{}".format(vrem, fn).strip("/")
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/") vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
self.log("rm {}\n {}".format(vpath, abspath)) self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(volpath, uname, *permsets[0]) _ = dbv.get(volpath, uname, *permsets[0])
if xbd and not runhook( if xbd:
self.log, xbd, abspath, vpath, "", uname, "", 0, 0, "" st = bos.stat(abspath)
if not runhook(
self.log,
xbd,
abspath,
vpath,
"",
uname,
st.st_mtime,
st.st_size,
ip,
0,
"",
): ):
self.log("delete blocked by xbd: {}".format(abspath), 1) t = "delete blocked by xbd server config: {}"
self.log(t.format(abspath), 1)
continue continue
n_files += 1
with self.mutex: with self.mutex:
cur = None cur = None
try: try:
@@ -2734,23 +3025,23 @@ class Up2k(object):
bos.unlink(abspath) bos.unlink(abspath)
if xad: if xad:
runhook(self.log, xad, abspath, vpath, "", uname, "", 0, 0, "") runhook(self.log, xad, abspath, vpath, "", uname, 0, 0, ip, 0, "")
ok: list[str] = [] ok: list[str] = []
ng: list[str] = [] ng: list[str] = []
if is_dir: if is_dir:
ok, ng = rmdirs(self.log_func, scandir, True, atop, 1) ok, ng = rmdirs(self.log_func, scandir, True, atop, 1)
ok2, ng2 = rmdirs_up(os.path.dirname(atop)) ok2, ng2 = rmdirs_up(os.path.dirname(atop), ptop)
return n_files, ok + ok2, ng + ng2 return n_files, ok + ok2, ng + ng2
def handle_mv(self, uname: str, svp: str, dvp: str) -> str: def handle_mv(self, uname: str, svp: str, dvp: str) -> str:
self.db_act = time.time()
svn, srem = self.asrv.vfs.get(svp, uname, True, False, True) svn, srem = self.asrv.vfs.get(svp, uname, True, False, True)
svn, srem = svn.get_dbv(srem) svn, srem = svn.get_dbv(srem)
sabs = svn.canonical(srem, False) sabs = svn.canonical(srem, False)
curs: set["sqlite3.Cursor"] = set() curs: set["sqlite3.Cursor"] = set()
self.db_act = self.vol_act[svn.realpath] = time.time()
if not srem: if not srem:
raise Pebkac(400, "mv: cannot move a mountpoint") raise Pebkac(400, "mv: cannot move a mountpoint")
@@ -2786,7 +3077,7 @@ class Up2k(object):
with self.mutex: with self.mutex:
try: try:
for fn in files: for fn in files:
self.db_act = time.time() self.db_act = self.vol_act[dbv.realpath] = time.time()
svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x) svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x)
if not svpf.startswith(svp + "/"): # assert if not svpf.startswith(svp + "/"): # assert
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp)) raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
@@ -2800,7 +3091,7 @@ class Up2k(object):
curs.clear() curs.clear()
rmdirs(self.log_func, scandir, True, sabs, 1) rmdirs(self.log_func, scandir, True, sabs, 1)
rmdirs_up(os.path.dirname(sabs)) rmdirs_up(os.path.dirname(sabs), svn.realpath)
return "k" return "k"
def _mv_file( def _mv_file(
@@ -2829,8 +3120,12 @@ class Up2k(object):
xbr = svn.flags.get("xbr") xbr = svn.flags.get("xbr")
xar = dvn.flags.get("xar") xar = dvn.flags.get("xar")
if xbr and not runhook(self.log, xbr, sabs, svp, "", uname, "", 0, 0, ""): if xbr:
t = "move blocked by xbr: {}".format(svp) st = bos.stat(sabs)
if not runhook(
self.log, xbr, sabs, svp, "", uname, st.st_mtime, st.st_size, "", 0, ""
):
t = "move blocked by xbr server config: {}".format(svp)
self.log(t, 1) self.log(t, 1)
raise Pebkac(405, t) raise Pebkac(405, t)
@@ -2851,7 +3146,7 @@ class Up2k(object):
self.rescan_cond.notify_all() self.rescan_cond.notify_all()
if xar: if xar:
runhook(self.log, xar, dabs, dvp, "", uname, "", 0, 0, "") runhook(self.log, xar, dabs, dvp, "", uname, 0, 0, "", 0, "")
return "k" return "k"
@@ -2892,13 +3187,27 @@ class Up2k(object):
curs.add(c1) curs.add(c1)
if c2: if c2:
self.db_add(c2, w, drd, dfn, ftime, fsize, ip or "", at or 0) self.db_add(
c2,
{}, # skip upload hooks
drd,
dfn,
ftime,
fsize,
dvn.realpath,
dvn.vpath,
w,
"",
"",
ip or "",
at or 0,
)
curs.add(c2) curs.add(c2)
else: else:
self.log("not found in src db: [{}]".format(svp)) self.log("not found in src db: [{}]".format(svp))
if xar: if xar:
runhook(self.log, xar, dabs, dvp, "", uname, "", 0, 0, "") runhook(self.log, xar, dabs, dvp, "", uname, 0, 0, "", 0, "")
return "k" return "k"
@@ -3130,12 +3439,13 @@ class Up2k(object):
vp_chk, vp_chk,
job["host"], job["host"],
job["user"], job["user"],
job["addr"], int(job["lmod"]),
job["t0"],
job["size"], job["size"],
job["addr"],
int(job["t0"]),
"", "",
): ):
t = "upload blocked by xbu: {}".format(vp_chk) t = "upload blocked by xbu server config: {}".format(vp_chk)
self.log(t, 1) self.log(t, 1)
raise Pebkac(403, t) raise Pebkac(403, t)
@@ -3201,27 +3511,6 @@ class Up2k(object):
if not job["hash"]: if not job["hash"]:
self._finish_upload(job["ptop"], job["wark"]) self._finish_upload(job["ptop"], job["wark"])
def _lastmodder(self) -> None:
while True:
ready = self.lastmod_q2
self.lastmod_q2 = self.lastmod_q
self.lastmod_q = []
time.sleep(1)
for path, sz, times, sparse in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
bos.utime(path, times, False)
except:
t = "lmod: failed to utime ({}, {}):\n{}"
self.log(t.format(path, times, min_ex()))
if sparse and self.args.sparse and self.args.sparse * 1024 * 1024 <= sz:
try:
sp.check_call(["fsutil", "sparse", "setflag", path, "0"])
except:
self.log("could not unsparse [{}]".format(path), 3)
def _snapshot(self) -> None: def _snapshot(self) -> None:
slp = self.snap_persist_interval slp = self.snap_persist_interval
while True: while True:
@@ -3372,7 +3661,7 @@ class Up2k(object):
self.n_hashq -= 1 self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq)) # self.log("hashq {}".format(self.n_hashq))
ptop, rd, fn, ip, at = self.hashq.get() ptop, vtop, rd, fn, ip, at, usr, skip_xau = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn)) # self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]: if "e2d" not in self.flags[ptop]:
continue continue
@@ -3392,18 +3681,41 @@ class Up2k(object):
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.mutex: with self.mutex:
self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size, ip, at) self.idx_wark(
self.flags[ptop],
rd,
fn,
inf.st_mtime,
inf.st_size,
ptop,
vtop,
wark,
"",
usr,
ip,
at,
skip_xau,
)
if at and time.time() - at > 30: if at and time.time() - at > 30:
with self.rescan_cond: with self.rescan_cond:
self.rescan_cond.notify_all() self.rescan_cond.notify_all()
def hash_file( def hash_file(
self, ptop: str, flags: dict[str, Any], rd: str, fn: str, ip: str, at: float self,
ptop: str,
vtop: str,
flags: dict[str, Any],
rd: str,
fn: str,
ip: str,
at: float,
usr: str,
skip_xau: bool = False,
) -> None: ) -> None:
with self.mutex: with self.mutex:
self.register_vpath(ptop, flags) self.register_vpath(ptop, flags)
self.hashq.put((ptop, rd, fn, ip, at)) self.hashq.put((ptop, vtop, rd, fn, ip, at, usr, skip_xau))
self.n_hashq += 1 self.n_hashq += 1
# self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn)) # self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))

View File

@@ -31,7 +31,7 @@ from email.utils import formatdate
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
from queue import Queue from queue import Queue
from .__init__ import ANYWIN, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__version__ import S_BUILD_DT, S_VERSION from .__version__ import S_BUILD_DT, S_VERSION
from .stolen import surrogateescape from .stolen import surrogateescape
@@ -294,8 +294,7 @@ REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
 pybin = sys.executable or ""

-is_exe = bool(getattr(sys, "frozen", False))
-if is_exe:
+if EXE:
     pybin = ""
     for p in "python3 python".split():
         try:
@@ -669,6 +668,7 @@ class FHC(object):
def __init__(self) -> None: def __init__(self) -> None:
self.cache: dict[str, FHC.CE] = {} self.cache: dict[str, FHC.CE] = {}
self.aps: set[str] = set()
def close(self, path: str) -> None: def close(self, path: str) -> None:
try: try:
@@ -680,6 +680,7 @@ class FHC(object):
fh.close() fh.close()
del self.cache[path] del self.cache[path]
self.aps.remove(path)
def clean(self) -> None: def clean(self) -> None:
if not self.cache: if not self.cache:
@@ -700,6 +701,7 @@ class FHC(object):
return self.cache[path].fhs.pop() return self.cache[path].fhs.pop()
def put(self, path: str, fh: typing.BinaryIO) -> None: def put(self, path: str, fh: typing.BinaryIO) -> None:
self.aps.add(path)
try: try:
ce = self.cache[path] ce = self.cache[path]
ce.fhs.append(fh) ce.fhs.append(fh)
@@ -1850,13 +1852,21 @@ def _msaenc(txt: str) -> bytes:
return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape") return txt.replace("/", "\\").encode(FS_ENCODING, "surrogateescape")
def _uncify(txt: str) -> str:
txt = txt.replace("/", "\\")
if ":" not in txt and not txt.startswith("\\\\"):
txt = absreal(txt)
return txt if txt.startswith("\\\\") else "\\\\?\\" + txt
def _msenc(txt: str) -> bytes: def _msenc(txt: str) -> bytes:
txt = txt.replace("/", "\\") txt = txt.replace("/", "\\")
if ":" not in txt and not txt.startswith("\\\\"): if ":" not in txt and not txt.startswith("\\\\"):
txt = absreal(txt) txt = absreal(txt)
ret = txt.encode(FS_ENCODING, "surrogateescape") ret = txt.encode(FS_ENCODING, "surrogateescape")
return ret if ret.startswith(b"\\\\?\\") else b"\\\\?\\" + ret return ret if ret.startswith(b"\\\\") else b"\\\\?\\" + ret
w8dec = _w8dec3 if not PY2 else _w8dec2 w8dec = _w8dec3 if not PY2 else _w8dec2
@@ -1878,9 +1888,11 @@ if not PY2 and WINDOWS:
afsenc = _msaenc afsenc = _msaenc
fsenc = _msenc fsenc = _msenc
fsdec = _msdec fsdec = _msdec
uncify = _uncify
elif not PY2 or not WINDOWS: elif not PY2 or not WINDOWS:
fsenc = afsenc = sfsenc = w8enc fsenc = afsenc = sfsenc = w8enc
fsdec = w8dec fsdec = w8dec
uncify = str
else: else:
# moonrunes become \x3f with bytestrings, # moonrunes become \x3f with bytestrings,
# losing mojibake support is worth # losing mojibake support is worth
@@ -1892,6 +1904,7 @@ else:
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
fsdec = _not_actually_mbcs_dec fsdec = _not_actually_mbcs_dec
uncify = str
def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]: def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
@@ -2268,18 +2281,21 @@ def rmdirs(
     return ok, ng

-def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
+def rmdirs_up(top: str, stop: str) -> tuple[list[str], list[str]]:
     """rmdir on self, then all parents"""
+    if top == stop:
+        return [], [top]
+
     try:
         os.rmdir(fsenc(top))
     except:
         return [], [top]

     par = os.path.dirname(top)
-    if not par:
+    if not par or par == stop:
         return [top], []

-    ok, ng = rmdirs_up(par)
+    ok, ng = rmdirs_up(par, stop)
     return [top] + ok, ng
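
rmdirs_up now takes the volume root as a stop marker, so pruning empty parent directories can never climb past (or delete) the mountpoint itself. A runnable illustration of the same recursion, kept separate from the patched function above:

    import os
    import tempfile

    def prune_up(top: str, stop: str):
        """rmdir `top`, then its parents, but never `stop` or anything above it"""
        if top == stop:
            return [], [top]
        try:
            os.rmdir(top)
        except OSError:
            return [], [top]
        par = os.path.dirname(top)
        if not par or par == stop:
            return [top], []
        ok, ng = prune_up(par, stop)
        return [top] + ok, ng

    if __name__ == "__main__":
        root = tempfile.mkdtemp()
        leaf = os.path.join(root, "a", "b", "c")
        os.makedirs(leaf)
        print(prune_up(leaf, root))  # removes c, b, a; the volume root survives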
@@ -2513,23 +2529,14 @@ def retchk(
raise Exception(t) raise Exception(t)
def _runhook( def _parsehook(
log: "NamedLogger", log: Optional["NamedLogger"], cmd: str
cmd: str, ) -> tuple[bool, bool, bool, float, dict[str, Any], str]:
ap: str,
vp: str,
host: str,
uname: str,
ip: str,
at: float,
sz: int,
txt: str,
) -> bool:
chk = False chk = False
fork = False fork = False
jtxt = False jtxt = False
wait = 0 wait = 0.0
tout = 0 tout = 0.0
kill = "t" kill = "t"
cap = 0 cap = 0
ocmd = cmd ocmd = cmd
@@ -2549,13 +2556,15 @@ def _runhook(
cap = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both cap = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
elif arg.startswith("k"): elif arg.startswith("k"):
kill = arg[1:] # [t]ree [m]ain [n]one kill = arg[1:] # [t]ree [m]ain [n]one
elif arg.startswith("i"):
pass
else: else:
t = "hook: invalid flag {} in {}" t = "hook: invalid flag {} in {}"
log(t.format(arg, ocmd)) (log or print)(t.format(arg, ocmd))
env = os.environ.copy() env = os.environ.copy()
try: try:
if is_exe: if EXE:
raise Exception() raise Exception()
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
@@ -2563,25 +2572,95 @@ def _runhook(
pypath = str(os.pathsep.join(zsl)) pypath = str(os.pathsep.join(zsl))
env["PYTHONPATH"] = pypath env["PYTHONPATH"] = pypath
except: except:
if not is_exe: if not EXE:
raise raise
ka = { sp_ka = {
"env": env, "env": env,
"timeout": tout, "timeout": tout,
"kill": kill, "kill": kill,
"capture": cap, "capture": cap,
} }
if cmd.startswith("~"):
cmd = os.path.expanduser(cmd)
return chk, fork, jtxt, wait, sp_ka, cmd
def runihook(
log: Optional["NamedLogger"],
cmd: str,
vol: "VFS",
ups: list[tuple[str, int, int, str, str, str, int]],
) -> bool:
ocmd = cmd
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
bcmd = [sfsenc(cmd)]
if cmd.endswith(".py"):
bcmd = [sfsenc(pybin)] + bcmd
vps = [vjoin(*list(s3dec(x[3], x[4]))) for x in ups]
aps = [djoin(vol.realpath, x) for x in vps]
if jtxt:
# 0w 1mt 2sz 3rd 4fn 5ip 6at
ja = [
{
"ap": uncify(ap), # utf8 for json
"vp": vp,
"wark": x[0][:16],
"mt": x[1],
"sz": x[2],
"ip": x[5],
"at": x[6],
}
for x, vp, ap in zip(ups, vps, aps)
]
sp_ka["sin"] = json.dumps(ja).encode("utf-8", "replace")
else:
sp_ka["sin"] = b"\n".join(fsenc(x) for x in aps)
t0 = time.time()
if fork:
Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
else:
rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
if chk and rc:
retchk(rc, bcmd, err, log, 5)
return False
wait -= time.time() - t0
if wait > 0:
time.sleep(wait)
return True
def _runhook(
log: Optional["NamedLogger"],
cmd: str,
ap: str,
vp: str,
host: str,
uname: str,
mt: float,
sz: int,
ip: str,
at: float,
txt: str,
) -> bool:
ocmd = cmd
chk, fork, jtxt, wait, sp_ka, cmd = _parsehook(log, cmd)
if jtxt: if jtxt:
ja = { ja = {
"ap": ap, "ap": ap,
"vp": vp, "vp": vp,
"mt": mt,
"sz": sz,
"ip": ip, "ip": ip,
"at": at or time.time(),
"host": host, "host": host,
"user": uname, "user": uname,
"at": at or time.time(),
"sz": sz,
"txt": txt, "txt": txt,
} }
arg = json.dumps(ja) arg = json.dumps(ja)
@@ -2596,9 +2675,9 @@ def _runhook(
t0 = time.time() t0 = time.time()
if fork: if fork:
Daemon(runcmd, ocmd, [acmd], ka=ka) Daemon(runcmd, ocmd, [bcmd], ka=sp_ka)
else: else:
rc, v, err = runcmd(bcmd, **ka) # type: ignore rc, v, err = runcmd(bcmd, **sp_ka) # type: ignore
if chk and rc: if chk and rc:
retchk(rc, bcmd, err, log, 5) retchk(rc, bcmd, err, log, 5)
return False return False
@@ -2611,24 +2690,25 @@ def _runhook(
def runhook( def runhook(
log: "NamedLogger", log: Optional["NamedLogger"],
cmds: list[str], cmds: list[str],
ap: str, ap: str,
vp: str, vp: str,
host: str, host: str,
uname: str, uname: str,
mt: float,
sz: int,
ip: str, ip: str,
at: float, at: float,
sz: int,
txt: str, txt: str,
) -> bool: ) -> bool:
vp = vp.replace("\\", "/") vp = vp.replace("\\", "/")
for cmd in cmds: for cmd in cmds:
try: try:
if not _runhook(log, cmd, ap, vp, host, uname, ip, at, sz, txt): if not _runhook(log, cmd, ap, vp, host, uname, mt, sz, ip, at, txt):
return False return False
except Exception as ex: except Exception as ex:
log("hook: {}".format(ex)) (log or print)("hook: {}".format(ex))
if ",c," in "," + cmd: if ",c," in "," + cmd:
return False return False
break break
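
runhook itself is still a simple chain: hooks run in order, a falsy result vetoes the operation, and an exception only counts as a veto when the hook was registered with the c (check) flag. A trimmed sketch of that loop with a stub in place of _runhook:

    from typing import Callable, Optional

    def run_hooks(
        cmds: list,
        run_one: Callable[[str], bool],
        log: Optional[Callable[[str], None]] = None,
    ) -> bool:
        # False as soon as a hook vetoes; a crash is fatal only for ,c, hooks
        for cmd in cmds:
            try:
                if not run_one(cmd):
                    return False
            except Exception as ex:
                (log or print)("hook: {}".format(ex))
                if ",c," in "," + cmd:
                    return False
                break
        return True

    if __name__ == "__main__":
        def fake(cmd: str) -> bool:
            if "boom" in cmd:
                raise RuntimeError("hook crashed")
            return "deny" not in cmd

        print(run_hooks(["f,notify.py", "c,boom.py"], fake))  # -> False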

View File

@@ -93,6 +93,7 @@
--g-fsel-bg: #d39; --g-fsel-bg: #d39;
--g-fsel-b1: #f4a; --g-fsel-b1: #f4a;
--g-fsel-ts: #804; --g-fsel-ts: #804;
--g-dfg: var(--srv-3);
--g-fg: var(--a-hil); --g-fg: var(--a-hil);
--g-bg: var(--bg-u2); --g-bg: var(--bg-u2);
--g-b1: var(--bg-u4); --g-b1: var(--bg-u4);
@@ -327,6 +328,7 @@ html.c {
} }
html.cz { html.cz {
--bgg: var(--bg-u2); --bgg: var(--bg-u2);
--srv-3: #fff;
} }
html.cy { html.cy {
--fg: #fff; --fg: #fff;
@@ -354,6 +356,7 @@ html.cy {
--chk-fg: #fd0; --chk-fg: #fd0;
--srv-1: #f00; --srv-1: #f00;
--srv-3: #fff;
--op-aa-bg: #fff; --op-aa-bg: #fff;
--u2-b1-bg: #f00; --u2-b1-bg: #f00;
@@ -793,6 +796,8 @@ html.y #path a:hover {
} }
.logue { .logue {
padding: .2em 0; padding: .2em 0;
position: relative;
z-index: 1;
} }
.logue.hidden, .logue.hidden,
.logue:empty { .logue:empty {
@@ -964,6 +969,9 @@ html.y #path a:hover {
#ggrid>a.dir:before { #ggrid>a.dir:before {
content: '📂'; content: '📂';
} }
#ggrid>a.dir>span {
color: var(--g-dfg);
}
#ggrid>a.au:before { #ggrid>a.au:before {
content: '💾'; content: '💾';
} }
@@ -1010,6 +1018,9 @@ html.np_open #ggrid>a.au:before {
background: var(--g-sel-bg); background: var(--g-sel-bg);
border-color: var(--g-sel-b1); border-color: var(--g-sel-b1);
} }
#ggrid>a.sel>span {
color: var(--g-sel-fg);
}
#ggrid>a.sel, #ggrid>a.sel,
#ggrid>a[tt].sel { #ggrid>a[tt].sel {
border-top: 1px solid var(--g-fsel-b1); border-top: 1px solid var(--g-fsel-b1);
@@ -1321,6 +1332,10 @@ html.y #ops svg circle {
padding: .3em .6em; padding: .3em .6em;
white-space: nowrap; white-space: nowrap;
} }
#noie {
color: #b60;
margin: 0 0 0 .5em;
}
.opbox { .opbox {
padding: .5em; padding: .5em;
border-radius: 0 .3em .3em 0; border-radius: 0 .3em .3em 0;

View File

@@ -155,6 +155,7 @@
sb_lg = "{{ sb_lg }}", sb_lg = "{{ sb_lg }}",
lifetime = {{ lifetime }}, lifetime = {{ lifetime }},
turbolvl = {{ turbolvl }}, turbolvl = {{ turbolvl }},
idxh = {{ idxh }},
frand = {{ frand|tojson }}, frand = {{ frand|tojson }},
u2sort = "{{ u2sort }}", u2sort = "{{ u2sort }}",
have_emp = {{ have_emp|tojson }}, have_emp = {{ have_emp|tojson }},

View File

@@ -110,6 +110,7 @@ var Ls = {
"ot_cfg": "configuration options", "ot_cfg": "configuration options",
"ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than [🎈]&nbsp; (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!', "ot_u2i": 'up2k: upload files (if you have write-access) or toggle into the search-mode to see if they exist somewhere on the server$N$Nuploads are resumable, multithreaded, and file timestamps are preserved, but it uses more CPU than [🎈]&nbsp; (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!',
"ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than [🎈]&nbsp; (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!', "ot_u2w": 'up2k: upload files with resume support (close your browser and drop the same files in later)$N$Nmultithreaded, and file timestamps are preserved, but it uses more CPU than [🎈]&nbsp; (the basic uploader)<br /><br />during uploads, this icon becomes a progress indicator!',
"ot_noie": 'Please use Chrome / Firefox / Edge',
"ab_mkdir": "make directory", "ab_mkdir": "make directory",
"ab_mkdoc": "new markdown doc", "ab_mkdoc": "new markdown doc",
@@ -192,6 +193,7 @@ var Ls = {
"ct_dots": "show hidden files (if server permits)", "ct_dots": "show hidden files (if server permits)",
"ct_dir1st": "sort folders before files", "ct_dir1st": "sort folders before files",
"ct_readme": "show README.md in folder listings", "ct_readme": "show README.md in folder listings",
"ct_idxh": "show index.html instead of folder listing",
"ct_sbars": "show scrollbars", "ct_sbars": "show scrollbars",
"cut_turbo": "the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>&quot;does this have the same filesize on the server?&quot;</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then &quot;upload&quot; the same files again to let the client verify them", "cut_turbo": "the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>&quot;does this have the same filesize on the server?&quot;</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then &quot;upload&quot; the same files again to let the client verify them",
@@ -258,6 +260,10 @@ var Ls = {
"mm_e404": "Could not play audio; error 404: File not found.", "mm_e404": "Could not play audio; error 404: File not found.",
"mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out", "mm_e403": "Could not play audio; error 403: Access denied.\n\nTry pressing F5 to reload, maybe you got logged out",
"mm_e5xx": "Could not play audio; server error ", "mm_e5xx": "Could not play audio; server error ",
"mm_nof": "not finding any more audio files nearby",
"mm_hnf": "that song no longer exists",
"im_hnf": "that image no longer exists",
"f_chide": 'this will hide the column «{0}»\n\nyou can unhide columns in the settings tab', "f_chide": 'this will hide the column «{0}»\n\nyou can unhide columns in the settings tab',
"f_bigtxt": "this file is {0} MiB large -- really view as text?", "f_bigtxt": "this file is {0} MiB large -- really view as text?",
@@ -293,6 +299,7 @@ var Ls = {
"fd_ok": "delete OK", "fd_ok": "delete OK",
"fd_err": "delete failed:\n", "fd_err": "delete failed:\n",
"fd_none": "nothing was deleted; maybe blocked by server config (xbd)?",
"fd_busy": "deleting {0} items...\n\n{1}", "fd_busy": "deleting {0} items...\n\n{1}",
"fd_warn1": "DELETE these {0} items?", "fd_warn1": "DELETE these {0} items?",
"fd_warn2": "<b>Last chance!</b> No way to undo. Delete?", "fd_warn2": "<b>Last chance!</b> No way to undo. Delete?",
@@ -450,7 +457,7 @@ var Ls = {
"ur_aun": "All {0} uploads failed, sorry", "ur_aun": "All {0} uploads failed, sorry",
"ur_1sn": "File was NOT found on server", "ur_1sn": "File was NOT found on server",
"ur_asn": "The {0} files were NOT found on server", "ur_asn": "The {0} files were NOT found on server",
"ur_um": "Finished;\n{0} uplads OK,\n{1} uploads failed, sorry", "ur_um": "Finished;\n{0} uploads OK,\n{1} uploads failed, sorry",
"ur_sm": "Finished;\n{0} files found on server,\n{1} files NOT found on server", "ur_sm": "Finished;\n{0} files found on server,\n{1} files NOT found on server",
"lang_set": "refresh to make the change take effect?", "lang_set": "refresh to make the change take effect?",
@@ -563,6 +570,7 @@ var Ls = {
"ot_cfg": "andre innstillinger", "ot_cfg": "andre innstillinger",
"ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈]&nbsp; (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!', "ot_u2i": 'up2k: last opp filer (hvis du har skrivetilgang) eller bytt til søkemodus for å sjekke om filene finnes et-eller-annet sted på serveren$N$Nopplastninger kan gjenopptas etter avbrudd, skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈]&nbsp; (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
"ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈]&nbsp; (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!', "ot_u2w": 'up2k: filopplastning med støtte for å gjenoppta avbrutte opplastninger -- steng ned nettleseren og dra de samme filene inn i nettleseren igjen for å plukke opp igjen der du slapp$N$Nopplastninger skjer stykkevis for potensielt høyere ytelse, og ivaretar datostempling -- men bruker litt mer prosessorkraft enn [🎈]&nbsp; (den primitive opplasteren "bup")<br /><br />mens opplastninger foregår så vises fremdriften her oppe!',
"ot_noie": 'Fungerer mye bedre i Chrome / Firefox / Edge',
"ab_mkdir": "lag mappe", "ab_mkdir": "lag mappe",
"ab_mkdoc": "nytt dokument", "ab_mkdoc": "nytt dokument",
@@ -645,6 +653,7 @@ var Ls = {
"ct_dots": "vis skjulte filer (gitt at serveren tillater det)", "ct_dots": "vis skjulte filer (gitt at serveren tillater det)",
"ct_dir1st": "sorter slik at mapper kommer foran filer", "ct_dir1st": "sorter slik at mapper kommer foran filer",
"ct_readme": "vis README.md nedenfor filene", "ct_readme": "vis README.md nedenfor filene",
"ct_idxh": "vis index.html istedenfor fil-liste",
"ct_sbars": "vis rullgardiner / skrollefelt", "ct_sbars": "vis rullgardiner / skrollefelt",
"cut_turbo": "forenklet befaring ved opplastning; bør sannsynlig <em>ikke</em> skrus på:$N$Nnyttig dersom du var midt i en svær opplastning som måtte restartes av en eller annen grunn, og du vil komme igang igjen så raskt som overhodet mulig.$N$Nnår denne er skrudd på så forenkles befaringen kraftig; istedenfor å utføre en trygg sjekk på om filene finnes på serveren i god stand, så sjekkes kun om <em>filstørrelsen</em> stemmer. Så dersom en korrupt fil skulle befinne seg på serveren allerede, på samme sted med samme størrelse og navn, så blir det <em>ikke oppdaget</em>.$N$Ndet anbefales å kun benytte denne funksjonen for å komme seg raskt igjennom selve opplastningen, for så å skru den av, og til slutt &quot;laste opp&quot; de samme filene én gang til -- slik at integriteten kan verifiseres", "cut_turbo": "forenklet befaring ved opplastning; bør sannsynlig <em>ikke</em> skrus på:$N$Nnyttig dersom du var midt i en svær opplastning som måtte restartes av en eller annen grunn, og du vil komme igang igjen så raskt som overhodet mulig.$N$Nnår denne er skrudd på så forenkles befaringen kraftig; istedenfor å utføre en trygg sjekk på om filene finnes på serveren i god stand, så sjekkes kun om <em>filstørrelsen</em> stemmer. Så dersom en korrupt fil skulle befinne seg på serveren allerede, på samme sted med samme størrelse og navn, så blir det <em>ikke oppdaget</em>.$N$Ndet anbefales å kun benytte denne funksjonen for å komme seg raskt igjennom selve opplastningen, for så å skru den av, og til slutt &quot;laste opp&quot; de samme filene én gang til -- slik at integriteten kan verifiseres",
@@ -711,6 +720,10 @@ var Ls = {
"mm_e404": "Avspilling feilet: Fil ikke funnet.", "mm_e404": "Avspilling feilet: Fil ikke funnet.",
"mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.", "mm_e403": "Avspilling feilet: Tilgang nektet.\n\nKanskje du ble logget ut?\nPrøv å trykk F5 for å laste siden på nytt.",
"mm_e5xx": "Avspilling feilet: ", "mm_e5xx": "Avspilling feilet: ",
"mm_nof": "finner ikke flere sanger i nærheten",
"mm_hnf": "sangen finnes ikke lenger",
"im_hnf": "bildet finnes ikke lenger",
"f_chide": 'dette vil skjule kolonnen «{0}»\n\nfanen for "andre innstillinger" lar deg vise kolonnen igjen', "f_chide": 'dette vil skjule kolonnen «{0}»\n\nfanen for "andre innstillinger" lar deg vise kolonnen igjen',
"f_bigtxt": "denne filen er hele {0} MiB -- vis som tekst?", "f_bigtxt": "denne filen er hele {0} MiB -- vis som tekst?",
@@ -746,6 +759,7 @@ var Ls = {
"fd_ok": "sletting OK", "fd_ok": "sletting OK",
"fd_err": "sletting feilet:\n", "fd_err": "sletting feilet:\n",
"fd_none": "ingenting ble slettet; kanskje avvist av serverkonfigurasjon (xbd)?",
"fd_busy": "sletter {0} filer...\n\n{1}", "fd_busy": "sletter {0} filer...\n\n{1}",
"fd_warn1": "SLETT disse {0} filene?", "fd_warn1": "SLETT disse {0} filene?",
"fd_warn2": "<b>Siste sjanse!</b> Dette kan ikke angres. Slett?", "fd_warn2": "<b>Siste sjanse!</b> Dette kan ikke angres. Slett?",
@@ -930,6 +944,7 @@ ebi('ops').innerHTML = (
'<a href="#" data-perm="write" data-dest="msg" tt="' + L.ot_msg + '">📟</a>' + '<a href="#" data-perm="write" data-dest="msg" tt="' + L.ot_msg + '">📟</a>' +
'<a href="#" data-dest="player" tt="' + L.ot_mp + '">🎺</a>' + '<a href="#" data-dest="player" tt="' + L.ot_mp + '">🎺</a>' +
'<a href="#" data-dest="cfg" tt="' + L.ot_cfg + '">⚙️</a>' + '<a href="#" data-dest="cfg" tt="' + L.ot_cfg + '">⚙️</a>' +
(IE ? '<span id="noie">' + L.ot_noie + '</span>' : '') +
'<div id="opdesc"></div>' '<div id="opdesc"></div>'
); );
@@ -1070,6 +1085,7 @@ ebi('op_cfg').innerHTML = (
' <a id="dotfiles" class="tgl btn" href="#" tt="' + L.ct_dots + '">dotfiles</a>\n' + ' <a id="dotfiles" class="tgl btn" href="#" tt="' + L.ct_dots + '">dotfiles</a>\n' +
' <a id="dir1st" class="tgl btn" href="#" tt="' + L.ct_dir1st + '">📁 first</a>\n' + ' <a id="dir1st" class="tgl btn" href="#" tt="' + L.ct_dir1st + '">📁 first</a>\n' +
' <a id="ireadme" class="tgl btn" href="#" tt="' + L.ct_readme + '">📜 readme</a>\n' + ' <a id="ireadme" class="tgl btn" href="#" tt="' + L.ct_readme + '">📜 readme</a>\n' +
' <a id="idxh" class="tgl btn" href="#" tt="' + L.ct_idxh + '">htm</a>\n' +
' <a id="sbars" class="tgl btn" href="#" tt="' + L.ct_sbars + '">⟊</a>\n' + ' <a id="sbars" class="tgl btn" href="#" tt="' + L.ct_sbars + '">⟊</a>\n' +
' </div>\n' + ' </div>\n' +
'</div>\n' + '</div>\n' +
@@ -1265,6 +1281,7 @@ function set_files_html(html) {
var ACtx = window.AudioContext || window.webkitAudioContext,
+noih = /[?&]v\b/.exec('' + location),
hash0 = location.hash,
mp;
@@ -1307,6 +1324,7 @@ var mpl = (function () {
var r = {
"pb_mode": (sread('pb_mode') || 'next').split('-')[0],
"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
+'traversals': 0,
};
bcfg_bind(r, 'preload', 'au_preload', true);
bcfg_bind(r, 'fullpre', 'au_fullpre', false);
@@ -1481,7 +1499,7 @@ catch (ex) { }
var re_au_native = can_ogg ? /\.(aac|flac|m4a|mp3|ogg|opus|wav)$/i :
have_acode ? /\.(aac|flac|m4a|mp3|opus|wav)$/i : /\.(aac|flac|m4a|mp3|wav)$/i,
-re_au_all = /\.(aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|itgz|itr|itz|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3gz|s3m|s3r|s3z|tak|tta|ulaw|wav|wma|wv|xm|xmgz|xmr|xmz|xpk)$/i;
+re_au_all = /\.(aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk)$/i;
// extract songs + add play column
@@ -2088,7 +2106,15 @@ function song_skip(n) {
}
function next_song(e) {
ev(e);
+if (mp.order.length) {
+mpl.traversals = 0;
return song_skip(1);
+}
+if (mpl.traversals++ < 5) {
+treectl.ls_cb = next_song;
+return tree_neigh(1);
+}
+toast.inf(10, L.mm_nof);
}
function prev_song(e) {
ev(e);
@@ -2577,7 +2603,7 @@ function play(tid, is_ev, seek) {
if ((tn + '').indexOf('f-') === 0) {
tn = mp.order.indexOf(tn);
if (tn < 0)
-return;
+return toast.warn(10, L.mm_hnf);
}
if (tn >= mp.order.length) {
@@ -2845,6 +2871,9 @@ function eval_hash() {
clearInterval(t);
baguetteBox.urltime(ts);
var im = QS('#ggrid a[ref="' + id + '"]');
+if (!im)
+return toast.warn(10, L.im_hnf);
im.click();
im.scrollIntoView();
}, 50);
@@ -3423,12 +3452,14 @@ var fileman = (function () {
if (!sel.length)
return toast.err(3, L.fd_emore);
-function deleter() {
+function deleter(err) {
var xhr = new XHR(),
vp = vps.shift();
if (!vp) {
+if (err !== 'xbd')
toast.ok(2, L.fd_ok);
treectl.goto(get_evpath());
return;
}
@@ -3444,6 +3475,10 @@ var fileman = (function () {
toast.err(9, L.fd_err + msg);
return;
}
+if (this.responseText.indexOf('deleted 0 files (and 0') + 1) {
+toast.err(9, L.fd_none);
+return deleter('xbd');
+}
deleter();
}
@@ -4841,7 +4876,7 @@ document.onkeydown = function (e) {
var html = mk_files_header(tagord), seen = {};
html.push('<tbody>');
-html.push('<tr class="srch_hdr"><td>-</td><td><a href="#" id="unsearch"><big style="font-weight:bold">[❌] ' + L.sl_close + '</big></a> -- ' + L.sl_hits.format(res.hits.length) + (res.hits.length == cap ? ' -- <a href="#" id="moar">' + L.sl_moar + '</a>' : '') + '</td></tr>');
+html.push('<tr class="srch_hdr"><td>-</td><td><a href="#" id="unsearch"><big style="font-weight:bold">[❌] ' + L.sl_close + '</big></a> -- ' + L.sl_hits.format(res.hits.length) + (res.trunc ? ' -- <a href="#" id="moar">' + L.sl_moar + '</a>' : '') + '</td></tr>');
for (var a = 0; a < res.hits.length; a++) {
var r = res.hits[a],
@@ -4955,6 +4990,7 @@ var treectl = (function () {
treesz = clamp(icfg_get('treesz', 16), 10, 50);
bcfg_bind(r, 'ireadme', 'ireadme', true);
+bcfg_bind(r, 'idxh', 'idxh', idxh, setidxh);
bcfg_bind(r, 'dyn', 'dyntree', true, onresize);
bcfg_bind(r, 'dots', 'dotfiles', false, function (v) {
r.goto(get_evpath());
@@ -4979,6 +5015,16 @@ var treectl = (function () {
}
setwrap(r.wtree);
+function setidxh(v) {
+if (!v == !/\bidxh=y\b/.exec('' + document.cookie))
+return;
+var xhr = new XHR();
+xhr.open('GET', SR + '/?setck=idxh=' + (v ? 'y' : 'n'), true);
+xhr.send();
+}
+setidxh(r.idxh);
r.entree = function (e, nostore) {
ev(e);
entreed = true;
@@ -5407,6 +5453,9 @@ var treectl = (function () {
return;
}
+if (r.chk_index_html(this.top, res))
+return;
for (var a = 0; a < res.files.length; a++)
if (res.files[a].tags === undefined)
res.files[a].tags = {};
@@ -5454,6 +5503,17 @@ var treectl = (function () {
}
}
+r.chk_index_html = function (top, res) {
+if (!r.idxh || !res || !res.files || noih)
+return;
+for (var a = 0; a < res.files.length; a++)
+if (/^index.html?(\?|$)/i.exec(res.files[a].href)) {
+window.location = vjoin(top, res.files[a].href);
+return true;
+}
+};
r.gentab = function (top, res) {
var nodes = res.dirs.concat(res.files),
html = mk_files_header(res.taglist),
@@ -5574,14 +5634,18 @@ var treectl = (function () {
qsr('#bbsw');
if (ls0 === null) {
var xhr = new XHR();
-xhr.open('GET', SR + '/?am_js', true);
+xhr.open('GET', SR + '/?setck=js=y', true);
xhr.send();
r.ls_cb = showfile.addlinks;
return r.reqls(get_evpath(), false);
}
-r.gentab(get_evpath(), ls0);
+var top = get_evpath();
+if (r.chk_index_html(top, ls0))
+return;
+r.gentab(top, ls0);
pbar.onresize();
vbar.onresize();
showfile.addlinks();
@@ -5805,7 +5869,7 @@ function apply_perms(res) {
ebi('acc_info').innerHTML = '<span id="srv_info2"><span>' + srvinf +
'</span></span><span' + aclass + axs + L.access + '</span>' + (acct != '*' ?
'<a href="' + SR + '/?pw=x">' + L.logout + acct + '</a>' :
-'<a href="' + SR + '/?h">Login</a>');
+'<a href="?h">Login</a>');
var o = QSA('#ops>a[data-perm]');
for (var a = 0; a < o.length; a++) {

View File

@@ -36,6 +36,11 @@ a {
td a {
margin: 0;
}
+#w {
+color: #fff;
+background: #940;
+border-color: #b70;
+}
.af,
.logout {
float: right;
@@ -175,15 +180,19 @@ html.z a.g {
border-color: #af4;
box-shadow: 0 .3em 1em #7d0;
}
-html.z input {
+input {
-color: #fff;
+color: #a50;
-background: #626;
+background: #fff;
-border: 1px solid #c2c;
+border: 1px solid #a50;
+border-width: 1px 0 0 0;
border-radius: .5em;
padding: .5em .7em;
margin: 0 .5em 0 0;
}
+html.z input {
+color: #fff;
+background: #626;
+border-color: #c2c;
+}
html.z .num {
border-color: #777;
}

View File

@@ -89,13 +89,16 @@
</ul>
<h1 id="l">login for more:</h1>
-<ul>
+<div>
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
<input type="hidden" name="act" value="login" />
<input type="password" name="cppwd" />
<input type="submit" value="Login" />
+{% if ahttps %}
+<a id="w" href="{{ ahttps }}">switch to https</a>
+{% endif %}
</form>
-</ul>
+</div>
</div>
<a href="#" id="repl">π</a>
{%- if not this.args.nb %}

View File

@@ -25,7 +25,8 @@ var Ls = {
"t1": "handling", "t1": "handling",
"u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder", "u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
"v1": "koble til", "v1": "koble til",
"v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!" "v2": "bruk denne serveren som en lokal harddisk$N$NADVARSEL: kommer til å vise passordet ditt!",
"w1": "bytt til https",
}, },
"eng": { "eng": {
"d2": "shows the state of all active threads", "d2": "shows the state of all active threads",

View File

@@ -46,7 +46,7 @@
<p><em>note: rclone-FTP is a bit faster, so {% if args.ftp or args.ftps %}try that first{% else %}consider enabling FTP in server settings{% endif %}</em></p>
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
<pre>
-rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
+rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
</pre>
{% if s %}
@@ -64,9 +64,14 @@
yum install davfs2
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
</pre>
-<p>or you can use rclone instead, which is much slower but doesn't require root:</p>
+<p>make it automount on boot:</p>
<pre>
-rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>{{ pw }}</b> k" >> /etc/davfs2/secrets
+printf '%s\n' "http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b> davfs rw,user,uid=1000,noauto 0 0" >> /etc/fstab
+</pre>
+<p>or you can use rclone instead, which is much slower but doesn't require root (plus it keeps lastmodified on upload):</p>
+<pre>
+rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
</pre>
{% if s %}

View File

@@ -114,10 +114,10 @@ function up2k_flagbus() {
do_take(now);
return;
}
-if (flag.owner && now - flag.owner[1] > 5000) {
+if (flag.owner && now - flag.owner[1] > 12000) {
flag.owner = null;
}
-if (flag.wants && now - flag.wants[1] > 5000) {
+if (flag.wants && now - flag.wants[1] > 12000) {
flag.wants = null;
}
if (!flag.owner && !flag.wants) {
@@ -772,6 +772,7 @@ function fsearch_explain(n) {
function up2k_init(subtle) {
var r = {
+"tact": Date.now(),
"init_deps": init_deps,
"set_fsearch": set_fsearch,
"gotallfiles": [gotallfiles] // hooks
@@ -1452,7 +1453,7 @@ function up2k_init(subtle) {
});
};
-var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0;
+var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0, utw_card = 0;
function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1465,6 +1466,12 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
+if (utw_card != pvis.act) {
+utw_card = pvis.act;
+utw_read = 9001;
+ebi('u2tabw').style.minHeight = '0px';
+}
if (++utw_read >= 20) {
utw_read = 0;
utw_minh = parseInt(ebi('u2tabw').style.minHeight || '0');
@@ -1641,8 +1648,14 @@ function up2k_init(subtle) {
running = true;
while (true) {
var now = Date.now(),
+blocktime = now - r.tact,
is_busy = st.car < st.files.length;
+if (blocktime > 2500)
+console.log('main thread blocked for ' + blocktime);
+r.tact = now;
if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
@@ -1782,6 +1795,15 @@ function up2k_init(subtle) {
})();
function uptoast() {
+if (st.busy.handshake.length)
+return;
+for (var a = 0; a < st.files.length; a++) {
+var t = st.files[a];
+if (t.want_recheck && !t.rechecks)
+return;
+}
var sr = uc.fsearch,
ok = pvis.ctr.ok,
ng = pvis.ctr.ng,
@@ -2037,6 +2059,8 @@ function up2k_init(subtle) {
nbusy++;
reading++;
nchunk++;
+if (Date.now() - up2k.tact > 1500)
+tasker();
}
function onmsg(d) {
@@ -2367,16 +2391,17 @@ function up2k_init(subtle) {
}
var err_pend = rsp.indexOf('partial upload exists at a different') + 1,
+err_srcb = rsp.indexOf('source file busy; please try again') + 1,
err_plug = rsp.indexOf('upload blocked by x') + 1,
err_dupe = rsp.indexOf('upload rejected, file already exists') + 1;
-if (err_pend || err_plug || err_dupe) {
+if (err_pend || err_srcb || err_plug || err_dupe) {
err = rsp;
ofs = err.indexOf('\n/');
if (ofs !== -1) {
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
}
-if (!t.rechecks && err_pend) {
+if (!t.rechecks && (err_pend || err_srcb)) {
t.rechecks = 0;
t.want_recheck = true;
}

View File

@@ -17,6 +17,7 @@ var wah = '',
MOBILE = TOUCH,
CHROME = !!window.chrome,
VCHROME = CHROME ? 1 : 0,
+IE = /Trident\//.test(navigator.userAgent),
FIREFOX = ('netscape' in window) && / rv:/.test(navigator.userAgent),
IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
LINUX = /Linux/.test(navigator.userAgent),
@@ -111,12 +112,13 @@ if ((document.location + '').indexOf(',rej,') + 1)
try {
console.hist = [];
+var CMAXHIST = 100;
var hook = function (t) {
var orig = console[t].bind(console),
cfun = function () {
console.hist.push(Date.now() + ' ' + t + ': ' + Array.from(arguments).join(', '));
-if (console.hist.length > 100)
+if (console.hist.length > CMAXHIST)
-console.hist = console.hist.slice(50);
+console.hist = console.hist.slice(CMAXHIST / 2);
orig.apply(console, arguments);
};
@@ -331,6 +333,25 @@ if (!String.prototype.format)
});
};
try {
new URL('/a/', 'https://a.com/');
}
catch (ex) {
console.log('ie11 shim URL()');
window.URL = function (url, base) {
if (url.indexOf('//') < 0)
url = base + '/' + url.replace(/^\/?/, '');
else if (url.indexOf('//') == 0)
url = 'https:' + url;
var x = url.split('?');
return {
"pathname": '/' + x[0].split('://')[1].replace(/[^/]+\//, ''),
"search": x.length > 1 ? x[1] : ''
};
}
}
// https://stackoverflow.com/a/950146
function import_js(url, cb) {
var head = document.head || document.getElementsByTagName('head')[0];
@@ -611,6 +632,29 @@ function vsplit(vp) {
}
function vjoin(p1, p2) {
if (!p1)
p1 = '';
if (!p2)
p2 = '';
if (p1.endsWith('/'))
p1 = p1.slice(0, -1);
if (p2.startsWith('/'))
p2 = p2.slice(1);
if (!p1)
return p2;
if (!p2)
return p1;
return p1 + '/' + p2;
}
function uricom_enc(txt, do_fb_enc) {
try {
return encodeURIComponent(txt);
@@ -1162,13 +1206,13 @@ var tt = (function () {
r.th.style.top = (e.pageY + 12 * sy) + 'px';
};
-if (IPHONE) {
+if (TOUCH) {
var f1 = r.show,
f2 = r.hide,
q = [];
// if an onclick-handler creates a new timer,
-// iOS 13.1.2 delays the entire handler by up to 401ms,
+// webkits delay the entire handler by up to 401ms,
// win by using a shared timer instead
timer.add(function () {

View File

@@ -1,3 +1,214 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0316-2106 `v1.6.9` index.html
## new features
* option to show `index.html` instead of the folder listing
* arg `--ih` makes it default-enabled
* clients can enable/disable it in the `[⚙️]` settings tab
* url-param `?v` skips it for a particular folder (see the sketch after this list)
* faster folder-thumbnail validation on startup (mostly on conventional HDDs)
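a minimal sketch of how the pieces fit together; the volume path and folder name below are hypothetical, the port is the default:
```
# serve ./www read-only; folders containing an index.html will show that
# page instead of the folder listing (clients can still toggle this in [⚙️])
python3 copyparty-sfx.py --ih -v ./www::r

# force the folder listing for a single request by appending ?v
#   http://127.0.0.1:3923/some/folder/?v
```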
## bugfixes
* "load more" button didn't always show up when search results got truncated
* ux: tooltips could block buttons on android
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0312-1610 `v1.6.8` folder thumbs
* read-only demo server at https://a.ocv.me/pub/demo/
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) [client testbed](https://cd.ocv.me/b/)
## new features
* folder thumbnails are indexed in the db
* now supports non-lowercase names (`Cover.jpg`, `Folder.JPG`)
* folders without a specific cover/folder image will show the first pic inside
* when audio playback continues into an empty folder, keep trying for a bit
* add no-index hints (google etc) in basic-browser HTML (`?b`, `?b=u`)
* [commandline uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) supports long filenames on win7
## bugfixes
* rotated logfiles didn't get xz compressed
* image-gallery links pointing to a deleted image shows an error instead of a crashpage
## other changes
* folder thumbnails have purple text to differentiate from files
* `copyparty32.exe` starts 30% faster (but is 6% larger)
----
# what to download?
| download link | is it good? | description |
| -- | -- | -- |
| **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** | ✅ the best 👍 | runs anywhere! only needs python |
| [a docker image](https://github.com/9001/copyparty/blob/hovudstraum/scripts/docker/README.md) | it's ok | good if you prefer docker 🐋 |
| [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) | ⚠️ [acceptable](https://github.com/9001/copyparty#copypartyexe) | for [win8](https://user-images.githubusercontent.com/241032/221445946-1e328e56-8c5b-44a9-8b9f-dee84d942535.png) or later; built-in thumbnailer |
| [up2k.exe](https://github.com/9001/copyparty/releases/latest/download/up2k.exe) | ⚠️ acceptable | [CLI uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) as a win7+ exe ([video](https://a.ocv.me/pub/demo/pics-vids/u2cli.webm)) |
| [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) | ⛔️ [dangerous](https://github.com/9001/copyparty#copypartyexe) | for [win7](https://user-images.githubusercontent.com/241032/221445944-ae85d1f4-d351-4837-b130-82cab57d6cca.png) -- never expose to the internet! |
| [cpp-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.6.8/copyparty-winpe64.exe) | ⛔️ dangerous | runs on [64bit WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png), otherwise useless |
* except for [up2k.exe](https://github.com/9001/copyparty/releases/latest/download/up2k.exe), all of the options above are equivalent
* the zip and tar.gz files below are just source code
* python packages are available at [PyPI](https://pypi.org/project/copyparty/#files)
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0305-2018 `v1.6.7` fix no-dedup + add up2k.exe
## new features
* controlpanel-connect: add example for webdav automount
## bugfixes
* fix a race which, in worst case (but unlikely on linux), **could cause data loss**
* could only happen if `--no-dedup` or volflag `copydupes` was set (**not** default)
* if two identical files were uploaded at the same time, there was a small chance that one of the files would become empty
* check if you were affected by doing a search for zero-byte files using either of the following:
* https://127.0.0.1:3923/#q=size%20%3D%200
* `find -type f -size 0`
* let me know if you lost something important and had logging enabled!
* ftp: mkdir can do multiple levels at once (support filezilla)
* fix flickering toast on upload finish
* `[💤]` (upload-baton) could disengage if chrome decides to pause the background tab for 10sec (which it sometimes does)
----
## introducing [up2k.exe](https://github.com/9001/copyparty/releases/latest/download/up2k.exe)
the commandline up2k upload / filesearch client, now as a standalone windows exe
* based on python 3.7 so it runs on 32bit windows7 or anything newer
* *no https support* (saves space + the python3.7 openssl is getting old)
* built from b39ff92f34e3fca389c78109d20d5454af761f8e so it can do long filepaths and mojibake
----
⭐️ **you probably want [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) below;**
the exe is [not recommended](https://github.com/9001/copyparty#copypartyexe) for longterm use
and the zip and tar.gz files are source code
(python packages are available at [PyPI](https://pypi.org/project/copyparty/#files))
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0226-2030 `v1.6.6` r 2 0 0
two hundred releases wow
* read-only demo server at https://a.ocv.me/pub/demo/
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) [client testbed](https://cd.ocv.me/b/)
* currently fighting a ground fault so the demo server will be unreliable for a while
## new features
* more docker containers! now runs on x64, x32, aarch64, armhf, ppc64, s390x
* pls let me know if you actually run copyparty on an IBM mainframe 👍
* new [event hook](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) type `xiu` runs just once for all recent uploads
* example hook [xiu-sha.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/xiu-sha.py) generates sha512 checksum files
* new arg `--rsp-jtr` simulates connection jitter
* copyparty.exe integrity selftest
* ux:
* return to previous page after logging in
* show a warning on the login page if you're not using https
* freebsd: detect `fetch` and return the [colorful sortable plaintext](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) listing
## bugfixes
* permit replacing empty files only during a `--blank-wt` grace period
* lifetimes: keep upload-time when a size/mtime change triggers a reindex
* during cleanup after an unlink, never rmdir the entire volume
* rescan button in the controlpanel required volumes to be e2ds
* dupes could get indexed with the wrong mtime
* only affected the search index; the filesystem got the right one
* ux: search results could include the same hit twice in case of overlapping volumes
* ux: upload UI would remain expanded permanently after visiting a huge tab
* ftp: return proper error messages when client does something illegal
* ie11: support the back button
## other changes
* [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) replaces copyparty64.exe -- now built for 64-bit windows 10
* **on win10 it just works** -- on win8 it needs [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145) -- no win7 support
* has the latest security patches, but sfx.py is still better for long-term use
* has pillow and mutagen; can make thumbnails and parse/index media
* [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is the old win7-compatible, dangerously-insecure edition
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0212-1411 `v1.6.5` windows smb fix + win10.exe
* read-only demo server at https://a.ocv.me/pub/demo/
* [docker image](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) [client testbed](https://cd.ocv.me/b/)
## bugfixes
* **windows-only:** smb locations (network drives) could not be accessed
* appeared in [v1.6.4](https://github.com/9001/copyparty/releases/tag/v1.6.4) while adding support for long filepaths (260chars+)
## other changes
* removed tentative support for compressed chiptunes (xmgz, xmz, xmj, ...) since FFmpeg usually doesn't
----
# introducing [copyparty640.exe](https://github.com/9001/copyparty/releases/download/v1.6.5/copyparty640.exe)
* built for win10, comes with the latest python and deps (supports win8 with [vc redist 2015](https://www.microsoft.com/en-us/download/details.aspx?id=48145))
* __*much* safer__ than the old win7-compatible `copyparty.exe` and `copyparty64.exe`
* but only `copyparty-sfx.py` takes advantage of the operating system security patches
* includes pillow for thumbnails and mutagen for media indexing
* around 10% slower (trying to figure out what's up with that)
starting from the next release,
* `copyparty.exe` (win7 x32) will become `copyparty32.exe`
* `copyparty640.exe` (win10) will be the new `copyparty.exe`
* `copyparty64.exe` (win7 x64) will graduate
so the [copyparty64.exe](https://github.com/9001/copyparty/releases/download/v1.6.5/copyparty64.exe) in this release will be the "final" version able to run inside a [64bit Win7-era winPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png) (all regular 32/64-bit win7 editions can just use `copyparty32.exe` instead)
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0211-1802 `v1.6.4` 🔧🎲🔗🐳🇦🎶
* read-only demo server at https://a.ocv.me/pub/demo/
* [1.6 theme song](https://a.ocv.me/pub/demo/music/.bonus/#af-134e597c) // [similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md)
## new features
* 🔧 new [config syntax](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) (#20)
* the new syntax is still kinda esoteric and funky but it's an improvement
* old config files are still supported
* `--vc` prints the autoconverted config which you can copy back into the config file to upgrade
* `--vc` will also [annotate and explain](https://user-images.githubusercontent.com/241032/217356028-eb3e141f-80a6-4bc6-8d04-d8d1d874c3e9.png) the config files
* new argument `--cgen` to generate config from commandline arguments
* kinda buggy, especially the `[global]` section, so give it a lookover before saving it
* 🎲 randomize filenames on upload
* either optionally, using the 🎲 button in the up2k ui
* or force-enabled; globally with `--rand` or per-volume with volflag `rand`
* specify filename length with `nrand` (globally or volflag), default 9
* 🔗 export a list of links to your recent uploads
* `copy links` in the up2k tab (🚀) will copy links to all uploads since last page refresh,
* `copy` in the unpost tab (🧯) will copy links to all your recent uploads (max 2000 files / 12 hours by default)
* filekeys are included if that's enabled and you have access to view those (permissions `G` or `r`)
* 🇦 [arch package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) -- added in #18, thx @icxes
* maybe in aur soon!
* 🐳 [docker containers](https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker) -- 5 editions,
* [min](https://hub.docker.com/r/copyparty/min) (57 MiB), just copyparty without thumbnails or audio transcoding
* [im](https://hub.docker.com/r/copyparty/im) (70 MiB), thumbnails of popular image formats + media tags with mutagen
* [ac (163 MiB)](https://hub.docker.com/r/copyparty/ac) 🥇 adds audio/video thumbnails + audio transcoding + better tags
* [iv](https://hub.docker.com/r/copyparty/iv) (211 MiB), makes heif/avif/jxl faster to thumbnail
* [dj](https://hub.docker.com/r/copyparty/dj) (309 MiB), adds optional detection of musical key / bpm
* 🎶 [chiptune player](https://a.ocv.me/pub/demo/music/chiptunes/#af-f6fb2e5f)
* transcodes mod/xm/s3m/it/mo3/mptm/mt2/okt to opus
* uses FFmpeg (libopenmpt) so the accuracy is not perfect, but most files play OK enough
* not **yet** supported in the docker container since Alpine's FFmpeg was built without libopenmpt
* windows: support long filepaths (over 260 chars)
* uses the `//?/` winapi syntax to also support windows 7
* `--ver` shows the server version on the control panel
## bugfixes
* markdown files didn't scale properly in the document browser
* detect and refuse multiple volume definitions sharing the same filesystem path
* don't return incomplete transcodes if multiple clients try to play the same flac file
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh): more reliable chroot cleanup, sigusr1 for config reload
* pypi packaging: compress web resources, include webdav.bat
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0131-2103 `v1.6.3` sandbox k

View File

@@ -17,6 +17,6 @@ problem: `svchost.exe` is using 100% of a cpu core, and upon further inspection
"solution": create a virtual filesystem which is intentionally slow and trick windows into reading it from there instead "solution": create a virtual filesystem which is intentionally slow and trick windows into reading it from there instead
* create a file called `AppxManifest.xml` and put something dumb in it * create a file called `AppxManifest.xml` and put something dumb in it
* serve the file from a copyparty instance with `--rsp-slp=9` so every request will hang for 9 sec * serve the file from a copyparty instance with `--rsp-slp=1` so every request will hang for 1 sec
* `net use m: http://127.0.0.1:3993/` (mount copyparty using the windows-native webdav client) * `net use m: http://127.0.0.1:3993/` (mount copyparty using the windows-native webdav client)
* `mklink /d c:\windows\systemapps\microsoftwindows.client.cbs_cw5n1h2txyewy\AppxManifest.xml m:\AppxManifest.xml` * `mklink /d c:\windows\systemapps\microsoftwindows.client.cbs_cw5n1h2txyewy\AppxManifest.xml m:\AppxManifest.xml`

View File

@@ -25,13 +25,15 @@
some improvement ideas
-* the JS is a mess -- a preact rewrite would be nice
+* the JS is a mess -- a ~~preact~~ rewrite would be nice
* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
* good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
+* maybe preact / vdom isn't the best choice, could just wait for the Next Big Thing
* the UX is a mess -- a proper design would be nice
* very organic (much like the python/js), everything was an afterthought
* true for both the layout and the visual flair
* something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
+* would preferably keep the information density, just more organized yet [not too boring](https://blog.rachelbinx.com/2023/02/unbearable-sameness/)
* some of the python files are way too big
* `up2k.py` ended up doing all the file indexing / db management
* `httpcli.py` should be separated into modules in general

View File

@@ -14,6 +14,10 @@ when server is on another machine (1gbit LAN),
# creating the config file
+the copyparty "connect" page at `/?hc` (so for example http://127.0.0.1:3923/?hc) will generate commands to autoconfigure rclone for your server
+**if you prefer to configure rclone manually, continue reading:**
replace `hunter2` with your password, or remove the `hunter2` lines if you allow anonymous access
@@ -22,7 +26,7 @@ replace `hunter2` with your password, or remove the `hunter2` lines if you allow
(
echo [cpp-rw]
echo type = webdav
-echo vendor = other
+echo vendor = owncloud
echo url = http://127.0.0.1:3923/
echo headers = Cookie,cppwd=hunter2
echo(
@@ -41,7 +45,7 @@ also install the windows dependencies: [winfsp](https://github.com/billziss-gh/w
cat > ~/.config/rclone/rclone.conf <<'EOF'
[cpp-rw]
type = webdav
-vendor = other
+vendor = owncloud
url = http://127.0.0.1:3923/
headers = Cookie,cppwd=hunter2
@@ -62,6 +66,17 @@ rclone.exe mount --vfs-cache-mode writes --vfs-cache-max-age 5s --attr-timeout 5
```
# sync folders to/from copyparty
note that the up2k client [up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy) (available on the "connect" page of your copyparty server) does uploads much faster and safer, but rclone is bidirectional and more ubiquitous
```
rclone sync /usr/share/icons/ cpp-rw:fds/
```
TODO: rclone bug? `--transfers=4` doesn't seem to do anything (it does one request at a time), doesn't matter if the webdav server is copyparty or rclone
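and since webdav remotes work in both directions, the same config can pull from the server as well; a minimal sketch using the `cpp-rw` remote from above (the local path is just an example):
```
rclone sync cpp-rw:fds/ ~/fds-mirror/
```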
# use rclone as server too, replacing copyparty
feels out of place but is too good not to mention
@@ -70,3 +85,26 @@ feels out of place but is too good not to mention
rclone.exe serve http --read-only .
rclone.exe serve webdav .
```
# devnotes
copyparty supports and expects [the following](https://github.com/rclone/rclone/blob/46484022b08f8756050aa45505ea0db23e62df8b/backend/webdav/webdav.go#L575-L578) from rclone,
```go
case "owncloud":
f.canStream = true
f.precision = time.Second
f.useOCMtime = true
f.hasOCMD5 = true
f.hasOCSHA1 = true
```
notably,
* `useOCMtime` enables the `x-oc-mtime` header to retain mtime of uploads from rclone
* `canStream` is supported but not required by us
* `hasOCMD5` / `hasOCSHA1` is conveniently dontcare on both ends
there's a scary comment mentioning PROPSET of lastmodified which is not something we wish to support
and if `vendor=owncloud` ever stops working, try `vendor=fastmail` instead
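for reference, a minimal sketch of the same mtime-preserving trick without rclone, assuming the server honors `x-oc-mtime` on a plain HTTP PUT the same way it does for rclone's webdav uploads; the URL, filename, and password below are placeholders:
```
# upload a file and ask the server to keep its last-modified timestamp
ts=$(stat -c %Y song.flac)
curl -T song.flac -H "X-OC-Mtime: $ts" -b cppwd=hunter2 http://127.0.0.1:3923/music/
```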

View File

@@ -7,6 +7,21 @@ there is probably some unintentional bias so please submit corrections
currently up to date with [awesome-selfhosted](https://github.com/awesome-selfhosted/awesome-selfhosted) but that probably won't last
## symbol legends
### ...in feature matrices:
* `█` = absolutely
* `` = partially
* `•` = maybe?
* ` ` = nope
### ...in reviews:
* ✅ = advantages over copyparty
* 💾 = what copyparty offers as an alternative
* 🔵 = similarities
* ⚠️ = disadvantages (something copyparty does "better")
## toc
* top
@@ -32,11 +47,15 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
* [kodbox](#kodbox)
* [filebrowser](#filebrowser)
* [filegator](#filegator)
+* [sftpgo](#sftpgo)
* [updog](#updog)
* [goshs](#goshs)
* [gimme-that](#gimme-that)
* [ass](#ass)
* [linx](#linx)
+* [h5ai](#h5ai)
+* [autoindex](#autoindex)
+* [miniserve](#miniserve)
* [briefly considered](#briefly-considered)
@@ -63,8 +82,8 @@ the table headers in the matrixes below are the different softwares, with a quic
the softwares,
* `a` = [copyparty](https://github.com/9001/copyparty)
-* `b` = [hfs2](https://github.com/rejetto/hfs2)
+* `b` = [hfs2](https://rejetto.com/hfs/)
-* `c` = [hfs3](https://www.rejetto.com/hfs/)
+* `c` = [hfs3](https://github.com/rejetto/hfs)
* `d` = [nextcloud](https://github.com/nextcloud/server)
* `e` = [seafile](https://github.com/haiwen/seafile)
* `f` = [rclone](https://github.com/rclone/rclone), specifically `rclone serve webdav .`
@@ -73,6 +92,7 @@ the softwares,
* `i` = [kodbox](https://github.com/kalcaddle/kodbox)
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
* `k` = [filegator](https://github.com/filegator/filegator)
+* `l` = [sftpgo](https://github.com/drakkan/sftpgo)
some softwares not in the matrixes,
* [updog](#updog)
@@ -80,6 +100,9 @@ some softwares not in the matrixes,
* [gimme-that](#gimmethat)
* [ass](#ass)
* [linx](#linx)
+* [h5ai](#h5ai)
+* [autoindex](#autoindex)
+* [miniserve](#miniserve)
symbol legend,
* `█` = absolutely
@@ -90,62 +113,64 @@ symbol legend,
## general

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| intuitive UX | | | █ | █ | █ | | █ | █ | █ | █ | █ | █ |
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | | █ |
| good documentation | | | | █ | █ | █ | █ | | | █ | █ | |
| runs on iOS | | | | | | | | | | | | |
| runs on Android | █ | | | | | █ | | | | | | |
| runs on WinXP | █ | █ | | | | █ | | | | | | |
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | | █ | █ | █ | █ |
| runs on Linux | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | | █ |
| portable binary | █ | █ | █ | | | █ | █ | | | █ | | █ |
| zero setup, just go | █ | █ | █ | | | | █ | | | █ | | |
| android app | | | | █ | █ | | | | | | | |
| iOS app | | | | █ | █ | | | | | | | |

* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
* `a`/copyparty remarks:
* no gui for server settings; only for client-side stuff
* can theoretically run on iOS / iPads using [iSH](https://ish.app/), but only the iPad will offer sufficient multitasking i think
* [android app](https://f-droid.org/en/packages/me.ocv.partyup/) is for uploading only
+* no iOS app but has [shortcuts](https://github.com/9001/copyparty#ios-shortcuts) for easy uploading
* `b`/hfs2 runs on linux through wine
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
* `h`/chibisafe has undocumented windows support
+* `l`/sftpgo must be launched with a command
## file transfer
*the thing that copyparty is actually kinda good at*

| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | | █ |
| download folder as tar | █ | | | | | | | | | █ | | |
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ | |
| resumable uploads | █ | | | | | | | | █ | | █ | |
| upload segmenting | █ | | | | | | | █ | █ | | █ | |
| upload acceleration | █ | | | | | | | | █ | | █ | |
| upload verification | █ | | | █ | █ | | | | █ | | | |
| upload deduplication | █ | | | | █ | | | | █ | | | |
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ | |
| keep last-modified time | █ | | | █ | █ | █ | | | | | | █ |
| upload rules | | | | | | | | | | | | |
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | | █ |
| ┗ max filesize | █ | | | | | | | █ | | | █ | █ |
| ┗ max items in folder | █ | | | | | | | | | | | |
| ┗ max file age | █ | | | | | | | | █ | | | |
| ┗ max uploads over time | █ | | | | | | | | | | | |
| ┗ compress before write | █ | | | | | | | | | | | |
| ┗ randomize filename | █ | | | | | | | █ | █ | | | |
| ┗ mimetype reject-list | | | | | | | | | • | | | |
| ┗ extension reject-list | | | | | | | | █ | • | | | |
| checksums provided | | | | █ | █ | | | | █ | | | |
| cloud storage backend | | | | █ | █ | █ | | | | | █ | █ |

* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example
@@ -162,25 +187,29 @@ symbol legend,
* can provide checksums for single files on request
* can probably do extension/mimetype rejection similar to copyparty
* `k`/filegator download-as-zip is not streaming; it creates the full zipfile before download can start
+* `l`/sftpgo:
+* resumable/segmented uploads only over SFTP, not over HTTP
+* upload rules are totals only, not over time
+* can probably do extension/mimetype rejection similar to copyparty
## protocols and client support
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | | █ |
| serve ftp | █ | | | | | █ | | | | | | █ |
| serve ftps | █ | | | | | █ | | | | | | █ |
| serve sftp | | | | | | █ | | | | | | █ |
| serve smb/cifs | | | | | | █ | | | | | | |
| serve dlna | | | | | | █ | | | | | | |
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ | █ |
| zeroconf | █ | | | | | | | | | | | |
| supports netscape 4 | | | | | | █ | | | | | • | |
| ...internet explorer 6 | | █ | | █ | | █ | | | | | • | |
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | | |
| undecodable filenames | █ | | | • | • | █ | | • | • | | | |
* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
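as a purely illustrative example of the `webdav` point, mounting a server as a local folder from a linux client -- the address, port and mountpoint below are placeholders:

```bash
# with davfs2; URL and mountpoint are placeholders
sudo mount -t davfs http://192.168.1.9:3923/ /mnt/dav

# or with rclone, assuming `rclone config` has created a webdav remote named "srv"
rclone mount srv: /mnt/dav --vfs-cache-mode writes
```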
@@ -190,57 +219,62 @@ symbol legend,
* `a`/copyparty remarks:
* extremely minimal samba/cifs server
* netscape 4 / ie6 support is mostly listed as a joke altho some people have actually found it useful ([ie4 tho](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png))
* `l`/sftpgo translates mojibake filenames into valid utf-8 (information loss)
## server configuration
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| config from cmd args | █ | | | | | █ | █ | | | █ | | |
| config files | █ | █ | █ | | | █ | | █ | | █ | • | |
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ | |
| same-port http / https | █ | | | | | | | | | | | |
| listen multiple ports | █ | | | | | | | | | | | █ |
| virtual file system | █ | █ | █ | | | | █ | | | | | █ |
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • | █ |
| folder-rproxy ok | █ | | | | █ | █ | | • | • | • | • | |
* `folder-rproxy` = reverse-proxying without dedicating an entire (sub)domain, using a subfolder instead
* `l`/sftpgo:
* config: users must be added through gui / api calls
## server capabilities
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
| per-account chroot | | | | | | | | | | | | |
| single-sign-on | | | | █ | █ | | | | • | | | |
| token auth | | | | █ | █ | | | █ | | | | |
| 2fa | | | | █ | █ | | | | | | | █ |
| per-volume permissions | | | █ | █ | █ | █ | █ | | █ | █ | | |
| per-folder permissions | | | | █ | █ | | █ | | █ | | | █ |
| per-file permissions | | | | █ | █ | | | | █ | | | |
| per-file passwords | | | | █ | █ | | █ | | █ | | | |
| unmap subfolders | █ | | | | | | | | | █ | | |
| index.html blocks list | | | | | | | █ | | | | | |
| write-only folders | █ | | | | | | | | | | █ | █ |
| files stored as-is | █ | █ | | █ | | █ | █ | | | | | |
| file versioning | | | | █ | █ | | | | | | | |
| file encryption | | | | | | | | | | | | |
| file indexing | █ | | | █ | █ | | | | █ | █ | | |
| ┗ per-volume db | █ | | | | | | | • | • | | | |
| ┗ db stored in folder | █ | | | | | | | • | • | █ | | |
| ┗ db stored out-of-tree | █ | | █ | █ | | | | | • | █ | | |
| ┗ existing file tree | █ | | | | | | | | | █ | | |
| file action event hooks | █ | | | | | | | | | █ | | █ |
| one-way folder sync | █ | | | █ | █ | █ | | | | | | |
| full sync | | | | █ | █ | | | | | | | |
| speed throttle | | █ | | | | | | | █ | | | |
| anti-bruteforce | █ | █ | █ | █ | █ | | | | | | | |
| dyndns updater | | █ | | | | | | | | | | |
| self-updater | | | █ | | | | | | | | | |
| log rotation | █ | | █ | █ | | | | | | | | |
| upload tracking / log | █ | | | █ | █ | | | █ | | | | |
| curl-friendly ls | █ | | | | | | | | | | | |
| curl-friendly upload | █ | | | | | █ | █ | • | | | | |
* `unmap subfolders` = "shadowing"; mounting a local folder in the middle of an existing filesystem tree in order to disable access below that path
* `files stored as-is` = uploaded files are trivially readable from the server HDD, not sliced into chunks or in weird folder structures or anything like that
@@ -261,49 +295,52 @@ symbol legend,
* `k`/filegator remarks:
* `per-* permissions` -- can limit a user to one folder and its subfolders
* `unmap subfolders` -- can globally filter a list of paths
* `l`/sftpgo:
* `file action event hooks` also include on-download triggers
* `upload tracking / log` in main logfile
## client features
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ | |
| themes | █ | █ | | █ | | | | | █ | | | |
| directory tree nav | █ | | | | █ | | | | █ | | | |
| multi-column sorting | █ | | | | | | | | | | | |
| thumbnails | █ | | | | | | | █ | █ | | | |
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | | |
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | | |
| ┗ audio spectrograms | █ | | | | | | | | | | | |
| audio player | █ | | | █ | █ | | | | █ | | | |
| ┗ gapless playback | █ | | | | | | | | • | | | |
| ┗ audio equalizer | █ | | | | | | | | | | | |
| ┗ waveform seekbar | █ | | | | | | | | | | | |
| ┗ OS integration | █ | | | | | | | | | | | |
| ┗ transcode to lossy | █ | | | | | | | | | | | |
| video player | █ | | | █ | █ | | | | █ | █ | | |
| ┗ video transcoding | | | | | | | | | █ | | | |
| audio BPM detector | █ | | | | | | | | | | | |
| audio key detector | █ | | | | | | | | | | | |
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | | |
| search by date / size | █ | | | | █ | | | █ | █ | | | |
| search by bpm / key | █ | | | | | | | | | | | |
| search by custom tags | | | | | | | | █ | █ | | | |
| search in file contents | | | | █ | █ | | | | █ | | | |
| search by custom parser | █ | | | | | | | | | | | |
| find local file | █ | | | | | | | | | | | |
| undo recent uploads | █ | | | | | | | | | | | |
| create directories | █ | | | █ | █ | | █ | █ | █ | █ | █ | █ |
| image viewer | █ | | | █ | █ | | | | █ | █ | █ | |
| markdown viewer | █ | | | | █ | | | | █ | | | |
| markdown editor | █ | | | | █ | | | | █ | | | |
| readme.md in listing | █ | | | █ | | | | | | | | |
| rename files | █ | █ | █ | █ | █ | | █ | | █ | █ | █ | █ |
| batch rename | █ | | | | | | | | █ | | | |
| cut / paste files | █ | █ | | █ | █ | | | | █ | | | |
| move files | █ | █ | | █ | █ | | █ | | █ | █ | █ | |
| delete files | █ | █ | | █ | █ | | █ | █ | █ | █ | █ | █ |
| copy files | | | | | █ | | | | █ | █ | █ | |
* `single-page app` = multitasking; possible to continue navigating while uploading
* `audio player » os-integration` = use the [lockscreen](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) or [media hotkeys](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) to play/pause, prev/next song
@@ -319,20 +356,21 @@ symbol legend,
## integration
| feature / software | a | b | c | d | e | f | g | h | i | j | k | l |
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - | - |
| OS alert on upload | █ | | | | | | | | | | | |
| discord | █ | | | | | | | | | | | |
| ┗ announce uploads | █ | | | | | | | | | | | |
| ┗ custom embeds | | | | | | | | | | | | |
| sharex | █ | | | █ | | █ | | █ | | | | |
| flameshot | | | | | | █ | | | | | | |
* sharex `` = yes, but does not provide example sharex config
* `a`/copyparty remarks:
* `OS alert on upload` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
* `discord » announce uploads` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* `j`/filebrowser can probably pull those off with command runners similar to copyparty
* `l`/sftpgo has nothing built-in but is very extensible
## another matrix
@@ -350,6 +388,7 @@ symbol legend,
| kodbox | php | ░ gpl3 | 92 MB |
| filebrowser | go | █ apl2 | 20 MB |
| filegator | php | █ mit | • |
| sftpgo | go | ‼ agpl | 44 MB |
| updog | python | █ mit | 17 MB |
| goshs | go | █ mit | 11 MB |
| gimme-that | python | █ mit | 4.8 MB |
@@ -363,7 +402,9 @@ symbol legend,
# reviews
* ✅ are advantages over copyparty
* 💾 are what copyparty offers as an alternative
* 🔵 are similarities
* ⚠️ are disadvantages (something copyparty does "better")
## [copyparty](https://github.com/9001/copyparty)
* resumable uploads which are verified server-side
@@ -371,7 +412,7 @@ symbol legend,
* both of the above are surprisingly uncommon features
* very cross-platform (python, no dependencies)
## [hfs2](https://rejetto.com/hfs/)
* the OG, the legend
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
@@ -380,7 +421,7 @@ symbol legend,
* vfs with gui config, per-volume permissions
* starting to show its age, hence the rewrite:
## [hfs3](https://github.com/rejetto/hfs)
* nodejs; cross-platform
* vfs with gui config, per-volume permissions
* still early development, let's revisit later
@@ -395,10 +436,11 @@ symbol legend,
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ⚠️ AGPL licensed
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
* 💾 android upload-only app + iPhone upload shortcut
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents
* ✅ webauthn passwordless authentication
@@ -413,10 +455,11 @@ symbol legend,
* ⚠️ http/webdav only; no ftp, zeroconf
* ⚠️ less awesome music player
* ⚠️ doesn't run on android or ipads
* ⚠️ AGPL licensed
* ✅ great ui/ux
* ✅ config gui
* ✅ apps (android / iphone)
* 💾 android upload-only app + iPhone upload shortcut
* ✅ more granular permissions (per-file)
* ✅ search: fulltext indexing of file contents
@@ -434,12 +477,12 @@ symbol legend,
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ doesn't support crazy filenames
* ✅ per-url access control (copyparty is per-volume)
* 🔵 basic but really snappy ui
* 🔵 upload, rename, delete, ... see feature matrix
## [chibisafe](https://github.com/chibisafe/chibisafe)
* nodejs; recommends docker
* 🔵 *it has upload segmenting!*
* ⚠️ but uploads are still not resumable / accelerated / integrity-checked
* ⚠️ not portable
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
@@ -450,13 +493,13 @@ symbol legend,
* ✅ searchable image tags; delete by tag
* ✅ browser extension to upload files to the server
* ✅ reject uploads by file extension
* 💾 can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ token auth (api keys)
## [kodbox](https://github.com/kalcaddle/kodbox)
* this thing is insane
* php; [docker](https://hub.docker.com/r/kodcloud/kodbox)
* 🔵 *upload segmenting, acceleration, and integrity checking!*
* ⚠️ but uploads are not resumable(?)
* ⚠️ not portable
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
@@ -483,17 +526,41 @@ symbol legend,
* ⚠️ but no directory tree for navigation
* ✅ user signup
* ✅ command runner / remote shell
* 🔵 supposed to have write-only folders but couldn't get it to work
## [filegator](https://github.com/filegator/filegator)
* php; cross-platform (windows, linux, mac)
* 🔵 *it has upload segmenting and acceleration*
* ⚠️ but uploads are still not integrity-checked
* ⚠️ http only; no webdav / ftp / zeroconf
* ⚠️ does not support symlinks
* ⚠️ expensive download-as-zip feature
* ⚠️ doesn't support crazy filenames
* ⚠️ limited file search
## [sftpgo](https://github.com/drakkan/sftpgo)
* go; cross-platform (windows, linux, mac)
* ⚠️ http uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* 🔵 sftp uploads are resumable
* ⚠️ web UI is very minimal + a bit slow
* ⚠️ no thumbnails / image viewer / audio player
* ⚠️ basic file manager (no cut/paste/move)
* ⚠️ no filesystem indexing / search
* ⚠️ doesn't run on phones, tablets
* ⚠️ no zeroconf (mdns/ssdp)
* ⚠️ AGPL licensed
* 🔵 ftp, ftps, webdav
* ✅ sftp server
* ✅ settings gui
* ✅ acme (automatic tls certs)
* 💾 relies on caddy/certbot/acme.sh
* ✅ at-rest encryption
* 💾 relies on LUKS/BitLocker
* ✅ can use S3/GCS as storage backend
* 💾 relies on rclone-mount
* ✅ on-download event hook (otherwise same as copyparty)
* ✅ more extensive permissions control
## [updog](https://github.com/sc0tfree/updog)
* python; cross-platform
@@ -509,9 +576,9 @@ symbol legend,
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ✅ cool clipboard widget
* 💾 the markdown editor is an ok substitute
* 🔵 read-only and upload-only modes (same as copyparty's write-only)
* 🔵 https, webdav, but no ftp
## [gimme-that](https://github.com/nejdetckenobi/gimme-that)
* python, but with c dependencies
@@ -520,8 +587,8 @@ symbol legend,
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ weird folder structure for uploads
* ✅ clamav antivirus check on upload! neat
* 🔵 optional max-filesize, os-notification on uploads
* 💾 os-notification available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
## [ass](https://github.com/tycrek/ass)
* nodejs; recommends docker
@@ -532,30 +599,52 @@ symbol legend,
* ⚠️ on cloudflare: max upload size 100 MiB
* ✅ token auth
* ✅ gps metadata stripping
* 💾 possible with [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py)
* ✅ discord integration (custom embeds, upload webhook)
* 💾 [upload webhook plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
* ✅ reject uploads by mimetype
* 💾 can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
* ✅ can use S3 as storage backend
* 💾 relies on rclone-mount
* ✅ custom 404 pages
## [linx](https://github.com/ZizzyDizzyMC/linx-server/)
* originally [andreimarcu/linx-server](https://github.com/andreimarcu/linx-server) but development has ended
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* 🔵 some of its unique features have been added to copyparty as former linx users have migrated
* file expiration timers, filename randomization
* ✅ password-protected files
* 💾 password-protected folders + filekeys to skip the folder password seem to cover most usecases
* ✅ file deletion keys
* ✅ download files as torrents
* ✅ remote uploads (send a link to the server and it downloads it)
* 💾 available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
* ✅ can use S3 as storage backend
* 💾 relies on rclone-mount
## [h5ai](https://larsjung.de/h5ai/)
* ⚠️ read only; no upload/move/delete
* ⚠️ search hits the filesystem directly; not indexed/cached
* ✅ slick ui
* ✅ in-browser qr generator to share URLs
* 🔵 directory tree, image viewer, thumbnails, download-as-tar
## [autoindex](https://github.com/nielsAD/autoindex)
* ⚠️ read only; no upload/move/delete
* ✅ directory cache for faster browsing of cloud storage
* 💾 local index/cache for recursive search (names/attrs/tags), but not for browsing
## [miniserve](https://github.com/svenstaro/miniserve)
* rust; cross-platform (windows, linux, mac)
* ⚠️ uploads not resumable / accelerated / integrity-checked
* ⚠️ on cloudflare: max upload size 100 MiB
* ⚠️ no thumbnails / image viewer / audio player / file manager
* ⚠️ no filesystem indexing / search
* 🔵 upload, tar/zip download, qr-code
* ✅ faster at loading huge folders
# briefly considered
* [pydio](https://github.com/pydio/cells): python/agpl3, looks great, fantastic ux -- but needs mariadb, systemwide install
* [gossa](https://github.com/pldubouilh/gossa): go/mit, minimalistic, basic file upload, text editor, mkdir and rename (no delete/move)


@@ -1,5 +1,10 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-ac" \
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
RUN apk --no-cache add \
wget \
@@ -11,4 +16,5 @@ RUN apk --no-cache add \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]


@@ -1,20 +1,27 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-dj" \
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
COPY i/bin/mtag/install-deps.sh ./
COPY i/bin/mtag/audio-bpm.py /mtag/
COPY i/bin/mtag/audio-key.py /mtag/
RUN apk add -U \
wget \
py3-pillow py3-pip py3-cffi \
ffmpeg \
vips-jxl vips-heif vips-poppler vips-magick \
py3-numpy fftw libsndfile \
vamp-sdk vamp-sdk-libs \
&& python3 -m pip install pyvips \
&& apk --no-cache add -t .bd \
bash wget gcc g++ make cmake patchelf \
python3-dev ffmpeg-dev fftw-dev libsndfile-dev \
py3-wheel py3-numpy-dev \
vamp-sdk-dev \
&& bash install-deps.sh \
&& apk del py3-pip .bd \
&& rm -rf /var/cache/apk/* \
@@ -26,4 +33,5 @@ RUN apk add -U \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]


@@ -1,13 +1,19 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-im" \
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
RUN apk --no-cache add \
wget \
py3-pillow py3-mutagen \
&& mkdir /cfg /w \
&& chmod 777 /cfg /w \
&& echo % /cfg > initcfg
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]


@@ -1,9 +1,14 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-iv" \
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
RUN apk --no-cache add \
wget \
py3-pillow py3-pip py3-cffi \
ffmpeg \
vips-jxl vips-heif vips-poppler vips-magick \
&& python3 -m pip install pyvips \
@@ -14,4 +19,5 @@ RUN apk --no-cache add \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]


@@ -1,5 +1,10 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min" \
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
RUN apk --no-cache add \
python3 \
@@ -9,4 +14,5 @@ RUN apk --no-cache add \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]


@@ -1,5 +1,10 @@
FROM alpine:latest
WORKDIR /z
LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.source="https://github.com/9001/copyparty/tree/hovudstraum/scripts/docker" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min-pip" \
org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"
RUN apk --no-cache add python3 py3-pip \
&& python3 -m pip install copyparty \
@@ -9,4 +14,5 @@ RUN apk --no-cache add python3 py3-pip \
&& echo % /cfg > initcfg
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "-m", "copyparty", "-c", "/z/initcfg"]


@@ -48,7 +48,7 @@ push:
clean:
-docker kill `docker ps -q`
-docker rm `docker ps -qa`
-docker rmi -f `docker images -a | awk '/<none>/{print$$3}'`
hclean:
-docker kill `docker ps -q`


@@ -1,5 +1,5 @@
copyparty is available in these repos:
* https://hub.docker.com/u/copyparty
* https://github.com/9001?tab=packages&repo_name=copyparty
@@ -14,6 +14,7 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
* `/cfg` is an optional folder with zero or more config files (*.conf) to load
* `copyparty/ac` is the recommended [image edition](#editions)
* you can download the image from github instead by replacing `copyparty/ac` with `ghcr.io/9001/copyparty-ac`
* if you are using rootless podman, remove `-u 1000`
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
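for reference, a commented restatement of the `docker run` example above; the host paths are placeholders, only the port and image name come from this readme:

```bash
docker run --rm -it \
  -u 1000 \
  -p 3923:3923 \
  -v /mnt/nas:/w \
  -v "$PWD/cfgdir":/cfg \
  copyparty/ac
# -u 1000        run as a regular uid (omit when using rootless podman)
# -p 3923:3923   publish the web interface
# -v ...:/w      the folder to share
# -v ...:/cfg    optional folder with *.conf files to load
# copyparty/ac   recommended edition; ghcr.io/9001/copyparty-ac is the github mirror
```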
@@ -32,13 +33,17 @@ the recommended way to configure copyparty inside a container is to mount a fold
with image size after installation and when gzipped
* [`min`](https://hub.docker.com/r/copyparty/min) (57 MiB, 20 gz) is just copyparty itself
* [`im`](https://hub.docker.com/r/copyparty/im) (70 MiB, 25 gz) can thumbnail images with pillow, parse media files with mutagen
* [`ac` (163 MiB, 56 gz)](https://hub.docker.com/r/copyparty/ac) is `im` plus ffmpeg for video/audio thumbs + audio transcoding + better tags
* [`iv`](https://hub.docker.com/r/copyparty/iv) (211 MiB, 73 gz) is `ac` plus vips for faster heif / avif / jxl thumbnails
* [`dj`](https://hub.docker.com/r/copyparty/dj) (309 MiB, 104 gz) is `iv` plus beatroot/keyfinder to detect musical keys and bpm
[`ac` is recommended](https://hub.docker.com/r/copyparty/ac) since the additional features available in `iv` and `dj` are rarely useful
most editions support `x86`, `x86_64`, `armhf`, `aarch64`, `ppc64le`, `s390x`
* `dj` doesn't run on `ppc64le`, `s390x`, `armhf`
* `iv` doesn't run on `ppc64le`, `s390x`
## detecting bpm and musical key
@@ -71,4 +76,9 @@ or using commandline arguments,
# build the images yourself
basically `./make.sh hclean pull img push` but see [devnotes.md](./devnotes.md)
# notes
* currently unable to play [tracker music](https://en.wikipedia.org/wiki/Module_file) (mod/s3m/xm/it/...) -- will be fixed in june 2023 (Alpine 3.18)


@@ -0,0 +1,19 @@
# building the images yourself
```bash
./make.sh hclean pull img push
```
will download the latest copyparty-sfx.py from github unless you have [built it from scratch](../../docs/devnotes.md#just-the-sfx) and then build all the images based on that
deprecated alternative: run `make` to use the makefile; however, that uses docker instead of podman and only builds x86_64
`make.sh` is necessarily(?) overengineered because:
* podman keeps burning dockerhub pulls by not using the cached images (`--pull=never` does not apply to manifests)
* podman cannot build from a local manifest, only local images or remote manifests
but I don't really know what i'm doing here 💩
* auth for pushing images to repos;
`podman login docker.io`
`podman login ghcr.io -u 9001`
[about gchq](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) (takes a classic token as password)

scripts/docker/make.sh Executable file

@@ -0,0 +1,152 @@
#!/bin/bash
set -e
[ $(id -u) -eq 0 ] && {
echo dont root
exit 1
}
sarchs="386 amd64 arm/v7 arm64/v8 ppc64le s390x"
archs="amd64 arm s390x 386 arm64 ppc64le"
imgs="dj iv min im ac"
dhub_order="iv dj min im ac"
ghcr_order="ac im min dj iv"
ngs=(
iv-{ppc64le,s390x}
dj-{ppc64le,s390x,arm}
)
for v in "$@"; do
[ "$v" = clean ] && clean=1
[ "$v" = hclean ] && hclean=1
[ "$v" = purge ] && purge=1
[ "$v" = pull ] && pull=1
[ "$v" = img ] && img=1
[ "$v" = push ] && push=1
[ "$v" = sh ] && sh=1
done
[ $# -gt 0 ] || {
echo "need list of commands, for example: hclean pull img push"
exit 1
}
[ $sh ] && {
printf "\n\033[1;31mopening a shell in the most recently created docker image\033[0m\n"
podman run --rm -it --entrypoint /bin/ash $(podman images -aq | head -n 1)
exit $?
}
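# decide which images the cleanup modes will delete:
# clean = dangling <none> images, hclean = locally-built copyparty/alpine images, purge = everything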
filt=
[ $clean ] && filt='/<none>/{print$3}'
[ $hclean ] && filt='/localhost\/copyparty-|^<none>.*localhost\/alpine-/{print$3}'
[ $purge ] && filt='NR>1{print$3}'
[ $filt ] && {
[ $purge ] && {
podman kill $(podman ps -q) || true
podman rm $(podman ps -qa) || true
}
podman rmi -f $(podman images -a --history | awk "$filt") || true
podman rmi $(podman images -a --history | awk '/^<none>.*<none>.*-tmp:/{print$3}') || true
}
[ $pull ] && {
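# pull alpine for each target arch and keep it under a local per-arch tag (alpine-amd64, alpine-arm64-v8, ...)
# so the build step below can reference it with --from localhost/alpine-$a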
for a in $sarchs; do # arm/v6
podman pull --arch=$a alpine:latest
done
podman images --format "{{.ID}} {{.History}}" |
awk '/library\/alpine/{print$1}' |
while read id; do
tag=alpine-$(podman inspect $id | jq -r '.[]|.Architecture' | tr / -)
[ -e .tag-$tag ] && continue
touch .tag-$tag
echo tagging $tag
podman untag $id
podman tag $id $tag
done
rm .tag-*
}
[ $img ] && {
fp=../../dist/copyparty-sfx.py
[ -e $fp ] || {
echo downloading copyparty-sfx.py ...
mkdir -p ../../dist
wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O $fp
}
# kill abandoned builders
ps aux | awk '/bin\/qemu-[^-]+-static/{print$2}' | xargs -r kill -9
# grab deps
rm -rf i err
mkdir i
tar -cC../.. dist/copyparty-sfx.py bin/mtag | tar -xvCi
for i in $imgs; do
podman rm copyparty-$i || true # old manifest
for a in $archs; do
[[ " ${ngs[*]} " =~ " $i-$a " ]] && continue # known incompat
# wait for a free slot
while true; do
touch .blk
[ $(jobs -p | wc -l) -lt $(nproc) ] && break
while [ -e .blk ]; do sleep 0.2; done
done
aa="$(printf '%7s' $a)"
# arm takes forever so make it top priority
[ ${a::3} == arm ] && nice= || nice=nice
# --pull=never does nothing at all btw
(set -x
$nice podman build \
--pull=never \
--from localhost/alpine-$a \
-t copyparty-$i-$a \
-f Dockerfile.$i . ||
(echo $? $i-$a >> err)
rm -f .blk
) 2> >(tee $a.err | sed "s/^/$aa:/" >&2) > >(tee $a.out | sed "s/^/$aa:/") &
done
[ -e err ] && {
echo something died,
cat err
pkill -P $$
exit 1
}
for a in $archs; do
rm -f $a.{out,err}
done
done
wait
[ -e err ] && {
echo something died,
cat err
pkill -P $$
exit 1
}
# avoid podman race-condition by creating manifest manually --
# Error: creating image to hold manifest list: image name "localhost/copyparty-dj:latest" is already associated with image "[0-9a-f]{64}": that name is already in use
for i in $imgs; do
variants=
for a in $archs; do
[[ " ${ngs[*]} " =~ " $i-$a " ]] && continue
variants="$variants containers-storage:localhost/copyparty-$i-$a"
done
podman manifest create copyparty-$i $variants
done
}
[ $push ] && {
for i in $dhub_order; do
podman manifest push --all copyparty-$i copyparty/$i:latest
done
for i in $ghcr_order; do
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest
done
}
echo ok


@@ -184,9 +184,9 @@ necho() {
mv {markupsafe,jinja2} j2/
necho collecting pyftpdlib
f="../build/pyftpdlib-1.5.7.tar.gz"
[ -e "$f" ] ||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.7.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f


@@ -1,7 +1,9 @@
builds copyparty32.exe, fully standalone, compatible with 32bit win7-sp1 and later
requires a win7 vm which has never been connected to the internet and a host-only network with the linux host at 192.168.123.1
copyparty.exe is built by a win10-ltsc-2021 vm with similar setup
first-time setup steps in notes.txt
run build.sh in the vm to fetch src + compile + push a new exe to the linux host for manual publishing


@@ -9,7 +9,17 @@ tee build2.sh | cmp build.sh && rm build2.sh || {
[[ $r =~ [yY] ]] && mv build{2,}.sh && exec ./build.sh
}
[ -e up2k.sh ] && ./up2k.sh
uname -s | grep WOW64 && m=64 || m=32
uname -s | grep NT-10 && w10=1 || w7=1
[ $w7 ] && pyv=37 || pyv=311
esuf=
[ $w7 ] && [ $m = 32 ] && esuf=32
[ $w7 ] && [ $m = 64 ] && esuf=-winpe64
appd=$(cygpath.exe "$APPDATA")
spkgs=$appd/Python/Python$pyv/site-packages
dl() { curl -fkLO "$1"; }
@@ -25,14 +35,23 @@ python copyparty-sfx.py --version
rm -rf mods; mkdir mods
cp -pR $TEMP/pe-copyparty/copyparty/ $TEMP/pe-copyparty/{ftp,j2}/* mods/
[ $w10 ] && rm -rf mods/{jinja2,markupsafe}
af() { awk "$1" <$2 >tf; mv tf "$2"; }
rm -rf mods/magic/
[ $w7 ] && {
sed -ri /pickle/d mods/jinja2/_compat.py
sed -ri '/(bccache|PackageLoader)/d' mods/jinja2/__init__.py
af '/^class/{s=0}/^class PackageLoader/{s=1}!s' mods/jinja2/loaders.py
}
[ $w10 ] && {
sed -ri '/(bccache|PackageLoader)/d' $spkgs/jinja2/__init__.py
for f in nodes async_utils; do
sed -ri 's/\binspect\b/os/' $spkgs/jinja2/$f.py
done
}
sed -ri /fork_process/d mods/pyftpdlib/servers.py
af '/^class _Base/{s=1}!s' mods/pyftpdlib/authorizers.py
@@ -43,6 +62,7 @@ read a b c d _ < <(
sed -r 's/[^0-9]+//;s/[" )]//g;s/[-,]/ /g;s/$/ 0/'
)
sed -r 's/1,2,3,0/'$a,$b,$c,$d'/;s/1\.2\.3/'$a.$b.$c/ <loader.rc >loader.rc2
sed -ri s/copyparty.exe/copyparty$esuf.exe/ loader.rc2
excl=(
copyparty.broker_mp
@@ -59,7 +79,12 @@ excl=(
urllib.robotparser
zipfile
)
[ $w10 ] && excl+=(
PIL.ImageQt
PIL.ImageShow
PIL.ImageTk
PIL.ImageWin
) || excl+=(
PIL
PIL.ExifTags
PIL.Image
@@ -68,7 +93,7 @@ false || excl+=(
)
excl=( "${excl[@]/#/--exclude-module }" )
$APPDATA/python/python$pyv/scripts/pyinstaller \
-y --clean -p mods --upx-dir=. \
${excl[*]} \
--version-file loader.rc2 -i loader.ico -n copyparty -c -F loader.py \
@@ -77,4 +102,9 @@ $APPDATA/python/python37/scripts/pyinstaller \
# ./upx.exe --best --ultra-brute --lzma -k dist/copyparty.exe
printf $(sha512sum ~/Downloads/dist/copyparty.exe | head -c 18 | sed -r 's/(..)/\\x\1/g') |
base64 | head -c12 >> dist/copyparty.exe
dist/copyparty.exe --version
curl -fkT dist/copyparty.exe -b cppwd=wark https://192.168.123.1:3923/copyparty$esuf.exe

scripts/pyinstaller/depchk.sh Executable file

@@ -0,0 +1,56 @@
#!/bin/bash
set -e
e=0
cd ~/dev/pyi
ckpypi() {
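# for each pinned pypi dep: fetch its metadata, find the newest release, and flag the dep
# as outdated if none of that release's files exist locally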
deps=(
altgraph
pefile
pyinstaller
pyinstaller-hooks-contrib
pywin32-ctypes
Jinja2
MarkupSafe
mutagen
Pillow
)
for dep in "${deps[@]}"; do
k=
echo -n .
curl -s https://pypi.org/pypi/$dep/json >h
ver=$(jq <h -r '.releases|keys|.[]' | sort -V | tail -n 1)
while IFS= read -r fn; do
[ -e "$fn" ] && k="$fn" && break
done < <(
jq -r '.releases["'"$ver"'"]|.[]|.filename' <h
)
[ -z "$k" ] && echo "outdated: $dep" && cp h "ng-$dep" && e=1
done
true
}
ckgh() {
deps=(
upx/upx
)
for dep in "${deps[@]}"; do
k=
echo -n .
while IFS= read -r fn; do
[ -e "$fn" ] && k="$fn" && break
done < <(
curl -s https://api.github.com/repos/$dep/releases | tee h |
jq -r 'first|.assets|.[]|.name'
)
[ -z "$k" ] && echo "outdated: $dep" && cp h "ng-$dep" e=1
done
true
}
ckpypi
ckgh
rm h
exit $e


@@ -1,17 +1,30 @@
d5510a24cb5e15d6d30677335bbc7624c319b371c0513981843dc51d9b3a1e027661096dfcfc540634222bb2634be6db55bf95185b30133cb884f1e47652cf53 altgraph-0.17.3-py2.py3-none-any.whl
eda6c38fc4d813fee897e969ff9ecc5acc613df755ae63df0392217bbd67408b5c1f6c676f2bf5497b772a3eb4e1a360e1245e1c16ee83f0af555f1ab82c3977 Git-2.39.1-32-bit.exe
17ce52ba50692a9d964f57a23ac163fb74c77fdeb2ca988a6d439ae1fe91955ff43730c073af97a7b3223093ffea3479a996b9b50ee7fba0869247a56f74baa6 pefile-2023.2.7-py3-none-any.whl
85a041cc95cf493f5e2ebc2ca406d2718735e43951988810dc448d29e9ee0bcdb1ca19e0c22243441f45633969af8027469f29f6288f6830c724a3fa38886e5c pyinstaller-5.8.0-py3-none-win32.whl
adf0d23a98da38056de25e07e68921739173efc70fb9bf3f68d8c7c3d0d092e09efa69d35c0c9ecc990bc3c5fa62038227ef480ed06ddfaf05353f6e468f5dca pyinstaller-5.8.0-py3-none-win_amd64.whl
01d7f8125966ed30389a879ba69d2c1fd3212bafad3fb485317580bcb9f489e8b901c4d325f6cb8a52986838ba6d44d3852e62b27c1f1d5a576899821cc0ae02 pyinstaller_hooks_contrib-2023.0-py2.py3-none-any.whl
132a5380f33a245f2e744413a0e1090bc42b7356376de5121397cec5976b04b79f7c9ebe28af222c9c7b01461f7d7920810d220e337694727e0d7cd9e91fa667 pywin32_ctypes-0.2.0-py2.py3-none-any.whl
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
4b6e9ae967a769fe32be8cf0bc0d5a213b138d1e0344e97656d08a3d15578d81c06c45b334c872009db2db8f39db0c77c94ff6c35168d5e13801917667c08678 upx-4.0.2-win32.zip
# up2k (win7)
a7d259277af4948bf960682bc9fb45a44b9ae9a19763c8a7c313cef4aa9ec2d447d843e4a7c409e9312c8c8f863a24487a8ee4ffa6891e9b1c4e111bb4723861 certifi-2022.12.7-py3-none-any.whl
2822c0dae180b1c8cfb7a70c8c00bad62af9afdbb18b656236680def9d3f1fcdcb8ef5eb64fc3b4c934385cd175ad5992a2284bcba78a243130de75b2d1650db charset_normalizer-3.1.0-cp37-cp37m-win32.whl
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
220e0e122d5851aaccf633224dd7fbd3ba8c8d2720944d8019d6a276ed818d83e3426fe21807f22d673b5428f19fcf9a6b4e645f69bbecd967c568bb6aeb7c8d requests-2.28.2-py3-none-any.whl
8770011f4ad1fe40a3062e6cdf1fda431530c59ee7de3fc5f8c57db54bfdb71c3aa220ca0e0bb1874fc6700e9ebb57defbae54ac84938bc9ad8f074910106681 urllib3-1.26.14-py2.py3-none-any.whl
# win7
91c025f7d94bcdf93df838fab67053165a414fc84e8496f92ecbb910dd55f6b6af5e360bbd051444066880c5a6877e75157bd95e150ead46e5c605930dfc50f2 future-0.18.2.tar.gz
c06b3295d1d0b0f0a6f9a6cd0be861b9b643b4a5ea37857f0bd41c45deaf27bb927b71922dab74e633e43d75d04a9bd0d1c4ad875569740b0f2a98dd2bfa5113 importlib_metadata-5.0.0-py3-none-any.whl
4e71295da5d1a26c71a0baa8905fdccb522bb16d56bc964db636de68688c5bf703f3b2880cdeea07138789e0eb4506e06f9ccd0da906c89d2cb6d55ad64659ea pip-22.3-py3-none-any.whl
6bb73cc2db795c59c92f2115727f5c173cacc9465af7710db9ff2f2aec2d73130d0992d0f16dcb3fac222dc15c0916562d0813b2337401022020673a4461df3d python-3.7.9-amd64.exe
500747651c87f59f2436c5ab91207b5b657856e43d10083f3ce27efb196a2580fadd199a4209519b409920c562aaaa7dcbdfb83ed2072a43eaccae6e2d056f31 python-3.7.9.exe
68e1b618d988be56aaae4e2eb92bc0093627a00441c1074ebe680c41aa98a6161e52733ad0c59888c643a33fe56884e4f935178b2557fbbdd105e92e0d993df6 windows6.1-kb2533623-x64.msu
479a63e14586ab2f2228208116fc149ed8ee7b1e4ff360754f5bda4bf765c61af2e04b5ef123976623d04df4976b7886e0445647269da81436bd0a7b5671d361 windows6.1-kb2533623-x86.msu
ba91ab0518c61eff13e5612d9e6b532940813f6b56e6ed81ea6c7c4d45acee4d98136a383a25067512b8f75538c67c987cf3944bfa0229e3cb677e2fb81e763e zipp-3.10.0-py3-none-any.whl
# win10
00558cca2e0ac813d404252f6e5aeacb50546822ecb5d0570228b8ddd29d94e059fbeb6b90393dee5abcddaca1370aca784dc9b095cbb74e980b3c024767fb24 Jinja2-3.1.2-py3-none-any.whl
b1db6f5a79fc15391547643e5973cf5946c0acfa6febb68bc90fc3f66369681100cc100f32dd04256dcefa510e7864c718515a436a4af3a10fe205c413c7e693 MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl
4a20aeb52d4fde6aabcba05ee261595eeb5482c72ee27332690f34dd6e7a49c0b3ba3813202ac15c9d21e29f1cd803f2e79ccc1c45ec314fcd0a937016bcbc56 mutagen-1.46.0-py3-none-any.whl
ea152624499966615ee74f2aefed27da528785e1215f46d61e79c5290bb8105fd98e9948938efbca9cd19e2f1dd48c9e712b4f30a4148a0ed5d1ff2dff77106e Pillow-9.4.0-cp311-cp311-win_amd64.whl
2b04b196f1115f42375e623a35edeb71565dfd090416b22510ec0270fefe86f7d397a98aabbe9ebfe3f6a355fe25c487a4875d4252027d0a61ccb64cacd7631d python-3.11.2-amd64.exe

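(editor's note) the list above pins every build dependency to a SHA-512 checksum. As a quick aid, not part of the project's own tooling, here is a minimal Python sketch for checking a download folder against such a list; the filename deps.sha512 and the helper name verify() are illustrative assumptions:

import hashlib
import os
import sys

# verify downloaded deps against the pinned sha512 list;
# usage: python verify.py deps.sha512 ~/Downloads
def verify(listfile, dldir):
    ok = True
    with open(listfile, encoding="utf-8") as f:
        for ln in f:
            ln = ln.strip()
            if not ln or ln.startswith("#"):
                continue  # skip blank lines and section headers
            want, name = ln.split(None, 1)
            with open(os.path.join(dldir, name), "rb") as df:
                got = hashlib.sha512(df.read()).hexdigest()
            print(("ok  " if got == want else "BAD "), name)
            ok = ok and got == want
    return ok

if __name__ == "__main__":
    sys.exit(0 if verify(sys.argv[1], sys.argv[2]) else 1)
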
View File

@@ -1,16 +1,31 @@
# links to download pages for each dep
https://pypi.org/project/altgraph/#files
https://pypi.org/project/future/#files
https://pypi.org/project/importlib-metadata/#files
https://pypi.org/project/pefile/#files
https://pypi.org/project/pip/#files
https://pypi.org/project/pyinstaller/#files
https://pypi.org/project/pyinstaller-hooks-contrib/#files
https://pypi.org/project/pywin32-ctypes/#files
https://pypi.org/project/typing-extensions/#files
https://pypi.org/project/zipp/#files
https://github.com/git-for-windows/git/releases/latest
https://github.com/upx/upx/releases/latest
# win10 additionals
https://pypi.org/project/Jinja2/#files
https://pypi.org/project/MarkupSafe/#files
https://pypi.org/project/mutagen/#files
https://pypi.org/project/Pillow/#files
# up2k (win7) additionals
https://pypi.org/project/certifi/#files
https://pypi.org/project/charset-normalizer/#files # cp37-cp37m-win32.whl
https://pypi.org/project/idna/#files
https://pypi.org/project/requests/#files
https://pypi.org/project/urllib3/#files
# win7 additionals
https://pypi.org/project/future/#files
https://pypi.org/project/importlib-metadata/#files
https://pypi.org/project/pip/#files
https://pypi.org/project/typing-extensions/#files
https://pypi.org/project/zipp/#files
https://support.microsoft.com/en-us/topic/microsoft-security-advisory-insecure-library-loading-could-allow-remote-code-execution-486ea436-2d47-27e5-6cb9-26ab7230c704
http://www.microsoft.com/download/details.aspx?familyid=c79c41b0-fbfb-4d61-b5d8-cadbe184b9fc
http://www.microsoft.com/download/details.aspx?familyid=146ed6f7-b605-4270-8ec4-b9f0f284bb9e

View File

@@ -1,8 +1,10 @@
#!/bin/bash
set -e
genico() {
# imagemagick png compression is broken, use pillow instead
convert ~/AndroidStudioProjects/PartyUP/metadata/en-US/images/icon.png a.bmp
convert $1 a.bmp
#convert a.bmp -trim -resize '48x48!' -strip a.png
python3 <<'EOF'
@@ -17,11 +19,15 @@ EOF
pngquant --strip --quality 30 a.png
mv a-*.png a.png
python3 <<'EOF'
python3 <<EOF
from PIL import Image
Image.open('a.png').save('loader.ico',sizes=[(48,48)])
Image.open('a.png').save('$2',sizes=[(48,48)])
EOF
rm a.{bmp,png}
}
genico ~/AndroidStudioProjects/PartyUP/metadata/en-US/images/icon.png loader.ico
genico https://raw.githubusercontent.com/googlefonts/noto-emoji/main/png/512/emoji_u1f680.png up2k.ico
ls -al
exit 0

View File

@@ -1,29 +1,44 @@
# coding: utf-8
import base64
import hashlib
import os
import re
import shutil
import subprocess as sp
import sys
import traceback
v = r""" v = r"""
this is the EXE edition of copyparty, compatible with Windows7-SP1 this 32-bit copyparty.exe is compatible with Windows7-SP1 and later.
and later. To make this possible, the EXE was compiled with Python To make this possible, the EXE was compiled with Python 3.7.9,
3.7.9, which is EOL and does not receive security patches anymore. which is EOL and does not receive security patches anymore.
if possible, for performance and security reasons, please use this instead: if possible, for performance and security reasons, please use this instead:
https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
""" """
if sys.version_info > (3, 10):
v = r"""
this 64-bit copyparty.exe is compatible with Windows 8 and later.
No security issues were known to affect this EXE at build time,
however that may have changed since then.
if possible, for performance and security reasons, please use this instead:
https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
"""
if sys.maxsize > 2 ** 32:
v = v.replace("32-bit", "64-bit")
try:
print(v.replace("\n", "\n▒▌ ")[1:] + "\n")
except:
print(v.replace("\n", "\n|| ")[1:] + "\n")
import re
import os
import sys
import shutil
import traceback
import subprocess as sp
def confirm(rv):
print()
print("retcode", rv if rv else traceback.format_exc())
@@ -36,6 +51,30 @@ def confirm(rv):
sys.exit(rv or 1)
def ckck():
hs = hashlib.sha512()
with open(sys.executable, "rb") as f:
f.seek(-12, 2)
rem = f.tell()
esum = f.read().decode("ascii", "replace")
f.seek(0)
while rem:
buf = f.read(min(rem, 64 * 1024))
rem -= len(buf)
hs.update(buf)
if not buf:
t = "unexpected eof @ {} with {} left"
raise Exception(t.format(f.tell(), rem))
fsum = base64.b64encode(hs.digest()[:9]).decode("utf-8")
if fsum != esum:
t = "exe integrity check error; [{}] != [{}]"
raise Exception(t.format(esum, fsum))
ckck()
def meicln(mod):
pdir, mine = os.path.split(mod)
dirs = os.listdir(pdir)

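(editor's note) the ckck() routine added above treats the last 12 bytes of the running EXE as a base64 tag covering everything before it: it hashes the file minus those 12 bytes with SHA-512 and compares the first 9 digest bytes, base64-encoded, against the tag. The matching stamping step has to happen at build time and is not part of this file; a minimal sketch of that idea, where the helper name stamp() and the standalone-script form are illustrative assumptions rather than the project's actual build code:

import base64
import hashlib
import sys

def stamp(path):
    # hash the exe as produced by pyinstaller...
    with open(path, "rb") as f:
        digest = hashlib.sha512(f.read()).digest()

    # ...and append the first 9 digest bytes as 12 base64 chars;
    # ckck() later hashes everything except these 12 bytes and compares
    with open(path, "ab") as f:
        f.write(base64.b64encode(digest[:9]))

if __name__ == "__main__":
    stamp(sys.argv[1])
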
View File

@@ -16,7 +16,7 @@ VSVersionInfo(
StringTable(
'000004b0',
[StringStruct('CompanyName', 'ocv.me'),
StringStruct('FileDescription', 'copyparty'),
StringStruct('FileDescription', 'copyparty file server'),
StringStruct('FileVersion', '1.2.3'),
StringStruct('InternalName', 'copyparty'),
StringStruct('LegalCopyright', '2019, ed'),

View File

@@ -2,34 +2,53 @@ run ./build.sh in git-bash to build + upload the exe
## ============================================================
## first-time setup on a stock win7x32sp1 vm:
## first-time setup on a stock win7x32sp1 and/or win10x64 vm:
##
to obtain the files referenced below, see ./deps.txt
download + install git (32bit OK on 64):
http://192.168.123.1:3923/ro/pyi/Git-2.38.1-32-bit.exe
http://192.168.123.1:3923/ro/pyi/Git-2.39.1-32-bit.exe
===[ copy-paste into git-bash ]================================
uname -s | grep NT-10 && w10=1 || {
w7=1; uname -s | grep WOW64 && w7x64=1 || w7x32=1
}
fns=(
upx-4.0.0-win32.zip
pip-22.3-py3-none-any.whl
altgraph-0.17.3-py2.py3-none-any.whl
pefile-2023.2.7-py3-none-any.whl
pyinstaller-5.8.0-py3-none-win_amd64.whl
pyinstaller_hooks_contrib-2023.0-py2.py3-none-any.whl
pywin32_ctypes-0.2.0-py2.py3-none-any.whl
upx-4.0.2-win32.zip
)
[ $w10 ] && fns+=(
mutagen-1.46.0-py3-none-any.whl
Pillow-9.4.0-cp311-cp311-win_amd64.whl
python-3.11.2-amd64.exe
)
[ $w7 ] && fns+=(
certifi-2022.12.7-py3-none-any.whl
chardet-5.1.0-py3-none-any.whl
idna-3.4-py3-none-any.whl
requests-2.28.2-py3-none-any.whl
urllib3-1.26.14-py2.py3-none-any.whl
)
[ $w7 ] && fns+=(
future-0.18.2.tar.gz
importlib_metadata-5.0.0-py3-none-any.whl
pefile-2022.5.30.tar.gz
pip-22.3-py3-none-any.whl
pyinstaller_hooks_contrib-2022.11-py2.py3-none-any.whl
pywin32_ctypes-0.2.0-py2.py3-none-any.whl
typing_extensions-4.4.0-py3-none-any.whl
zipp-3.10.0-py3-none-any.whl
)
uname -s | grep WOW64 && fns+=(
[ $w7x64 ] && fns+=(
windows6.1-kb2533623-x64.msu
pyinstaller-5.6.1-py3-none-win_amd64.whl
pyinstaller-5.8.0-py3-none-win_amd64.whl
python-3.7.9-amd64.exe
) || fns+=(
)
[ $w7x32 ] && fns+=(
windows6.1-kb2533623-x86.msu
pyinstaller-5.6.1-py3-none-win32.whl
pyinstaller-5.8.0-py3-none-win32.whl
python-3.7.9.exe
)
dl() { curl -fkLOC- "$1" && return 0; echo "$1"; return 1; }
@@ -45,20 +64,35 @@ manually install:
python-3.7.9
===[ copy-paste into git-bash ]================================
uname -s | grep NT-10 && w10=1 || w7=1
[ $w7 ] && pyv=37 || pyv=311
appd=$(cygpath.exe "$APPDATA")
cd ~/Downloads &&
unzip upx-*-win32.zip &&
mv upx-*/upx.exe . &&
python -m ensurepip &&
python -m pip install --user -U pip-*.whl &&
python -m pip install --user -U pyinstaller-*.whl pefile-*.tar.gz pywin32_ctypes-*.whl pyinstaller_hooks_contrib-*.whl altgraph-*.whl future-*.tar.gz importlib_metadata-*.whl typing_extensions-*.whl zipp-*.whl &&
{ [ $w7 ] || python -m pip install --user -U mutagen-*.whl Pillow-*.whl; } &&
{ [ $w10 ] || python -m pip install --user -U {requests,urllib3,charset_normalizer,certifi,idna}-*.whl; } &&
{ [ $w10 ] || python -m pip install --user -U future-*.tar.gz importlib_metadata-*.whl typing_extensions-*.whl zipp-*.whl; } &&
python -m pip install --user -U pyinstaller-*.whl pefile-*.whl pywin32_ctypes-*.whl pyinstaller_hooks_contrib-*.whl altgraph-*.whl &&
sed -ri 's/--lzma/--best/' $appd/Python/Python$pyv/site-packages/pyinstaller/building/utils.py &&
curl -fkLO https://192.168.123.1:3923/cpp/scripts/uncomment.py &&
python uncomment.py $(for d in $appd/Python/Python$pyv/site-packages/{requests,urllib3,charset_normalizer,certifi,idna}; do find $d -name \*.py; done) &&
cd &&
rm -f build.sh &&
curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/build.sh &&
curl -fkLO https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.sh &&
echo ok
# python -m pip install --user -U Pillow-9.2.0-cp37-cp37m-win32.whl
# sed -ri 's/, bestopt, /]+bestopt+[/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
# sed -ri 's/(^\s+bestopt = ).*/\1["--best","--lzma","--ultra-brute"]/' $APPDATA/Python/Python37/site-packages/pyinstaller/building/utils.py
===[ win10: copy-paste into git-bash ]=========================
#for f in $appd/Python/Python311/site-packages/mutagen/*.py; do awk -i inplace '/^\s*def _?(save|write)/{sub(/d.*/," ");s=$0;ns=length(s)} ns&&/[^ ]/&&substr($0,0,ns)!=s{ns=0} !ns' "$f"; done &&
python uncomment.py $appd/Python/Python311/site-packages/{mutagen,PIL,jinja2,markupsafe}/*.py &&
echo ok
## ============================================================
## notes

View File

@@ -0,0 +1,29 @@
# UTF-8
VSVersionInfo(
ffi=FixedFileInfo(
filevers=(1,2,3,0),
prodvers=(1,2,3,0),
mask=0x3f,
flags=0x0,
OS=0x4,
fileType=0x1,
subtype=0x0,
date=(0, 0)
),
kids=[
StringFileInfo(
[
StringTable(
'000004b0',
[StringStruct('CompanyName', 'ocv.me'),
StringStruct('FileDescription', 'copyparty uploader / filesearch command'),
StringStruct('FileVersion', '1.2.3'),
StringStruct('InternalName', 'up2k'),
StringStruct('LegalCopyright', '2019, ed'),
StringStruct('OriginalFilename', 'up2k.exe'),
StringStruct('ProductName', 'copyparty up2k client'),
StringStruct('ProductVersion', '1.2.3')])
]),
VarFileInfo([VarStruct('Translation', [0, 1200])])
]
)

View File

@@ -0,0 +1,48 @@
#!/bin/bash
set -e
curl -k https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.sh |
tee up2k2.sh | cmp up2k.sh && rm up2k2.sh || {
[ -s up2k2.sh ] || exit 1
echo "new up2k script; upgrade y/n:"
while true; do read -u1 -n1 -r r; [[ $r =~ [yYnN] ]] && break; done
[[ $r =~ [yY] ]] && mv up2k{2,}.sh && exec ./up2k.sh
}
uname -s | grep -E 'WOW64|NT-10' && echo need win7-32 && exit 1
dl() { curl -fkLO "$1"; }
cd ~/Downloads
dl https://192.168.123.1:3923/cpp/bin/up2k.py
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.ico
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.rc
dl https://192.168.123.1:3923/cpp/scripts/pyinstaller/up2k.spec
# $LOCALAPPDATA/programs/python/python37-32/python -m pip install --user -U pyinstaller requests
grep -E '^from .ssl_ import' $APPDATA/python/python37/site-packages/urllib3/util/proxy.py && {
echo golfing
echo > $APPDATA/python/python37/site-packages/requests/certs.py
sed -ri 's/^(DEFAULT_CA_BUNDLE_PATH = ).*/\1""/' $APPDATA/python/python37/site-packages/requests/utils.py
sed -ri '/^import zipfile$/d' $APPDATA/python/python37/site-packages/requests/utils.py
sed -ri 's/"idna"//' $APPDATA/python/python37/site-packages/requests/packages.py
sed -ri 's/import charset_normalizer.*/pass/' $APPDATA/python/python37/site-packages/requests/compat.py
sed -ri 's/raise.*charset_normalizer.*/pass/' $APPDATA/python/python37/site-packages/requests/__init__.py
sed -ri 's/import charset_normalizer.*//' $APPDATA/python/python37/site-packages/requests/packages.py
sed -ri 's/chardet.__name__/"\\roll\\tide"/' $APPDATA/python/python37/site-packages/requests/packages.py
sed -ri 's/chardet,//' $APPDATA/python/python37/site-packages/requests/models.py
for n in util/__init__.py connection.py; do awk -i inplace '/^from (\.util)?\.ssl_ /{s=1} !s; /^\)/{s=0}' $APPDATA/python/python37/site-packages/urllib3/$n; done
sed -ri 's/^from .ssl_ import .*//' $APPDATA/python/python37/site-packages/urllib3/util/proxy.py
echo golfed
}
read a b _ < <(awk -F\" '/^S_VERSION =/{$0=$2;sub(/\./," ");print}' < up2k.py)
sed -r 's/1,2,3,0/'$a,$b,0,0'/;s/1\.2\.3/'$a.$b.0/ <up2k.rc >up2k.rc2
#python uncomment.py up2k.py
$APPDATA/python/python37/scripts/pyinstaller -y --clean --upx-dir=. up2k.spec
./dist/up2k.exe --version
curl -fkT dist/up2k.exe -HPW:wark https://192.168.123.1:3923/

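(editor's note) the awk/sed pair near the end of up2k.sh reads S_VERSION out of up2k.py and stamps major.minor into a copy of the version resource (up2k.rc2). For readability, a rough Python equivalent of that step; it assumes S_VERSION is a plain string literal such as S_VERSION = "1.3" and is only an illustration, the actual build uses the shell one-liner above:

import re

# pull major/minor out of up2k.py, e.g. S_VERSION = "1.3"
with open("up2k.py", encoding="utf-8") as f:
    ver = re.search(r'^S_VERSION = "([^"]+)"', f.read(), re.M).group(1)
a, b = (ver.split(".") + ["0"])[:2]

# replace the 1,2,3,0 / 1.2.3 placeholders in the version resource
with open("up2k.rc", encoding="utf-8") as f:
    rc = f.read()
rc = rc.replace("1,2,3,0", "%s,%s,0,0" % (a, b)).replace("1.2.3", "%s.%s.0" % (a, b))
with open("up2k.rc2", "w", encoding="utf-8") as f:
    f.write(rc)
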
View File

@@ -0,0 +1,78 @@
# -*- mode: python ; coding: utf-8 -*-
block_cipher = None
a = Analysis(
['up2k.py'],
pathex=[],
binaries=[],
datas=[],
hiddenimports=[],
hookspath=[],
hooksconfig={},
runtime_hooks=[],
excludes=[
'ftplib',
'lzma',
'pickle',
'ssl',
'tarfile',
'bz2',
'zipfile',
'tracemalloc',
'zlib',
'urllib3.util.ssl_',
'urllib3.contrib.pyopenssl',
'urllib3.contrib.socks',
'certifi',
'idna',
'chardet',
'charset_normalizer',
'email.contentmanager',
'email.policy',
'encodings.zlib_codec',
'encodings.base64_codec',
'encodings.bz2_codec',
'encodings.charmap',
'encodings.hex_codec',
'encodings.palmos',
'encodings.punycode',
'encodings.rot_13',
],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False,
)
# this is the only change to the autogenerated specfile:
xdll = ["libcrypto-1_1.dll"]
a.binaries = TOC([x for x in a.binaries if x[0] not in xdll])
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(
pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='up2k',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True,
disable_windowed_traceback=False,
argv_emulation=False,
target_arch=None,
codesign_identity=None,
entitlements_file=None,
version='up2k.rc2',
icon=['up2k.ico'],
)

View File

@@ -0,0 +1,14 @@
#!/bin/bash
set -e
# grep '">encodings.cp' C:/Users/ed/dev/copyparty/bin/dist/xref-up2k.html | sed -r 's/.*encodings.cp//;s/<.*//' | sort -n | uniq | tr '\n' ,
# grep -i encodings -A1 build/up2k/xref-up2k.html | sed -r 's/.*(Missing|Excluded)Module.*//' | grep moduletype -B1 | grep -v moduletype
ex=(
ftplib lzma pickle ssl tarfile bz2 zipfile tracemalloc zlib
urllib3.util.ssl_ urllib3.contrib.pyopenssl urllib3.contrib.socks certifi idna chardet charset_normalizer
email.contentmanager email.policy
encodings.{zlib_codec,base64_codec,bz2_codec,charmap,hex_codec,palmos,punycode,rot_13}
);
cex=(); for a in "${ex[@]}"; do cex+=(--exclude "$a"); done
$APPDATA/python/python37/scripts/pyi-makespec --version-file up2k.rc2 -i up2k.ico -n up2k -c -F up2k.py "${cex[@]}"

View File

@@ -4,7 +4,6 @@ from __future__ import print_function
import os
import sys
import subprocess as sp
from shutil import rmtree
from setuptools import setup, Command
@@ -78,7 +77,11 @@ class clean2(Command):
args = {
"name": NAME,
"version": about["__version__"],
"description": "http file sharing hub",
"description": (
"Portable file server with accelerated resumable uploads, "
+ "deduplication, WebDAV, FTP, zeroconf, media indexer, "
+ "video thumbnails, audio transcoding, and write-only folders"
),
"long_description": long_description, "long_description": long_description,
"long_description_content_type": "text/markdown", "long_description_content_type": "text/markdown",
"author": "ed", "author": "ed",

View File

@@ -98,7 +98,7 @@ class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ka = {}
ex = "daw dav_inf dav_mac dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol"
ex = "daw dav_inf dav_mac dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol"
ka.update(**{k: False for k in ex.split()})
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
@@ -107,13 +107,13 @@ class Cfg(Namespace):
ex = "css_browser hist js_browser no_forget no_hash no_idx" ex = "css_browser hist js_browser no_forget no_hash no_idx"
ka.update(**{k: None for k in ex.split()}) ka.update(**{k: None for k in ex.split()})
ex = "df loris re_maxage rproxy rsp_slp s_wr_slp theme themes turbo" ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
ka.update(**{k: 0 for k in ex.split()}) ka.update(**{k: 0 for k in ex.split()})
ex = "doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles R RS SR" ex = "doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles R RS SR"
ka.update(**{k: "" for k in ex.split()}) ka.update(**{k: "" for k in ex.split()})
ex = "xad xar xau xbd xbr xbu xm" ex = "xad xar xau xbd xbr xbu xiu xm"
ka.update(**{k: [] for k in ex.split()}) ka.update(**{k: [] for k in ex.split()})
super(Cfg, self).__init__( super(Cfg, self).__init__(