Compare commits
110 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dcadf2b11c | ||
|
|
37a690a4c3 | ||
|
|
87ad23fb93 | ||
|
|
5f54d534e3 | ||
|
|
aecae552a4 | ||
|
|
eaa6b3d0be | ||
|
|
c2ace91e52 | ||
|
|
0bac87c36f | ||
|
|
e650d05939 | ||
|
|
85a96e4446 | ||
|
|
2569005139 | ||
|
|
c50cb66aef | ||
|
|
d4c5fca15b | ||
|
|
75cea4f684 | ||
|
|
68c6794d33 | ||
|
|
82f98dd54d | ||
|
|
741d781c18 | ||
|
|
0be1e43451 | ||
|
|
5366bf22bb | ||
|
|
bcd91b1809 | ||
|
|
9bd5738e6f | ||
|
|
bab4aa4c0a | ||
|
|
e965b9b9e2 | ||
|
|
31101427d3 | ||
|
|
a083dc36ba | ||
|
|
9b7b9262aa | ||
|
|
660011fa6e | ||
|
|
ead31b6823 | ||
|
|
4310580cd4 | ||
|
|
b005acbfda | ||
|
|
460709e6f3 | ||
|
|
a8768d05a9 | ||
|
|
f8e3e87a52 | ||
|
|
70f1642d0d | ||
|
|
3fc7561da4 | ||
|
|
9065226c3d | ||
|
|
b7e321fa47 | ||
|
|
664665b86b | ||
|
|
f4f362b7a4 | ||
|
|
577d23f460 | ||
|
|
504e168486 | ||
|
|
f2f9640371 | ||
|
|
ee46f832b1 | ||
|
|
b0e755d410 | ||
|
|
cfd24604d5 | ||
|
|
264894e595 | ||
|
|
5bb9f56247 | ||
|
|
18942ed066 | ||
|
|
85321a6f31 | ||
|
|
baf641396d | ||
|
|
17c91e7014 | ||
|
|
010770684d | ||
|
|
b4c503657b | ||
|
|
71bd306268 | ||
|
|
dd7fab1352 | ||
|
|
dacca18863 | ||
|
|
53d92cc0a6 | ||
|
|
434823f6f0 | ||
|
|
2cb1f50370 | ||
|
|
03f53f6392 | ||
|
|
a70ecd7af0 | ||
|
|
8b81e58205 | ||
|
|
4500c04edf | ||
|
|
6222ddd720 | ||
|
|
8a7135cf41 | ||
|
|
b4c7282956 | ||
|
|
8491a40a04 | ||
|
|
343d38b693 | ||
|
|
6cf53d7364 | ||
|
|
b070d44de7 | ||
|
|
79aa40fdea | ||
|
|
dcaff2785f | ||
|
|
497f5b4307 | ||
|
|
be32ad0da6 | ||
|
|
8ee2bf810b | ||
|
|
28232656a9 | ||
|
|
fbc2424e8f | ||
|
|
94cd13e8b8 | ||
|
|
447ed5ab37 | ||
|
|
af59808611 | ||
|
|
e3406a9f86 | ||
|
|
7fd1d6a4e8 | ||
|
|
0ab2a665de | ||
|
|
3895575bc2 | ||
|
|
138c2bbcbb | ||
|
|
bc7af1d1c8 | ||
|
|
19cd96e392 | ||
|
|
db194ab519 | ||
|
|
02ad4bfab2 | ||
|
|
56b73dcc8a | ||
|
|
7704b9c8a2 | ||
|
|
999b7ae919 | ||
|
|
252b5a88b1 | ||
|
|
01e2681a07 | ||
|
|
aa32f30202 | ||
|
|
195eb53995 | ||
|
|
06fa78f54a | ||
|
|
7a57c9dbf1 | ||
|
|
bb657bfa85 | ||
|
|
87181726b0 | ||
|
|
f1477a1c14 | ||
|
|
4f94a9e38b | ||
|
|
fbed322d3b | ||
|
|
9b0f519e4e | ||
|
|
6cd6dadd06 | ||
|
|
9a28afcb48 | ||
|
|
45b701801d | ||
|
|
062246fb12 | ||
|
|
416ebfdd68 | ||
|
|
731eb92f33 |
1
.vscode/launch.json
vendored
1
.vscode/launch.json
vendored
@@ -8,6 +8,7 @@
|
||||
"module": "copyparty",
|
||||
"console": "integratedTerminal",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"justMyCode": false,
|
||||
"args": [
|
||||
//"-nw",
|
||||
"-ed",
|
||||
|
||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@@ -52,9 +52,11 @@
|
||||
"--disable=missing-module-docstring",
|
||||
"--disable=missing-class-docstring",
|
||||
"--disable=missing-function-docstring",
|
||||
"--disable=import-outside-toplevel",
|
||||
"--disable=wrong-import-position",
|
||||
"--disable=raise-missing-from",
|
||||
"--disable=bare-except",
|
||||
"--disable=broad-except",
|
||||
"--disable=invalid-name",
|
||||
"--disable=line-too-long",
|
||||
"--disable=consider-using-f-string"
|
||||
@@ -64,6 +66,7 @@
|
||||
"editor.formatOnSave": true,
|
||||
"[html]": {
|
||||
"editor.formatOnSave": false,
|
||||
"editor.autoIndent": "keep",
|
||||
},
|
||||
"[css]": {
|
||||
"editor.formatOnSave": false,
|
||||
|
||||
121
README.md
121
README.md
@@ -1,6 +1,6 @@
|
||||
# ⇆🎉 copyparty
|
||||
|
||||
* http file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
|
||||
* portable file sharing hub (py2/py3) [(on PyPI)](https://pypi.org/project/copyparty/)
|
||||
* MIT-Licensed, 2019-05-26, ed @ irc.rizon.net
|
||||
|
||||
|
||||
@@ -75,26 +75,24 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
|
||||
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
|
||||
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
|
||||
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
|
||||
* [upload events](#upload-events) - trigger a script/program on each upload
|
||||
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
|
||||
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
|
||||
* [themes](#themes)
|
||||
* [complete examples](#complete-examples)
|
||||
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
|
||||
* [browser support](#browser-support) - TLDR: yes
|
||||
* [client examples](#client-examples) - interact with copyparty using non-browser clients
|
||||
* [mount as drive](#mount-as-drive) - a remote copyparty server as a local filesystem
|
||||
* [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
|
||||
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
|
||||
* [client-side](#client-side) - when uploading files
|
||||
* [security](#security) - some notes on hardening
|
||||
* [gotchas](#gotchas) - behavior that might be unexpected
|
||||
* [cors](#cors) - cross-site request config
|
||||
* [recovering from crashes](#recovering-from-crashes)
|
||||
* [client crashes](#client-crashes)
|
||||
* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
|
||||
* [HTTP API](#HTTP-API)
|
||||
* [read](#read)
|
||||
* [write](#write)
|
||||
* [admin](#admin)
|
||||
* [general](#general)
|
||||
* [HTTP API](#HTTP-API) - see [devnotes](#./docs/devnotes.md#http-api)
|
||||
* [dependencies](#dependencies) - mandatory deps
|
||||
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
|
||||
* [install recommended deps](#install-recommended-deps)
|
||||
@@ -128,7 +126,7 @@ you may also want these, especially on servers:
|
||||
|
||||
* [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
|
||||
* [contrib/systemd/prisonparty.service](contrib/systemd/prisonparty.service) to run it in a chroot (for extra security)
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
|
||||
* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to [reverse-proxy](#reverse-proxy) behind nginx (for better https)
|
||||
|
||||
and remember to open the ports you want; here's a complete example including every feature copyparty has to offer:
|
||||
```
|
||||
@@ -167,6 +165,7 @@ recommended additional steps on debian which enable audio metadata and thumbnai
|
||||
* upload
|
||||
* ☑ basic: plain multipart, ie6 support
|
||||
* ☑ [up2k](#uploading): js, resumable, multithreaded
|
||||
* unaffected by cloudflare's max-upload-size (100 MiB)
|
||||
* ☑ stash: simple PUT filedropper
|
||||
* ☑ [unpost](#unpost): undo/delete accidental uploads
|
||||
* ☑ [self-destruct](#self-destruct) (specified server-side or client-side)
|
||||
@@ -178,7 +177,7 @@ recommended additional steps on debian which enable audio metadata and thumbnai
|
||||
* browser
|
||||
* ☑ [navpane](#navpane) (directory tree sidebar)
|
||||
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
|
||||
* ☑ audio player (with OS media controls and opus transcoding)
|
||||
* ☑ audio player (with [OS media controls](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) and opus transcoding)
|
||||
* ☑ image gallery with webm player
|
||||
* ☑ textfile browser with syntax hilighting
|
||||
* ☑ [thumbnails](#thumbnails)
|
||||
@@ -209,8 +208,7 @@ project goals / philosophy
|
||||
|
||||
* inverse linux philosophy -- do all the things, and do an *okay* job
|
||||
* quick drop-in service to get a lot of features in a pinch
|
||||
* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
|
||||
* but the resumable multithreaded uploads are p slick ngl
|
||||
* check [the alternatives](./docs/versus.md)
|
||||
* run anywhere, support everything
|
||||
* as many web-browsers and python versions as possible
|
||||
* every browser should at least be able to browse, download, upload files
|
||||
@@ -234,7 +232,7 @@ browser-specific:
|
||||
* Android-Chrome: increase "parallel uploads" for higher speed (android bug)
|
||||
* Android-Firefox: takes a while to select files (their fix for ☝️)
|
||||
* Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
|
||||
* Desktop-Firefox: may stop you from deleting files you've uploaded until you visit `about:memory` and click `Minimize memory usage`
|
||||
* Desktop-Firefox: [may stop you from unplugging USB flashdrives](https://bugzilla.mozilla.org/show_bug.cgi?id=1792598) until you visit `about:memory` and click `Minimize memory usage`
|
||||
|
||||
server-os-specific:
|
||||
* RHEL8 / Rocky8: you can run copyparty using `/usr/libexec/platform-python`
|
||||
@@ -252,23 +250,15 @@ server-os-specific:
|
||||
* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
|
||||
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
|
||||
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
|
||||
* [the database can get stuck](https://github.com/9001/copyparty/issues/10)
|
||||
* has only happened once but that is once too many
|
||||
* luckily not dangerous for file integrity and doesn't really stop uploads or anything like that
|
||||
* but would really appreciate some logs if anyone ever runs into it again
|
||||
* probably more, pls let me know
|
||||
|
||||
## not my bugs
|
||||
|
||||
* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded
|
||||
|
||||
* [Chrome issue 1354816](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816) -- chrome may eat all RAM uploading over plaintext http with `mt` enabled
|
||||
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
|
||||
|
||||
* more amusingly, [Chrome issue 1354800](https://bugs.chromium.org/p/chromium/issues/detail?id=1354800) -- chrome may eat all RAM uploading in general (altho you probably won't run into this one)
|
||||
|
||||
* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive and likely to run into the above gc bugs)
|
||||
|
||||
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- sometimes forgets to close filedescriptors during upload so the browser can crash after ~4000 files
|
||||
* [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
|
||||
|
||||
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
|
||||
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
|
||||
@@ -291,6 +281,9 @@ server-os-specific:
|
||||
|
||||
upgrade notes
|
||||
|
||||
* `1.6.0` (2023-01-29):
|
||||
* http-api: delete/move is now `POST` instead of `GET`
|
||||
* everything other than `GET` and `HEAD` must pass [cors validation](#cors)
|
||||
* `1.5.0` (2022-12-03): [new chunksize formula](https://github.com/9001/copyparty/commit/54e1c8d261df) for files larger than 128 GiB
|
||||
* **users:** upgrade to the latest [cli uploader](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) if you use that
|
||||
* **devs:** update third-party up2k clients (if those even exist)
|
||||
@@ -305,7 +298,7 @@ upgrade notes
|
||||
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
|
||||
|
||||
* can I make copyparty download a file to my server if I give it a URL?
|
||||
* not really, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
|
||||
* yes, using [hooks](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
|
||||
|
||||
|
||||
# accounts and volumes
|
||||
@@ -700,7 +693,7 @@ using arguments or config files, or a mix of both:
|
||||
|
||||
## zeroconf
|
||||
|
||||
announce enabled services on the LAN if you specify the `-z` option, which enables [mdns](#mdns) and [ssdp](#ssdp)
|
||||
announce enabled services on the LAN ([pic](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png)) -- `-z` enables both [mdns](#mdns) and [ssdp](#ssdp)
|
||||
|
||||
* `--z-on` / `--z-off`' limits the feature to certain networks
|
||||
|
||||
@@ -724,6 +717,10 @@ uses [ssdp](https://en.wikipedia.org/wiki/Simple_Service_Discovery_Protocol) to
|
||||
|
||||
doubleclicking the icon opens the "connect" page which explains how to mount copyparty as a local filesystem
|
||||
|
||||
if copyparty does not appear in windows explorer, use `--zsv` to see why:
|
||||
|
||||
* maybe the discovery multicast was sent from an IP which does not intersect with the server subnets
|
||||
|
||||
|
||||
## qr-code
|
||||
|
||||
@@ -932,6 +929,8 @@ some examples,
|
||||
## other flags
|
||||
|
||||
* `:c,magic` enables filetype detection for nameless uploads, same as `--magic`
|
||||
* needs https://pypi.org/project/python-magic/ `python3 -m pip install --user -U python-magic`
|
||||
* on windows grab this instead `python3 -m pip install --user -U python-magic-bin`
|
||||
|
||||
|
||||
## database location
|
||||
@@ -1000,9 +999,18 @@ copyparty can invoke external programs to collect additional metadata for files
|
||||
if something doesn't work, try `--mtag-v` for verbose error messages
|
||||
|
||||
|
||||
## upload events
|
||||
## event hooks
|
||||
|
||||
trigger a script/program on each upload like so:
|
||||
trigger a program on uploads, renames etc ([examples](./bin/hooks/))
|
||||
|
||||
you can set hooks before and/or after an event happens, and currently you can hook uploads, moves/renames, and deletes
|
||||
|
||||
there's a bunch of flags and stuff, see `--help-hooks`
|
||||
|
||||
|
||||
### upload events
|
||||
|
||||
the older, more powerful approach ([examples](./bin/mtag/)):
|
||||
|
||||
```
|
||||
-v /mnt/inc:inc:w:c,mte=+x1:c,mtp=x1=ad,kn,/usr/bin/notify-send
|
||||
@@ -1012,11 +1020,12 @@ so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, app
|
||||
|
||||
that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
|
||||
|
||||
note that it will only trigger on new unique files, not dupes
|
||||
note that this is way more complicated than the new [event hooks](#event-hooks) but this approach has the following advantages:
|
||||
* non-blocking and multithreaded; doesn't hold other uploads back
|
||||
* you get access to tags from FFmpeg and other mtp parsers
|
||||
* only trigger on new unique files, not dupes
|
||||
|
||||
and it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
|
||||
|
||||
if this becomes popular maybe there should be a less janky way to do it actually
|
||||
note that it will occupy the parsing threads, so fork anything expensive (or set `kn` to have copyparty fork it for you) -- otoh if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1`
|
||||
|
||||
|
||||
## hiding from google
|
||||
@@ -1068,6 +1077,21 @@ see the top of [./copyparty/web/browser.css](./copyparty/web/browser.css) where
|
||||
`-lo log/cpp-%Y-%m%d-%H%M%S.txt.xz`
|
||||
|
||||
|
||||
## reverse-proxy
|
||||
|
||||
running copyparty next to other websites hosted on an existing webserver such as nginx or apache
|
||||
|
||||
you can either:
|
||||
* give copyparty its own domain or subdomain (recommended)
|
||||
* or do location-based proxying, using `--rp-loc=/stuff` to tell copyparty where it is mounted -- has a slight performance cost and higher chance of bugs
|
||||
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
|
||||
|
||||
example webserver configs:
|
||||
|
||||
* [nginx config](contrib/nginx/copyparty.conf) -- entire domain/subdomain
|
||||
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
|
||||
|
||||
|
||||
# browser support
|
||||
|
||||
TLDR: yes
|
||||
@@ -1127,11 +1151,11 @@ interact with copyparty using non-browser clients
|
||||
* curl/wget: upload some files (post=file, chunk=stdin)
|
||||
* `post(){ curl -F act=bput -F f=@"$1" http://127.0.0.1:3923/?pw=wark;}`
|
||||
`post movie.mkv`
|
||||
* `post(){ curl -b cppwd=wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
|
||||
* `post(){ curl -H pw:wark -H rand:8 -T "$1" http://127.0.0.1:3923/;}`
|
||||
`post movie.mkv`
|
||||
* `post(){ wget --header='Cookie: cppwd=wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
|
||||
* `post(){ wget --header='pw: wark' --post-file="$1" -O- http://127.0.0.1:3923/?raw;}`
|
||||
`post movie.mkv`
|
||||
* `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
|
||||
* `chunk(){ curl -H pw:wark -T- http://127.0.0.1:3923/;}`
|
||||
`chunk <movie.mkv`
|
||||
|
||||
* bash: when curl and wget is not available or too boring
|
||||
@@ -1139,7 +1163,7 @@ interact with copyparty using non-browser clients
|
||||
* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
|
||||
|
||||
* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* file uploads, file-search, folder sync, autoresume of aborted/broken uploads
|
||||
* can be downloaded from copyparty: controlpanel -> connect -> [up2k.py](http://127.0.0.1:3923/.cpr/a/up2k.py)
|
||||
* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
|
||||
|
||||
@@ -1155,7 +1179,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
|
||||
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
|
||||
b512 <movie.mkv
|
||||
|
||||
you can provide passwords using cookie `cppwd=hunter2`, as a url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)
|
||||
you can provide passwords using header `PW: hunter2`, cookie `cppwd=hunter2`, url-param `?pw=hunter2`, or with basic-authentication (either as the username or password)
|
||||
|
||||
NOTE: curl will not send the original filename if you use `-T` combined with url-params! Also, make sure to always leave a trailing slash in URLs unless you want to override the filename
|
||||
|
||||
@@ -1191,7 +1215,7 @@ below are some tweaks roughly ordered by usefulness:
|
||||
* `--no-htp --hash-mt=0 --mtag-mt=1 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
|
||||
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
|
||||
* huge amount of short-lived connections
|
||||
* really heavy traffic (downloads/uploads)
|
||||
* simultaneous downloads and uploads saturating a 20gbps connection
|
||||
|
||||
...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
|
||||
|
||||
@@ -1216,6 +1240,11 @@ when uploading files,
|
||||
|
||||
some notes on hardening
|
||||
|
||||
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)
|
||||
* cors doesn't work right otherwise
|
||||
|
||||
safety profiles:
|
||||
|
||||
* option `-s` is a shortcut to set the following options:
|
||||
* `--no-thumb` disables thumbnails and audio transcoding to stop copyparty from running `FFmpeg`/`Pillow`/`VIPS` on uploaded files, which is a [good idea](https://www.cvedetails.com/vulnerability-list.php?vendor_id=3611) if anonymous upload is enabled
|
||||
* `--no-mtag-ff` uses `mutagen` to grab music tags instead of `FFmpeg`, which is safer and faster but less accurate
|
||||
@@ -1223,7 +1252,6 @@ some notes on hardening
|
||||
* `--no-robots` and `--force-js` makes life harder for crawlers, see [hiding from google](#hiding-from-google)
|
||||
|
||||
* option `-ss` is a shortcut for the above plus:
|
||||
* `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary `<script>` tags
|
||||
* `--unpost 0`, `--no-del`, `--no-mv` disables all move/delete support
|
||||
* `--hardlink` creates hardlinks instead of symlinks when deduplicating uploads, which is less maintenance
|
||||
* however note if you edit one file it will also affect the other copies
|
||||
@@ -1234,6 +1262,7 @@ some notes on hardening
|
||||
|
||||
* option `-sss` is a shortcut for the above plus:
|
||||
* `--no-dav` disables webdav support
|
||||
* `--no-logues` and `--no-readme` disables support for readme's and prologues / epilogues in directory listings, which otherwise lets people upload arbitrary (but sandboxed) `<script>` tags
|
||||
* `-lo cpp-%Y-%m%d-%H%M%S.txt.xz` enables logging to disk
|
||||
* `-ls **,*,ln,p,r` does a scan on startup for any dangerous symlinks
|
||||
|
||||
@@ -1249,6 +1278,22 @@ other misc notes:
|
||||
behavior that might be unexpected
|
||||
|
||||
* users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
|
||||
* users can submit `<script>`s which autorun for other visitors in a few ways;
|
||||
* uploading a `README.md` -- avoid with `--no-readme`
|
||||
* renaming `some.html` to `.epilogue.html` -- avoid with either `--no-logues` or `--no-dot-ren`
|
||||
* the directory-listing embed is sandboxed (so any malicious scripts can't do any damage) but the markdown editor is not
|
||||
|
||||
|
||||
## cors
|
||||
|
||||
cross-site request config
|
||||
|
||||
by default, except for `GET` and `HEAD` operations, all requests must either:
|
||||
* not contain an `Origin` header at all
|
||||
* or have an `Origin` matching the server domain
|
||||
* or the header `PW` with your password as value
|
||||
|
||||
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
|
||||
|
||||
|
||||
# recovering from crashes
|
||||
|
||||
9
SECURITY.md
Normal file
9
SECURITY.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# Security Policy
|
||||
|
||||
if you hit something extra juicy pls let me know on either of the following
|
||||
* email -- `copyparty@ocv.ze` except `ze` should be `me`
|
||||
* [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space`
|
||||
* [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated
|
||||
* [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet)
|
||||
|
||||
no bug bounties sorry! all i can offer is greetz in the release notes
|
||||
@@ -1,7 +1,8 @@
|
||||
# [`up2k.py`](up2k.py)
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* faster than browsers
|
||||
* sync local folder to server
|
||||
* generally faster than browsers
|
||||
* if something breaks just restart it
|
||||
|
||||
|
||||
|
||||
19
bin/hooks/README.md
Normal file
19
bin/hooks/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...)
|
||||
|
||||
these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
|
||||
|
||||
> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
|
||||
|
||||
|
||||
# after upload
|
||||
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
|
||||
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
|
||||
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
|
||||
|
||||
|
||||
# before upload
|
||||
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
|
||||
|
||||
|
||||
# on message
|
||||
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
|
||||
61
bin/hooks/discord-announce.py
Executable file
61
bin/hooks/discord-announce.py
Executable file
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
from copyparty.util import humansize, quotep
|
||||
|
||||
|
||||
_ = r"""
|
||||
announces a new upload on discord
|
||||
|
||||
example usage as global config:
|
||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
parameters explained,
|
||||
f = fork; don't wait for it to finish
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = provide upload information as json; not just the filename
|
||||
|
||||
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
||||
|
||||
# how to discord:
|
||||
first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks
|
||||
then use this to design your message: https://discohook.org/
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
|
||||
|
||||
# read info from copyparty
|
||||
inf = json.loads(sys.argv[1])
|
||||
vpath = inf["vp"]
|
||||
filename = vpath.split("/")[-1]
|
||||
url = f"https://{inf['host']}/{quotep(vpath)}"
|
||||
|
||||
# compose the message to discord
|
||||
j = {
|
||||
"title": filename,
|
||||
"url": url,
|
||||
"description": url.rsplit("/", 1)[0],
|
||||
"color": 0x449900,
|
||||
"fields": [
|
||||
{"name": "Size", "value": humansize(inf["sz"])},
|
||||
{"name": "User", "value": inf["user"]},
|
||||
{"name": "IP", "value": inf["ip"]},
|
||||
],
|
||||
}
|
||||
|
||||
for v in j["fields"]:
|
||||
v["inline"] = True
|
||||
|
||||
r = requests.post(WEBHOOK, json={"embeds": [j]})
|
||||
print(f"discord: {r}\n", end="")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
48
bin/hooks/notify.py
Executable file
48
bin/hooks/notify.py
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from plyer import notification
|
||||
|
||||
|
||||
_ = r"""
|
||||
show os notification on upload; works on windows, linux, macos, android
|
||||
|
||||
depdencies:
|
||||
windows: python3 -m pip install --user -U plyer
|
||||
linux: python3 -m pip install --user -U plyer
|
||||
macos: python3 -m pip install --user -U plyer pyobjus
|
||||
android: just termux and termux-api
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau f,bin/hooks/notify.py
|
||||
-v srv/inc:inc:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork so it doesn't block uploads
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
dp, fn = os.path.split(sys.argv[1])
|
||||
msg = "🏷️ {}\n📁 {}".format(fn, dp)
|
||||
title = "File received"
|
||||
|
||||
if "com.termux" in sys.executable:
|
||||
sp.run(["termux-notification", "-t", title, "-c", msg])
|
||||
return
|
||||
|
||||
icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
|
||||
notification.notify(
|
||||
title=title,
|
||||
message=msg,
|
||||
app_icon=icon,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
30
bin/hooks/reject-extension.py
Executable file
30
bin/hooks/reject-extension.py
Executable file
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
reject file uploads by file extension
|
||||
|
||||
example usage as global config:
|
||||
--xbu c,bin/hooks/reject-extension.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xbu=c,bin/hooks/reject-extension.py
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
c = check result, reject upload if error
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
bad = "exe scr com pif bat ps1 jar msi"
|
||||
|
||||
ext = sys.argv[1].split(".")[-1]
|
||||
|
||||
sys.exit(1 if ext in bad.split() else 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
39
bin/hooks/reject-mimetype.py
Executable file
39
bin/hooks/reject-mimetype.py
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import magic
|
||||
|
||||
|
||||
_ = r"""
|
||||
reject file uploads by mimetype
|
||||
|
||||
dependencies (linux, macos):
|
||||
python3 -m pip install --user -U python-magic
|
||||
|
||||
dependencies (windows):
|
||||
python3 -m pip install --user -U python-magic-bin
|
||||
|
||||
example usage as global config:
|
||||
--xau c,bin/hooks/reject-mimetype.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
c = check result, reject upload if error
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
ok = ["image/jpeg", "image/png"]
|
||||
|
||||
mt = magic.from_file(sys.argv[1], mime=True)
|
||||
|
||||
print(mt)
|
||||
|
||||
sys.exit(1 if mt not in ok else 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
54
bin/hooks/wget.py
Executable file
54
bin/hooks/wget.py
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
message/pager function on the website)
|
||||
|
||||
example usage as global config:
|
||||
--xm f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:c,xm=f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
parameters explained,
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide message information as json; not just the text
|
||||
c3 = mute all output
|
||||
t3600 = timeout and kill download after 1 hour
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
url = inf["txt"]
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
tfn = "-- DOWNLOADING " + name
|
||||
print(f"{tfn}\n", end="")
|
||||
open(tfn, "wb").close()
|
||||
|
||||
cmd = ["wget", "--trust-server-names", "-nv", "--", url]
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
t = "-- FAILED TO DONWLOAD " + name
|
||||
print(f"{t}\n", end="")
|
||||
open(t, "wb").close()
|
||||
|
||||
os.unlink(tfn)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,5 +1,9 @@
|
||||
standalone programs which take an audio file as argument
|
||||
|
||||
you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)
|
||||
|
||||
----
|
||||
|
||||
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
|
||||
|
||||
some of these rely on libraries which are not MIT-compatible
|
||||
@@ -17,6 +21,7 @@ these do not have any problematic dependencies at all:
|
||||
* [cksum.py](./cksum.py) computes various checksums
|
||||
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
|
||||
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
|
||||
* also available as an [event hook](../hooks/wget.py)
|
||||
|
||||
|
||||
# dependencies
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
DEPRECATED -- replaced by event hooks;
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py
|
||||
|
||||
---
|
||||
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
message/pager function on the website)
|
||||
|
||||
@@ -997,7 +997,7 @@ def main():
|
||||
ap.add_argument(
|
||||
"-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
|
||||
)
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-d", action="store_true", help="enable debug")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
|
||||
230
bin/up2k.py
230
bin/up2k.py
@@ -3,14 +3,12 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2022-11-29, v0.22, ed <irc.rizon.net>, MIT-Licensed
|
||||
2023-01-13, v1.2, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.11
|
||||
|
||||
- almost zero error-handling
|
||||
- but if something breaks just try again and it'll autoresume
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.12
|
||||
- if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import os
|
||||
@@ -42,6 +40,7 @@ except ImportError:
|
||||
m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
|
||||
m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
|
||||
m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"
|
||||
m += "\n for f in *.whl; do unzip $f; done; rm -r *.dist-info\n"
|
||||
|
||||
print(m.format(sys.executable))
|
||||
sys.exit(1)
|
||||
@@ -262,10 +261,10 @@ def termsize():
|
||||
try:
|
||||
import fcntl, termios, struct
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
r = struct.unpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
|
||||
return r[::-1]
|
||||
except:
|
||||
return
|
||||
return cr
|
||||
return None
|
||||
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
@@ -275,12 +274,11 @@ def termsize():
|
||||
os.close(fd)
|
||||
except:
|
||||
pass
|
||||
if not cr:
|
||||
|
||||
try:
|
||||
cr = (env["LINES"], env["COLUMNS"])
|
||||
return cr or (int(env["COLUMNS"]), int(env["LINES"]))
|
||||
except:
|
||||
cr = (25, 80)
|
||||
return int(cr[1]), int(cr[0])
|
||||
return 80, 25
|
||||
|
||||
|
||||
class CTermsize(object):
|
||||
@@ -362,26 +360,29 @@ def walkdir(err, top, seen):
|
||||
|
||||
seen = seen[:] + [atop]
|
||||
for ap, inf in sorted(statdir(err, top)):
|
||||
yield ap, inf
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
try:
|
||||
for x in walkdir(err, ap, seen):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
err.append((ap, str(ex)))
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
isdir = os.path.isdir(top)
|
||||
if top[-1:] == sep:
|
||||
stop = top.rstrip(sep)
|
||||
yield stop, b"", os.stat(stop)
|
||||
else:
|
||||
stop = os.path.dirname(top)
|
||||
stop, dn = os.path.split(top)
|
||||
if isdir:
|
||||
yield stop, dn, os.stat(stop)
|
||||
|
||||
if os.path.isdir(top):
|
||||
if isdir:
|
||||
for ap, inf in walkdir(err, top, []):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
@@ -472,14 +473,17 @@ def get_hashlist(file, pcb, mth):
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(url, file, pw, search):
|
||||
# type: (str, File, Any, bool) -> tuple[list[str], bool]
|
||||
def handshake(ar, file, search):
|
||||
# type: (argparse.Namespace, File, bool) -> tuple[list[str], bool]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
otherwise, a list of chunks to upload
|
||||
"""
|
||||
|
||||
url = ar.url
|
||||
pw = ar.a
|
||||
|
||||
req = {
|
||||
"hash": [x[0] for x in file.cids],
|
||||
"name": file.name,
|
||||
@@ -488,35 +492,44 @@ def handshake(url, file, pw, search):
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
elif ar.dr:
|
||||
req["replace"] = True
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||
headers = {"Content-Type": "text/plain"} # <=1.5.1 compat
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
file.recheck = False
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
sc = 600
|
||||
txt = ""
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except Exception as ex:
|
||||
em = str(ex).split("SSLError(")[-1]
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
|
||||
sc = r.status_code
|
||||
if sc >= 400:
|
||||
txt = r.text
|
||||
if sc < 400:
|
||||
break
|
||||
|
||||
raise Exception("http {0}: {1}".format(sc, txt))
|
||||
|
||||
except Exception as ex:
|
||||
em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip()
|
||||
|
||||
if sc == 422 or "<pre>partial upload exists at a different" in txt:
|
||||
file.recheck = True
|
||||
return [], False
|
||||
elif sc == 409 or "<pre>upload rejected, file already exists" in txt:
|
||||
return [], False
|
||||
elif "<pre>you don't have " in txt:
|
||||
raise
|
||||
|
||||
raise Exception("http {0}: {1}".format(sc, txt))
|
||||
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
@@ -540,7 +553,7 @@ def handshake(url, file, pw, search):
|
||||
|
||||
|
||||
def upload(file, cid, pw):
|
||||
# type: (File, str, Any) -> None
|
||||
# type: (File, str, str) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
@@ -564,27 +577,20 @@ def upload(file, cid, pw):
|
||||
|
||||
class Ctl(object):
|
||||
"""
|
||||
this will be the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads) but right now it's p dumb
|
||||
the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads)
|
||||
"""
|
||||
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
def _scan(self):
|
||||
ar = self.ar
|
||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
continue
|
||||
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
@@ -606,8 +612,15 @@ class Ctl(object):
|
||||
return
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
return nfiles, nbytes
|
||||
|
||||
def __init__(self, ar, stats=None):
|
||||
self.ar = ar
|
||||
self.stats = stats or self._scan()
|
||||
if not self.stats:
|
||||
return
|
||||
|
||||
self.nfiles, self.nbytes = self.stats
|
||||
|
||||
if ar.td:
|
||||
requests.packages.urllib3.disable_warnings()
|
||||
@@ -651,6 +664,9 @@ class Ctl(object):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
if stat.S_ISDIR(inf.st_mode) or not rel:
|
||||
continue
|
||||
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
@@ -660,7 +676,7 @@ class Ctl(object):
|
||||
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs, _ = handshake(self.ar.url, file, self.ar.a, search)
|
||||
hs, _ = handshake(self.ar, file, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
@@ -688,10 +704,10 @@ class Ctl(object):
|
||||
|
||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||
for file in self.recheck:
|
||||
handshake(self.ar.url, file, self.ar.a, search)
|
||||
handshake(self.ar, file, search)
|
||||
|
||||
def _fancy(self):
|
||||
if VT100:
|
||||
if VT100 and not self.ar.ns:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
|
||||
@@ -715,7 +731,7 @@ class Ctl(object):
|
||||
else:
|
||||
idles = 0
|
||||
|
||||
if VT100:
|
||||
if VT100 and not self.ar.ns:
|
||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||
txt = "\033[s\033[{0}H".format(ss.g)
|
||||
for y, k, st, f in [
|
||||
@@ -755,7 +771,7 @@ class Ctl(object):
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
sleft = humansize(self.nbytes - self.up_b)
|
||||
nleft = self.nfiles - self.up_f
|
||||
tail = "\033[K\033[u" if VT100 else "\r"
|
||||
tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
|
||||
|
||||
t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
|
||||
@@ -765,7 +781,7 @@ class Ctl(object):
|
||||
|
||||
eprint("finalizing {0} duplicate files".format(len(self.recheck)))
|
||||
for file in self.recheck:
|
||||
handshake(self.ar.url, file, self.ar.a, False)
|
||||
handshake(self.ar, file, False)
|
||||
|
||||
def cleanup_vt100(self):
|
||||
ss.scroll_region(None)
|
||||
@@ -778,8 +794,10 @@ class Ctl(object):
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
if self.ar.z:
|
||||
rd = os.path.dirname(rel)
|
||||
isdir = stat.S_ISDIR(inf.st_mode)
|
||||
if self.ar.z or self.ar.drd:
|
||||
rd = rel if isdir else os.path.dirname(rel)
|
||||
srd = rd.decode("utf-8", "replace").replace("\\", "/")
|
||||
if prd != rd:
|
||||
prd = rd
|
||||
headers = {}
|
||||
@@ -788,19 +806,37 @@ class Ctl(object):
|
||||
|
||||
ls = {}
|
||||
try:
|
||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
r = req_ses.get(
|
||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||
headers=headers,
|
||||
)
|
||||
for f in r.json()["files"]:
|
||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||
ls[unquote(rfn)] = f
|
||||
except:
|
||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
print(" ls ~{0}".format(srd))
|
||||
zb = self.ar.url.encode("utf-8")
|
||||
zb += quotep(rd.replace(b"\\", b"/"))
|
||||
r = req_ses.get(zb + b"?ls&dots", headers=headers)
|
||||
if not r:
|
||||
raise Exception("HTTP {}".format(r.status_code))
|
||||
|
||||
j = r.json()
|
||||
for f in j["dirs"] + j["files"]:
|
||||
rfn = f["href"].split("?")[0].rstrip("/")
|
||||
ls[unquote(rfn.encode("utf-8", "replace"))] = f
|
||||
except Exception as ex:
|
||||
print(" mkdir ~{0} ({1})".format(srd, ex))
|
||||
|
||||
if self.ar.drd:
|
||||
dp = os.path.join(top, rd)
|
||||
lnodes = set(os.listdir(dp))
|
||||
bnames = [x for x in ls if x not in lnodes]
|
||||
if bnames:
|
||||
vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
|
||||
names = [x.decode("utf-8", "replace") for x in bnames]
|
||||
locs = [vpath + srd + "/" + x for x in names]
|
||||
print("DELETING ~{0}/#{1}".format(srd, len(names)))
|
||||
req_ses.post(self.ar.url + "?delete", json=locs)
|
||||
|
||||
if isdir:
|
||||
continue
|
||||
|
||||
if self.ar.z:
|
||||
rf = ls.get(os.path.basename(rel), None)
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 2:
|
||||
self.nfiles -= 1
|
||||
self.nbytes -= inf.st_size
|
||||
continue
|
||||
@@ -850,7 +886,7 @@ class Ctl(object):
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
hs, sprs = handshake(self.ar.url, file, self.ar.a, search)
|
||||
hs, sprs = handshake(self.ar, file, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
@@ -883,6 +919,9 @@ class Ctl(object):
|
||||
self.up_c += len(file.cids) - file.up_c
|
||||
self.up_b += file.size - file.up_b
|
||||
|
||||
if not file.recheck:
|
||||
self.up_done(file)
|
||||
|
||||
if hs and file.up_c:
|
||||
# some chunks failed
|
||||
self.up_c -= len(hs)
|
||||
@@ -917,7 +956,7 @@ class Ctl(object):
|
||||
upload(file, cid, self.ar.a)
|
||||
except:
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
pass # handshake will fix it
|
||||
# handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
sz = file.kchunks[cid][1]
|
||||
@@ -933,6 +972,10 @@ class Ctl(object):
|
||||
self.up_c += 1
|
||||
self.uploader_busy -= 1
|
||||
|
||||
def up_done(self, file):
|
||||
if self.ar.dl:
|
||||
os.unlink(file.abs)
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
@@ -957,21 +1000,74 @@ source file/folder selection uses rsync syntax, meaning that:
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-v", action="store_true", help="verbose")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
|
||||
ap = app.add_argument_group("compatibility")
|
||||
ap.add_argument("--cls", action="store_true", help="clear screen before start")
|
||||
ap.add_argument("--ws", action="store_true", help="copyparty is running on windows; wait before deleting files after uploading")
|
||||
|
||||
ap = app.add_argument_group("folder sync")
|
||||
ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
|
||||
ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally")
|
||||
ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames")
|
||||
|
||||
ap = app.add_argument_group("performance tweaks")
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
|
||||
ap = app.add_argument_group("tls")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
# fmt: on
|
||||
|
||||
Ctl(app.parse_args())
|
||||
ar = app.parse_args()
|
||||
if ar.drd:
|
||||
ar.dr = True
|
||||
|
||||
for k in "dl dr drd".split():
|
||||
errs = []
|
||||
if ar.safe and getattr(ar, k):
|
||||
errs.append(k)
|
||||
|
||||
if errs:
|
||||
raise Exception("--safe is incompatible with " + str(errs))
|
||||
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
if ar.a and ar.a.startswith("$"):
|
||||
fn = ar.a[1:]
|
||||
print("reading password from file [{}]".format(fn))
|
||||
with open(fn, "rb") as f:
|
||||
ar.a = f.read().decode("utf-8").strip()
|
||||
|
||||
if ar.cls:
|
||||
print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
|
||||
|
||||
ctl = Ctl(ar)
|
||||
|
||||
if ar.dr and not ar.drd:
|
||||
print("\npass 2/2: delete")
|
||||
if getattr(ctl, "up_br") and ar.ws:
|
||||
# wait for up2k to mtime if there was uploads
|
||||
time.sleep(4)
|
||||
|
||||
ar.drd = True
|
||||
ar.z = True
|
||||
Ctl(ar, ctl.stats)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -29,11 +29,11 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
||||
* disables thumbnails and folder-type detection in windows explorer
|
||||
* makes it way faster (especially for slow/networked locations (such as partyfuse))
|
||||
|
||||
### [`webdav-basicauth.reg`](webdav-basicauth.reg)
|
||||
* enables webdav basic-auth over plaintext http; takes effect after a reboot OR after running `webdav-unlimit.bat`
|
||||
|
||||
### [`webdav-unlimit.bat`](webdav-unlimit.bat)
|
||||
* removes the 47.6 MiB filesize limit when downloading from webdav
|
||||
### [`webdav-cfg.reg`](webdav-cfg.bat)
|
||||
* improves the native webdav support in windows;
|
||||
* removes the 47.6 MiB filesize limit when downloading from webdav
|
||||
* optionally enables webdav basic-auth over plaintext http
|
||||
* optionally helps disable wpad, removing the 10sec latency
|
||||
|
||||
### [`cfssl.sh`](cfssl.sh)
|
||||
* creates CA and server certificates using cfssl
|
||||
|
||||
15
contrib/apache/copyparty.conf
Normal file
15
contrib/apache/copyparty.conf
Normal file
@@ -0,0 +1,15 @@
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# if you are doing location-based proxying (such as `/stuff` below)
|
||||
# you must run copyparty with --rp-loc=stuff
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
LoadModule proxy_module modules/mod_proxy.so
|
||||
ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
||||
# do not specify ProxyPassReverse
|
||||
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||
@@ -10,6 +10,8 @@
|
||||
#
|
||||
# you may also consider adding -j0 for CPU-intensive configurations
|
||||
# (not that i can really think of any good examples)
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
|
||||
@@ -14,5 +14,5 @@ name="$SVCNAME"
|
||||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::rw"
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
<!--
|
||||
save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
|
||||
https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
|
||||
|
||||
only works if you disable the prologue/epilogue sandbox with --no-sb-lg
|
||||
which should probably be combined with --no-dot-ren to prevent damage
|
||||
(`no_sb_lg` can also be set per-volume with volflags)
|
||||
-->
|
||||
|
||||
<style>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
@echo off
|
||||
rem removes the 47.6 MiB filesize limit when downloading from webdav
|
||||
rem + optionally allows/enables password-auth over plaintext http
|
||||
rem + optionally helps disable wpad
|
||||
rem + optionally helps disable wpad, removing the 10sec latency
|
||||
|
||||
setlocal enabledelayedexpansion
|
||||
|
||||
|
||||
@@ -54,6 +54,7 @@ except:
|
||||
HAVE_SSL = False
|
||||
|
||||
printed: list[str] = []
|
||||
u = unicode
|
||||
|
||||
|
||||
class RiceFormatter(argparse.HelpFormatter):
|
||||
@@ -228,9 +229,10 @@ def get_srvname() -> str:
|
||||
ret = f.read().decode("utf-8", "replace").strip()
|
||||
except:
|
||||
ret = ""
|
||||
while len(ret) < 7:
|
||||
namelen = 5
|
||||
while len(ret) < namelen:
|
||||
ret += base64.b32encode(os.urandom(4))[:7].decode("utf-8").lower()
|
||||
ret = re.sub("[234567=]", "", ret)[:7]
|
||||
ret = re.sub("[234567=]", "", ret)[:namelen]
|
||||
with open(fp, "wb") as f:
|
||||
f.write(ret.encode("utf-8") + b"\n")
|
||||
|
||||
@@ -238,18 +240,23 @@ def get_srvname() -> str:
|
||||
|
||||
|
||||
def ensure_locale() -> None:
|
||||
safe = "en_US.UTF-8"
|
||||
for x in [
|
||||
"en_US.UTF-8",
|
||||
safe,
|
||||
"English_United States.UTF8",
|
||||
"English_United States.1252",
|
||||
]:
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, x)
|
||||
if x != safe:
|
||||
lprint("Locale: {}\n".format(x))
|
||||
break
|
||||
return
|
||||
except:
|
||||
continue
|
||||
|
||||
t = "setlocale {} failed,\n sorting and dates might get funky\n"
|
||||
warn(t.format(safe))
|
||||
|
||||
|
||||
def ensure_cert() -> None:
|
||||
"""
|
||||
@@ -267,8 +274,8 @@ def ensure_cert() -> None:
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
lprint(
|
||||
"\033[33m using default TLS certificate; https will be insecure."
|
||||
+ "\033[36m\n certificate location: {}\033[0m\n".format(cert_cfg)
|
||||
"\033[33musing default TLS certificate; https will be insecure."
|
||||
+ "\033[36m\ncertificate location: {}\033[0m\n".format(cert_cfg)
|
||||
)
|
||||
except:
|
||||
pass
|
||||
@@ -441,27 +448,8 @@ def showlic() -> None:
|
||||
print(f.read().decode("utf-8", "replace"))
|
||||
|
||||
|
||||
def run_argparse(
|
||||
argv: list[str], formatter: Any, retry: bool, nc: int
|
||||
) -> argparse.Namespace:
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
prog="copyparty",
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
hcores = min(CORES, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
|
||||
|
||||
tty = os.environ.get("TERM", "").lower() == "linux"
|
||||
|
||||
srvname = get_srvname()
|
||||
|
||||
sects = [
|
||||
def get_sects():
|
||||
return [
|
||||
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
@@ -511,6 +499,9 @@ def run_argparse(
|
||||
|
||||
\033[0muploads, general:
|
||||
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
|
||||
\033[36mhardlink\033[35m does dedup with hardlinks instead of symlinks
|
||||
\033[36mneversymlink\033[35m disables symlink fallback; full copy instead
|
||||
\033[36mcopydupes\033[35m disables dedup, always saves full copies of dupes
|
||||
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
|
||||
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
|
||||
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
|
||||
@@ -540,6 +531,7 @@ def run_argparse(
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mnoforget$\033[35m don't forget files when deleted from disk
|
||||
\033[36mdbd=[acid|swal|wal|yolo]\033[35m database speed-durability tradeoff
|
||||
\033[36mxlink$\033[35m cross-volume dupe detection / linking
|
||||
\033[36mxdev\033[35m do not descend into other filesystems
|
||||
\033[36mxvol\033[35m skip symlinks leaving the volume root
|
||||
@@ -567,6 +559,51 @@ def run_argparse(
|
||||
\033[0m"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"hooks",
|
||||
"execute commands before/after various events",
|
||||
dedent(
|
||||
"""
|
||||
execute a command (a program or script) before or after various events;
|
||||
\033[36mxbu\033[35m executes CMD before a file upload starts
|
||||
\033[36mxau\033[35m executes CMD after a file upload finishes
|
||||
\033[36mxbr\033[35m executes CMD before a file rename/move
|
||||
\033[36mxar\033[35m executes CMD after a file rename/move
|
||||
\033[36mxbd\033[35m executes CMD before a file delete
|
||||
\033[36mxad\033[35m executes CMD after a file delete
|
||||
\033[36mxm\033[35m executes CMD on message
|
||||
\033[0m
|
||||
can be defined as --args or volflags; for example \033[36m
|
||||
--xau notify-send
|
||||
-v .::r:c,xau=notify-send
|
||||
\033[0m
|
||||
commands specified as --args are appended to volflags;
|
||||
each --arg and volflag can be specified multiple times,
|
||||
each command will execute in order unless one returns non-zero
|
||||
|
||||
optionally prefix the command with comma-sep. flags similar to -mtp:
|
||||
|
||||
\033[36mf\033[35m forks the process, doesn't wait for completion
|
||||
\033[36mc\033[35m checks return code, blocks the action if non-zero
|
||||
\033[36mj\033[35m provides json with info as 1st arg instead of filepath
|
||||
\033[36mwN\033[35m waits N sec after command has been started before continuing
|
||||
\033[36mtN\033[35m sets an N sec timeout before the command is abandoned
|
||||
|
||||
\033[36mkt\033[35m kills the entire process tree on timeout (default),
|
||||
\033[36mkm\033[35m kills just the main process
|
||||
\033[36mkn\033[35m lets it continue running until copyparty is terminated
|
||||
|
||||
\033[36mc0\033[35m show all process output (default)
|
||||
\033[36mc1\033[35m show only stderr
|
||||
\033[36mc2\033[35m show only stdout
|
||||
\033[36mc3\033[35m mute all process otput
|
||||
\033[0m
|
||||
except for \033[36mxm\033[0m, only one hook / one action can run at a time,
|
||||
so it's recommended to use the \033[36mf\033[0m flag unless you really need
|
||||
to wait for the hook to finish before continuing (without \033[36mf\033[0m
|
||||
the upload speed can easily drop to 10% for small files)"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"urlform",
|
||||
"how to handle url-form POSTs",
|
||||
@@ -600,10 +637,32 @@ def run_argparse(
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"dbd",
|
||||
"database durability profiles",
|
||||
dedent(
|
||||
"""
|
||||
mainly affects uploads of many small files on slow HDDs; speeds measured uploading 520 files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
|
||||
|
||||
\033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what
|
||||
|
||||
\033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
|
||||
|
||||
\033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
|
||||
|
||||
\033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash
|
||||
|
||||
profiles can be set globally (--dbd=yolo), or per-volume with volflags: -v ~/Music:music:r:c,dbd=acid
|
||||
"""
|
||||
),
|
||||
],
|
||||
]
|
||||
|
||||
# fmt: off
|
||||
u = unicode
|
||||
|
||||
# fmt: off
|
||||
|
||||
|
||||
def add_general(ap, nc, srvname):
|
||||
ap2 = ap.add_argument_group('general options')
|
||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=nc, help="max num clients")
|
||||
@@ -619,6 +678,8 @@ def run_argparse(
|
||||
ap2.add_argument("--license", action="store_true", help="show licenses and exit")
|
||||
ap2.add_argument("--version", action="store_true", help="show versions and exit")
|
||||
|
||||
|
||||
def add_qr(ap, tty):
|
||||
ap2 = ap.add_argument_group('qr options')
|
||||
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
|
||||
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
|
||||
@@ -629,6 +690,8 @@ def run_argparse(
|
||||
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
|
||||
ap2.add_argument("--qrz", metavar="N", type=int, default=0, help="[\033[32m1\033[0m]=1x, [\033[32m2\033[0m]=2x, [\033[32m0\033[0m]=auto (try [\033[32m2\033[0m] on broken fonts)")
|
||||
|
||||
|
||||
def add_upload(ap):
|
||||
ap2 = ap.add_argument_group('upload options')
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
|
||||
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
|
||||
@@ -636,9 +699,9 @@ def run_argparse(
|
||||
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
|
||||
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem) (volflag=hardlink)")
|
||||
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made (volflag=neversymlink)")
|
||||
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes")
|
||||
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
|
||||
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
|
||||
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
|
||||
@@ -648,14 +711,24 @@ def run_argparse(
|
||||
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
|
||||
ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
|
||||
|
||||
|
||||
def add_network(ap):
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="::", help="ip to bind (comma-sep.), default: all IPv4 and IPv6")
|
||||
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
|
||||
ap2.add_argument("--ll", action="store_true", help="include link-local IPv4/IPv6 even if the NIC has routable IPs (breaks some mdns clients)")
|
||||
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd), [\033[32m2\033[0m]=cloudflare, [\033[32m3\033[0m]=nginx, [\033[32m-1\033[0m]=closest proxy")
|
||||
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
|
||||
if ANYWIN:
|
||||
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
|
||||
else:
|
||||
ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
|
||||
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
|
||||
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
|
||||
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
|
||||
|
||||
|
||||
def add_tls(ap):
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls -- force plaintext")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext -- force tls")
|
||||
@@ -664,17 +737,22 @@ def run_argparse(
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets for later decryption in wireshark")
|
||||
|
||||
|
||||
def add_zeroconf(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf options")
|
||||
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
|
||||
ap2.add_argument("--z-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
||||
ap2.add_argument("--z-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
|
||||
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)")
|
||||
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
|
||||
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
|
||||
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options:")
|
||||
|
||||
def add_zc_mdns(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
|
||||
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
|
||||
ap2.add_argument("--zm-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zm4", action="store_true", help="IPv4 only -- try this if some clients can't connect")
|
||||
ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
|
||||
ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
|
||||
@@ -688,14 +766,18 @@ def run_argparse(
|
||||
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
|
||||
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
|
||||
|
||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options:")
|
||||
|
||||
def add_zc_ssdp(ap):
|
||||
ap2 = ap.add_argument_group("Zeroconf-SSDP options")
|
||||
ap2.add_argument("--zs", action="store_true", help="announce the enabled protocols over SSDP -- compatible with Windows")
|
||||
ap2.add_argument("--zs-on", metavar="NICS/NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NICS/NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
|
||||
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
|
||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] or [\033[32mpriv/?pw=hunter2\033[0m]")
|
||||
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
|
||||
ap2.add_argument("--zsid", metavar="UUID", type=u, default=uuid.uuid4().urn[4:], help="USN (device identifier) to announce")
|
||||
|
||||
|
||||
def add_ftp(ap):
|
||||
ap2 = ap.add_argument_group('FTP options')
|
||||
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921")
|
||||
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990")
|
||||
@@ -704,11 +786,15 @@ def run_argparse(
|
||||
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
|
||||
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
|
||||
|
||||
|
||||
def add_webdav(ap):
|
||||
ap2 = ap.add_argument_group('WebDAV options')
|
||||
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
|
||||
ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
|
||||
ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
|
||||
|
||||
|
||||
def add_smb(ap):
|
||||
ap2 = ap.add_argument_group('SMB/CIFS options')
|
||||
ap2.add_argument("--smb", action="store_true", help="enable smb (read-only) -- this requires running copyparty as root on linux and macos unless --smb-port is set above 1024 and your OS does port-forwarding from 445 to that.\n\033[1;31mWARNING:\033[0m this protocol is dangerous! Never expose to the internet. Account permissions are coalesced; if one account has write-access to a volume, then all accounts do.")
|
||||
ap2.add_argument("--smbw", action="store_true", help="enable write support (please dont)")
|
||||
@@ -720,6 +806,25 @@ def run_argparse(
|
||||
ap2.add_argument("--smbvv", action="store_true", help="verboser")
|
||||
ap2.add_argument("--smbvvv", action="store_true", help="verbosest")
|
||||
|
||||
|
||||
def add_hooks(ap):
|
||||
ap2 = ap.add_argument_group('hooks (see --help-hooks)')
|
||||
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts")
|
||||
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes")
|
||||
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename")
|
||||
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename")
|
||||
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete")
|
||||
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete")
|
||||
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message")
|
||||
|
||||
|
||||
def add_yolo(ap):
|
||||
ap2 = ap.add_argument_group('yolo options')
|
||||
ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
|
||||
ap2.add_argument("--getmod", action="store_true", help="permit ?move=[...] and ?delete as GET")
|
||||
|
||||
|
||||
def add_optouts(ap):
|
||||
ap2 = ap.add_argument_group('opt-outs')
|
||||
ap2.add_argument("-nw", action="store_true", help="never write anything to disk (debug/benchmark)")
|
||||
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows (it is disabled to avoid accidental text selection which will deadlock copyparty)")
|
||||
@@ -728,13 +833,16 @@ def run_argparse(
|
||||
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
|
||||
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
|
||||
|
||||
|
||||
def add_safety(ap, fk_salt):
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --no-dot-mv --no-dot-ren --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
|
||||
ap2.add_argument("-sss", action="store_true", help="further increase safety: Enable logging to disk, scan for dangerous symlinks.\n └─Alias of\033[32m -ss --no-dav --no-logues --no-readme -lo=cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz --ls=**,*,ln,p,r")
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; used to generate unpredictable internal identifiers for uploads -- doesn't really matter")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
|
||||
@@ -750,12 +858,18 @@ def run_argparse(
|
||||
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
|
||||
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
|
||||
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
|
||||
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
|
||||
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
|
||||
|
||||
|
||||
def add_shutdown(ap):
|
||||
ap2 = ap.add_argument_group('shutdown options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example [\033[32midx\033[0m] will do volume indexing + metadata analysis")
|
||||
|
||||
|
||||
def add_logging(ap):
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
|
||||
@@ -765,11 +879,15 @@ def run_argparse(
|
||||
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching")
|
||||
|
||||
|
||||
def add_admin(ap):
|
||||
ap2 = ap.add_argument_group('admin panel options')
|
||||
ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
|
||||
|
||||
def add_thumbnail(ap):
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
|
||||
@@ -796,10 +914,14 @@ def run_argparse(
|
||||
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
|
||||
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
|
||||
|
||||
|
||||
def add_transcoding(ap):
|
||||
ap2 = ap.add_argument_group('transcoding options')
|
||||
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
|
||||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
|
||||
@@ -811,7 +933,9 @@ def run_argparse(
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans (volflag=nohash)")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans (volflag=noidx)")
|
||||
ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
|
||||
ap2.add_argument("--re-dhash", action="store_true", help="rebuild the cache if it gets out of sync (for example crash on startup during metadata scanning)")
|
||||
ap2.add_argument("--no-forget", action="store_true", help="never forget indexed files, even when deleted from disk -- makes it impossible to ever upload the same file twice (volflag=noforget)")
|
||||
ap2.add_argument("--dbd", metavar="PROFILE", default="wal", help="database durability profile; sets the tradeoff between robustness and speed, see --help-dbd (volflag=dbd)")
|
||||
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
|
||||
ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
|
||||
ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root (volflag=xvol)")
|
||||
@@ -821,6 +945,8 @@ def run_argparse(
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds")
|
||||
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
|
||||
|
||||
|
||||
def add_db_metadata(ap):
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing; makes it possible to search for artist/title/codec/resolution/...")
|
||||
ap2.add_argument("-e2ts", action="store_true", help="scan existing files on startup; sets -e2t")
|
||||
@@ -838,18 +964,28 @@ def run_argparse(
|
||||
default=".vq,.aq,vc,ac,fmt,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
|
||||
|
||||
|
||||
def add_ui(ap, retry):
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
|
||||
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
|
||||
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
|
||||
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
|
||||
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
|
||||
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
|
||||
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
|
||||
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
|
||||
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
|
||||
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms modals popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
|
||||
ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms modals popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
|
||||
ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README.md documents (volflags: no_sb_md | sb_md)")
|
||||
ap2.add_argument("--no-sb-lg", action="store_true", help="don't sandbox prologue/epilogue docs (volflags: no_sb_lg | sb_lg); enables non-js support")
|
||||
|
||||
|
||||
def add_debug(ap):
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
|
||||
@@ -862,9 +998,58 @@ def run_argparse(
|
||||
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
|
||||
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)")
|
||||
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
|
||||
# fmt: on
|
||||
|
||||
|
||||
# fmt: on
|
||||
|
||||
|
||||
def run_argparse(
|
||||
argv: list[str], formatter: Any, retry: bool, nc: int
|
||||
) -> argparse.Namespace:
|
||||
ap = argparse.ArgumentParser(
|
||||
formatter_class=formatter,
|
||||
prog="copyparty",
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
|
||||
|
||||
tty = os.environ.get("TERM", "").lower() == "linux"
|
||||
|
||||
srvname = get_srvname()
|
||||
|
||||
add_general(ap, nc, srvname)
|
||||
add_network(ap)
|
||||
add_tls(ap)
|
||||
add_qr(ap, tty)
|
||||
add_zeroconf(ap)
|
||||
add_zc_mdns(ap)
|
||||
add_zc_ssdp(ap)
|
||||
add_upload(ap)
|
||||
add_db_general(ap, hcores)
|
||||
add_db_metadata(ap)
|
||||
add_thumbnail(ap)
|
||||
add_transcoding(ap)
|
||||
add_ftp(ap)
|
||||
add_webdav(ap)
|
||||
add_smb(ap)
|
||||
add_safety(ap, fk_salt)
|
||||
add_optouts(ap)
|
||||
add_shutdown(ap)
|
||||
add_yolo(ap)
|
||||
add_hooks(ap)
|
||||
add_ui(ap, retry)
|
||||
add_admin(ap)
|
||||
add_logging(ap)
|
||||
add_debug(ap)
|
||||
|
||||
ap2 = ap.add_argument_group("help sections")
|
||||
sects = get_sects()
|
||||
for k, h, _ in sects:
|
||||
ap2.add_argument("--help-" + k, action="store_true", help=h)
|
||||
|
||||
@@ -981,8 +1166,11 @@ def main(argv: Optional[list[str]] = None) -> None:
|
||||
retry = True
|
||||
lprint("\n[ {} ]:\n{}\n".format(fmtr, min_ex()))
|
||||
|
||||
try:
|
||||
assert al # type: ignore
|
||||
al.E = E # __init__ is not shared when oxidized
|
||||
except:
|
||||
sys.exit(1)
|
||||
|
||||
if WINDOWS and not al.keep_qem:
|
||||
try:
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (1, 5, 1)
|
||||
CODENAME = "babel"
|
||||
BUILD_DT = (2022, 12, 3)
|
||||
VERSION = (1, 6, 2)
|
||||
CODENAME = "cors k"
|
||||
BUILD_DT = (2023, 1, 29)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -370,7 +370,6 @@ class VFS(object):
|
||||
|
||||
def _find(self, vpath: str) -> tuple["VFS", str]:
|
||||
"""return [vfs,remainder]"""
|
||||
vpath = undot(vpath)
|
||||
if vpath == "":
|
||||
return self, ""
|
||||
|
||||
@@ -381,7 +380,7 @@ class VFS(object):
|
||||
rem = ""
|
||||
|
||||
if name in self.nodes:
|
||||
return self.nodes[name]._find(rem)
|
||||
return self.nodes[name]._find(undot(rem))
|
||||
|
||||
return self, vpath
|
||||
|
||||
@@ -389,7 +388,7 @@ class VFS(object):
|
||||
self, vpath: str, uname: str
|
||||
) -> tuple[bool, bool, bool, bool, bool, bool]:
|
||||
"""can Read,Write,Move,Delete,Get,Upget"""
|
||||
vn, _ = self._find(vpath)
|
||||
vn, _ = self._find(undot(vpath))
|
||||
c = vn.axs
|
||||
return (
|
||||
uname in c.uread or "*" in c.uread,
|
||||
@@ -419,7 +418,7 @@ class VFS(object):
|
||||
self.log("vfs", "invalid relpath [{}]".format(vpath))
|
||||
raise Pebkac(404)
|
||||
|
||||
vn, rem = self._find(vpath)
|
||||
vn, rem = self._find(undot(vpath))
|
||||
c: AXS = vn.axs
|
||||
|
||||
for req, d, msg in [
|
||||
@@ -588,7 +587,7 @@ class VFS(object):
|
||||
|
||||
# if multiselect: add all items to archive root
|
||||
# if single folder: the folder itself is the top-level item
|
||||
folder = "" if flt or not wrap else (vrem.split("/")[-1] or "top")
|
||||
folder = "" if flt or not wrap else (vrem.split("/")[-1].lstrip(".") or "top")
|
||||
|
||||
g = self.walk(folder, vrem, [], uname, [[True, False]], dots, scandir, False)
|
||||
for _, _, vpath, apath, files, rd, vd in g:
|
||||
@@ -813,7 +812,7 @@ class AuthSrv(object):
|
||||
value: Union[str, bool, list[str]],
|
||||
is_list: bool,
|
||||
) -> None:
|
||||
if name not in ["mtp"]:
|
||||
if name not in ["mtp", "xbu", "xau", "xbr", "xar", "xbd", "xad", "xm"]:
|
||||
flags[name] = value
|
||||
return
|
||||
|
||||
@@ -1120,14 +1119,33 @@ class AuthSrv(object):
|
||||
vol.flags[k] = True
|
||||
|
||||
for ga, vf in (
|
||||
("no_sb_md", "no_sb_md"),
|
||||
("no_sb_lg", "no_sb_lg"),
|
||||
("no_forget", "noforget"),
|
||||
("no_dupe", "nodupe"),
|
||||
("hardlink", "hardlink"),
|
||||
("never_symlink", "neversymlink"),
|
||||
("no_dedup", "copydupes"),
|
||||
("magic", "magic"),
|
||||
("xlink", "xlink"),
|
||||
):
|
||||
if getattr(self.args, ga):
|
||||
vol.flags[vf] = True
|
||||
|
||||
for ve, vd in (
|
||||
("sb_md", "no_sb_md"),
|
||||
("sb_lg", "no_sb_lg"),
|
||||
):
|
||||
if ve in vol.flags:
|
||||
vol.flags.pop(vd, None)
|
||||
|
||||
for ga, vf in (
|
||||
("md_sbf", "md_sbf"),
|
||||
("lg_sbf", "lg_sbf"),
|
||||
):
|
||||
if vf not in vol.flags:
|
||||
vol.flags[vf] = getattr(self.args, ga)
|
||||
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = True
|
||||
@@ -1136,6 +1154,12 @@ class AuthSrv(object):
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = False
|
||||
|
||||
dbds = "acid|swal|wal|yolo"
|
||||
vol.flags["dbd"] = dbd = vol.flags.get("dbd") or self.args.dbd
|
||||
if dbd not in dbds.split("|"):
|
||||
t = "invalid dbd [{}]; must be one of [{}]"
|
||||
raise Exception(t.format(dbd, dbds))
|
||||
|
||||
# default tag cfgs if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
@@ -1146,8 +1170,32 @@ class AuthSrv(object):
|
||||
if "mth" not in vol.flags:
|
||||
vol.flags["mth"] = self.args.mth
|
||||
|
||||
# append parsers from argv to volflags
|
||||
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
||||
# append additive args from argv to volflags
|
||||
hooks = "xbu xau xbr xar xbd xad xm".split()
|
||||
for name in ["mtp"] + hooks:
|
||||
self._read_volflag(vol.flags, name, getattr(self.args, name), True)
|
||||
|
||||
for hn in hooks:
|
||||
cmds = vol.flags.get(hn)
|
||||
if not cmds:
|
||||
continue
|
||||
|
||||
ncmds = []
|
||||
for cmd in cmds:
|
||||
hfs = []
|
||||
ocmd = cmd
|
||||
while "," in cmd[:6]:
|
||||
zs, cmd = cmd.split(",", 1)
|
||||
hfs.append(zs)
|
||||
|
||||
if "c" in hfs and "f" in hfs:
|
||||
t = "cannot combine flags c and f; removing f from eventhook [{}]"
|
||||
self.log(t.format(ocmd), 1)
|
||||
hfs = [x for x in hfs if x != "f"]
|
||||
ocmd = ",".join(hfs + [cmd])
|
||||
|
||||
ncmds.append(ocmd)
|
||||
vol.flags[hn] = ncmds
|
||||
|
||||
# d2d drops all database features for a volume
|
||||
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"], ["d2d", "e2v"]]:
|
||||
@@ -1188,6 +1236,9 @@ class AuthSrv(object):
|
||||
self.log(t.format(vol.vpath), 1)
|
||||
del vol.flags["lifetime"]
|
||||
|
||||
if vol.flags.get("neversymlink") and not vol.flags.get("hardlink"):
|
||||
vol.flags["copydupes"] = True
|
||||
|
||||
# verify tags mentioned by -mt[mp] are used by -mte
|
||||
local_mtp = {}
|
||||
local_only_mtp = {}
|
||||
|
||||
@@ -24,13 +24,15 @@ def listdir(p: str = ".") -> list[str]:
|
||||
return [fsdec(x) for x in os.listdir(fsenc(p))]
|
||||
|
||||
|
||||
def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None:
|
||||
def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool:
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode)
|
||||
return True
|
||||
except:
|
||||
if not exist_ok or not os.path.isdir(bname):
|
||||
raise
|
||||
return False
|
||||
|
||||
|
||||
def mkdir(p: str, mode: int = 0o755) -> None:
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
|
||||
import queue
|
||||
|
||||
@@ -93,12 +94,15 @@ class BrokerMp(object):
|
||||
|
||||
else:
|
||||
# new ipc invoking managed service in hub
|
||||
try:
|
||||
obj = self.hub
|
||||
for node in dest.split("."):
|
||||
obj = getattr(obj, node)
|
||||
|
||||
# TODO will deadlock if dest performs another ipc
|
||||
rv = try_exec(retq_id, obj, *args)
|
||||
except:
|
||||
rv = ["exception", "stack", traceback.format_exc()]
|
||||
|
||||
if retq_id:
|
||||
proc.q_pend.put((retq_id, "retq", rv))
|
||||
|
||||
@@ -63,6 +63,8 @@ from .util import (
|
||||
read_socket_unbounded,
|
||||
relchk,
|
||||
ren_open,
|
||||
runhook,
|
||||
hidedir,
|
||||
s3enc,
|
||||
sanitize_fn,
|
||||
sendfile_kern,
|
||||
@@ -118,14 +120,15 @@ class HttpCli(object):
|
||||
# placeholders; assigned by run()
|
||||
self.keepalive = False
|
||||
self.is_https = False
|
||||
self.is_vproxied = False
|
||||
self.in_hdr_recv = True
|
||||
self.headers: dict[str, str] = {}
|
||||
self.mode = " "
|
||||
self.req = " "
|
||||
self.http_ver = " "
|
||||
self.host = " "
|
||||
self.ua = " "
|
||||
self.is_rclone = False
|
||||
self.is_ancient = False
|
||||
self.ouparam: dict[str, str] = {}
|
||||
self.uparam: dict[str, str] = {}
|
||||
self.cookies: dict[str, str] = {}
|
||||
@@ -154,8 +157,8 @@ class HttpCli(object):
|
||||
self.trailing_slash = True
|
||||
self.out_headerlist: list[tuple[str, str]] = []
|
||||
self.out_headers = {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Cache-Control": "no-store; max-age=0",
|
||||
"Vary": "Origin, PW, Cookie",
|
||||
"Cache-Control": "no-store, max-age=0",
|
||||
}
|
||||
h = self.args.html_head
|
||||
if self.args.no_robots:
|
||||
@@ -190,6 +193,7 @@ class HttpCli(object):
|
||||
|
||||
def j2s(self, name: str, **ka: Any) -> str:
|
||||
tpl = self.conn.hsrv.j2[name]
|
||||
ka["r"] = self.args.SR if self.is_vproxied else ""
|
||||
ka["ts"] = self.conn.hsrv.cachebuster()
|
||||
ka["lang"] = self.args.lang
|
||||
ka["favico"] = self.args.favico
|
||||
@@ -240,7 +244,7 @@ class HttpCli(object):
|
||||
self.http_ver = "HTTP/1.1"
|
||||
# self.log("pebkac at httpcli.run #1: " + repr(ex))
|
||||
self.keepalive = False
|
||||
h = {"WWW-Authenticate": "Basic"} if ex.code == 401 else {}
|
||||
h = {"WWW-Authenticate": 'Basic realm="a"'} if ex.code == 401 else {}
|
||||
try:
|
||||
self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True)
|
||||
return self.keepalive
|
||||
@@ -249,15 +253,17 @@ class HttpCli(object):
|
||||
|
||||
self.ua = self.headers.get("user-agent", "")
|
||||
self.is_rclone = self.ua.startswith("rclone/")
|
||||
self.is_ancient = self.ua.startswith("Mozilla/4.")
|
||||
|
||||
zs = self.headers.get("connection", "").lower()
|
||||
self.keepalive = not zs.startswith("close") and (
|
||||
self.keepalive = "close" not in zs and (
|
||||
self.http_ver != "HTTP/1.0" or zs == "keep-alive"
|
||||
)
|
||||
self.is_https = (
|
||||
self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls
|
||||
)
|
||||
self.host = self.headers.get("host") or "{}:{}".format(
|
||||
*list(self.s.getsockname()[:2])
|
||||
)
|
||||
|
||||
n = self.args.rproxy
|
||||
if n:
|
||||
@@ -275,6 +281,8 @@ class HttpCli(object):
|
||||
self.log(t.format(self.args.rproxy, zso), c=3)
|
||||
|
||||
self.log_src = self.conn.set_rproxy(self.ip)
|
||||
self.is_vproxied = bool(self.args.R)
|
||||
self.host = self.headers.get("x-forwarded-host") or self.host
|
||||
|
||||
if self.is_banned():
|
||||
return False
|
||||
@@ -290,7 +298,10 @@ class HttpCli(object):
|
||||
else:
|
||||
self.keepalive = False
|
||||
|
||||
if self.args.ihead:
|
||||
ptn: Optional[Pattern[str]] = self.conn.lf_url # mypy404
|
||||
self.do_log = not ptn or not ptn.search(self.req)
|
||||
|
||||
if self.args.ihead and self.do_log:
|
||||
keys = self.args.ihead
|
||||
if "*" in keys:
|
||||
keys = list(sorted(self.headers.keys()))
|
||||
@@ -319,6 +330,13 @@ class HttpCli(object):
|
||||
else:
|
||||
uparam[k.lower()] = ""
|
||||
|
||||
if self.is_vproxied:
|
||||
if vpath.startswith(self.args.R):
|
||||
vpath = vpath[len(self.args.R) + 1 :]
|
||||
else:
|
||||
t = "incorrect --rp-loc or webserver config; expected vpath starting with [{}] but got [{}]"
|
||||
self.log(t.format(self.args.R, vpath), 1)
|
||||
|
||||
self.ouparam = {k: zs for k, zs in uparam.items()}
|
||||
|
||||
if self.args.rsp_slp:
|
||||
@@ -328,11 +346,12 @@ class HttpCli(object):
|
||||
if zso:
|
||||
zsll = [x.split("=", 1) for x in zso.split(";") if "=" in x]
|
||||
cookies = {k.strip(): unescape_cookie(zs) for k, zs in zsll}
|
||||
for kc, ku in [["cppwd", "pw"], ["b", "b"]]:
|
||||
if kc in cookies and ku not in uparam:
|
||||
uparam[ku] = cookies[kc]
|
||||
cookie_pw = cookies.get("cppws") or cookies.get("cppwd") or ""
|
||||
if "b" in cookies and "b" not in uparam:
|
||||
uparam["b"] = cookies["b"]
|
||||
else:
|
||||
cookies = {}
|
||||
cookie_pw = ""
|
||||
|
||||
if len(uparam) > 10 or len(cookies) > 50:
|
||||
raise Pebkac(400, "u wot m8")
|
||||
@@ -345,25 +364,24 @@ class HttpCli(object):
|
||||
if ANYWIN:
|
||||
ok = ok and not relchk(self.vpath)
|
||||
|
||||
if not ok:
|
||||
if not ok and (self.vpath != "*" or self.mode != "OPTIONS"):
|
||||
self.log("invalid relpath [{}]".format(self.vpath))
|
||||
return self.tx_404() and self.keepalive
|
||||
|
||||
pwd = ""
|
||||
zso = self.headers.get("authorization")
|
||||
bauth = ""
|
||||
if zso:
|
||||
try:
|
||||
zb = zso.split(" ")[1].encode("ascii")
|
||||
zs = base64.b64decode(zb).decode("utf-8")
|
||||
# try "pwd", "x:pwd", "pwd:x"
|
||||
for zs in [zs] + zs.split(":", 1)[::-1]:
|
||||
if self.asrv.iacct.get(zs):
|
||||
pwd = zs
|
||||
for bauth in [zs] + zs.split(":", 1)[::-1]:
|
||||
if self.asrv.iacct.get(bauth):
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
self.pw = uparam.get("pw") or pwd
|
||||
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
|
||||
self.uname = self.asrv.iacct.get(self.pw) or "*"
|
||||
self.rvol = self.asrv.vfs.aread[self.uname]
|
||||
self.wvol = self.asrv.vfs.awrite[self.uname]
|
||||
@@ -372,17 +390,17 @@ class HttpCli(object):
|
||||
self.gvol = self.asrv.vfs.aget[self.uname]
|
||||
self.upvol = self.asrv.vfs.apget[self.uname]
|
||||
|
||||
if self.pw:
|
||||
self.out_headerlist.append(("Set-Cookie", self.get_pwd_cookie(self.pw)[0]))
|
||||
if self.pw and (
|
||||
self.pw != cookie_pw or self.conn.freshen_pwd + 30 < time.time()
|
||||
):
|
||||
self.conn.freshen_pwd = time.time()
|
||||
self.get_pwd_cookie(self.pw)
|
||||
|
||||
if self.is_rclone:
|
||||
uparam["dots"] = ""
|
||||
uparam["b"] = ""
|
||||
cookies["b"] = ""
|
||||
|
||||
ptn: Optional[Pattern[str]] = self.conn.lf_url # mypy404
|
||||
self.do_log = not ptn or not ptn.search(self.req)
|
||||
|
||||
(
|
||||
self.can_read,
|
||||
self.can_write,
|
||||
@@ -393,15 +411,22 @@ class HttpCli(object):
|
||||
) = self.asrv.vfs.can_access(self.vpath, self.uname)
|
||||
|
||||
try:
|
||||
# getattr(self.mode) is not yet faster than this
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
cors_k = self._cors()
|
||||
if self.mode in ("GET", "HEAD"):
|
||||
return self.handle_get() and self.keepalive
|
||||
elif self.mode == "POST":
|
||||
if self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
|
||||
if not cors_k:
|
||||
origin = self.headers.get("origin", "<?>")
|
||||
self.log("cors-reject {} from {}".format(self.mode, origin), 3)
|
||||
raise Pebkac(403, "no surfing")
|
||||
|
||||
# getattr(self.mode) is not yet faster than this
|
||||
if self.mode == "POST":
|
||||
return self.handle_post() and self.keepalive
|
||||
elif self.mode == "PUT":
|
||||
return self.handle_put() and self.keepalive
|
||||
elif self.mode == "OPTIONS":
|
||||
return self.handle_options() and self.keepalive
|
||||
elif self.mode == "PROPFIND":
|
||||
return self.handle_propfind() and self.keepalive
|
||||
elif self.mode == "DELETE":
|
||||
@@ -447,7 +472,7 @@ class HttpCli(object):
|
||||
msg += "hint: important info in the server log\r\n"
|
||||
|
||||
zb = b"<pre>" + html_escape(msg).encode("utf-8", "replace")
|
||||
h = {"WWW-Authenticate": "Basic"} if pex.code == 401 else {}
|
||||
h = {"WWW-Authenticate": 'Basic realm="a"'} if pex.code == 401 else {}
|
||||
self.reply(zb, status=pex.code, headers=h, volsan=True)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
@@ -580,12 +605,12 @@ class HttpCli(object):
|
||||
if self.is_rclone:
|
||||
return ""
|
||||
|
||||
cmap = {"pw": "cppwd"}
|
||||
kv = {
|
||||
k: zs
|
||||
for k, zs in self.uparam.items()
|
||||
if k not in rm and self.cookies.get(cmap.get(k, k)) != zs
|
||||
}
|
||||
kv = {k: zs for k, zs in self.uparam.items() if k not in rm}
|
||||
if "pw" in kv:
|
||||
pw = self.cookies.get("cppws") or self.cookies.get("cppwd")
|
||||
if kv["pw"] == pw:
|
||||
del kv["pw"]
|
||||
|
||||
kv.update(add)
|
||||
if not kv:
|
||||
return ""
|
||||
@@ -603,10 +628,11 @@ class HttpCli(object):
|
||||
status: int = 200,
|
||||
use302: bool = False,
|
||||
) -> bool:
|
||||
vp = self.args.RS + vpath
|
||||
html = self.j2s(
|
||||
"msg",
|
||||
h2='<a href="/{}">{} /{}</a>'.format(
|
||||
quotep(vpath) + suf, flavor, html_escape(vpath, crlf=True) + suf
|
||||
quotep(vp) + suf, flavor, html_escape(vp, crlf=True) + suf
|
||||
),
|
||||
pre=msg,
|
||||
click=click,
|
||||
@@ -619,6 +645,63 @@ class HttpCli(object):
|
||||
|
||||
return True
|
||||
|
||||
def _cors(self) -> bool:
|
||||
ih = self.headers
|
||||
origin = ih.get("origin")
|
||||
if not origin:
|
||||
sfsite = ih.get("sec-fetch-site")
|
||||
if sfsite and sfsite.lower().startswith("cross"):
|
||||
origin = ":|" # sandboxed iframe
|
||||
else:
|
||||
return True
|
||||
|
||||
oh = self.out_headers
|
||||
origin = origin.lower()
|
||||
good_origins = self.args.acao + [
|
||||
"{}://{}".format(
|
||||
"https" if self.is_https else "http",
|
||||
self.host.lower().split(":")[0],
|
||||
)
|
||||
]
|
||||
if re.sub(r"(:[0-9]{1,5})?/?$", "", origin) in good_origins:
|
||||
good_origin = True
|
||||
bad_hdrs = ("",)
|
||||
else:
|
||||
good_origin = False
|
||||
bad_hdrs = ("", "pw")
|
||||
|
||||
# '*' blocks all credentials (cookies, http-auth);
|
||||
# exact-match for Origin is necessary to unlock those,
|
||||
# however yolo-requests (?pw=) are always allowed
|
||||
acah = ih.get("access-control-request-headers", "")
|
||||
acao = (origin if good_origin else None) or (
|
||||
"*" if "*" in good_origins else None
|
||||
)
|
||||
if self.args.allow_csrf:
|
||||
acao = origin or acao or "*" # explicitly permit impersonation
|
||||
acam = ", ".join(self.conn.hsrv.mallow) # and all methods + headers
|
||||
oh["Access-Control-Allow-Credentials"] = "true"
|
||||
good_origin = True
|
||||
else:
|
||||
acam = ", ".join(self.args.acam)
|
||||
# wash client-requested headers and roll with that
|
||||
if "range" not in acah.lower():
|
||||
acah += ",Range" # firefox
|
||||
req_h = acah.split(",")
|
||||
req_h = [x.strip() for x in req_h]
|
||||
req_h = [x for x in req_h if x.lower() not in bad_hdrs]
|
||||
acah = ", ".join(req_h)
|
||||
|
||||
if not acao:
|
||||
return False
|
||||
|
||||
oh["Access-Control-Allow-Origin"] = acao
|
||||
oh["Access-Control-Allow-Methods"] = acam.upper()
|
||||
if acah:
|
||||
oh["Access-Control-Allow-Headers"] = acah
|
||||
|
||||
return good_origin
|
||||
|
||||
def handle_get(self) -> bool:
|
||||
if self.do_log:
|
||||
logmsg = "{:4} {}".format(self.mode, self.req)
|
||||
@@ -641,6 +724,15 @@ class HttpCli(object):
|
||||
if self.vpath.startswith(".cpr/ssdp"):
|
||||
return self.conn.hsrv.ssdp.reply(self)
|
||||
|
||||
if self.vpath.startswith(".cpr/dd/") and self.args.mpmc:
|
||||
if self.args.mpmc == ".":
|
||||
raise Pebkac(404)
|
||||
|
||||
loc = self.args.mpmc.rstrip("/") + self.vpath[self.vpath.rfind("/") :]
|
||||
h = {"Location": loc, "Cache-Control": "max-age=39"}
|
||||
self.reply(b"", 301, headers=h)
|
||||
return True
|
||||
|
||||
static_path = os.path.join(self.E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
@@ -658,15 +750,16 @@ class HttpCli(object):
|
||||
if "tree" in self.uparam:
|
||||
return self.tx_tree()
|
||||
|
||||
if "scan" in self.uparam:
|
||||
return self.scanvol()
|
||||
|
||||
if self.args.getmod:
|
||||
if "delete" in self.uparam:
|
||||
return self.handle_rm([])
|
||||
|
||||
if "move" in self.uparam:
|
||||
return self.handle_mv()
|
||||
|
||||
if "scan" in self.uparam:
|
||||
return self.scanvol()
|
||||
|
||||
if not self.vpath:
|
||||
if "reload" in self.uparam:
|
||||
return self.handle_reload()
|
||||
@@ -814,7 +907,7 @@ class HttpCli(object):
|
||||
raise Pebkac(404)
|
||||
|
||||
fgen = itertools.chain([topdir], fgen) # type: ignore
|
||||
vtop = vjoin(vn.vpath, rem)
|
||||
vtop = vjoin(self.args.R, vjoin(vn.vpath, rem))
|
||||
|
||||
chunksz = 0x7FF8 # preferred by nginx or cf (dunno which)
|
||||
|
||||
@@ -839,7 +932,7 @@ class HttpCli(object):
|
||||
"supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>',
|
||||
}
|
||||
if not isdir:
|
||||
pvs["getcontenttype"] = guess_mime(rp)
|
||||
pvs["getcontenttype"] = html_escape(guess_mime(rp))
|
||||
pvs["getcontentlength"] = str(st.st_size)
|
||||
|
||||
for k, v in pvs.items():
|
||||
@@ -914,7 +1007,7 @@ class HttpCli(object):
|
||||
|
||||
el = xroot.find(r"./{DAV:}response")
|
||||
assert el
|
||||
e2 = mktnod("D:href", quotep("/" + self.vpath))
|
||||
e2 = mktnod("D:href", quotep(self.args.SRS + self.vpath))
|
||||
el.insert(0, e2)
|
||||
|
||||
el = xroot.find(r"./{DAV:}response/{DAV:}propstat")
|
||||
@@ -969,7 +1062,9 @@ class HttpCli(object):
|
||||
|
||||
lk.append(mkenod("D:timeout", mktnod("D:href", "Second-3310")))
|
||||
lk.append(mkenod("D:locktoken", mktnod("D:href", uuid.uuid4().urn)))
|
||||
lk.append(mkenod("D:lockroot", mktnod("D:href", "/" + quotep(self.vpath))))
|
||||
lk.append(
|
||||
mkenod("D:lockroot", mktnod("D:href", quotep(self.args.SRS + self.vpath)))
|
||||
)
|
||||
|
||||
lk2 = mkenod("D:activelock")
|
||||
xroot = mkenod("D:prop", mkenod("D:lockdiscovery", lk2))
|
||||
@@ -1061,26 +1156,16 @@ class HttpCli(object):
|
||||
if self.do_log:
|
||||
self.log("OPTIONS " + self.req)
|
||||
|
||||
ret = {
|
||||
"Allow": "GET, HEAD, POST, PUT, OPTIONS",
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "*",
|
||||
"Access-Control-Allow-Headers": "*",
|
||||
}
|
||||
|
||||
wd = {
|
||||
"Dav": "1, 2",
|
||||
"Ms-Author-Via": "DAV",
|
||||
}
|
||||
oh = self.out_headers
|
||||
oh["Allow"] = ", ".join(self.conn.hsrv.mallow)
|
||||
|
||||
if not self.args.no_dav:
|
||||
# PROPPATCH, LOCK, UNLOCK, COPY: noop (spec-must)
|
||||
zs = ", PROPFIND, PROPPATCH, LOCK, UNLOCK, MKCOL, COPY, MOVE, DELETE"
|
||||
ret["Allow"] += zs
|
||||
ret.update(wd)
|
||||
oh["Dav"] = "1, 2"
|
||||
oh["Ms-Author-Via"] = "DAV"
|
||||
|
||||
# winxp-webdav doesnt know what 204 is
|
||||
self.send_headers(0, 200, headers=ret)
|
||||
self.send_headers(0, 200)
|
||||
return True
|
||||
|
||||
def handle_delete(self) -> bool:
|
||||
@@ -1120,15 +1205,23 @@ class HttpCli(object):
|
||||
return self.handle_stash(False)
|
||||
|
||||
ctype = self.headers.get("content-type", "").lower()
|
||||
if not ctype:
|
||||
raise Pebkac(400, "you can't post without a content-type header")
|
||||
|
||||
if "multipart/form-data" in ctype:
|
||||
return self.handle_post_multipart()
|
||||
|
||||
if "text/plain" in ctype or "application/xml" in ctype:
|
||||
if (
|
||||
"application/json" in ctype
|
||||
or "text/plain" in ctype
|
||||
or "application/xml" in ctype
|
||||
):
|
||||
return self.handle_post_json()
|
||||
|
||||
if "move" in self.uparam:
|
||||
return self.handle_mv()
|
||||
|
||||
if "delete" in self.uparam:
|
||||
return self.handle_rm([])
|
||||
|
||||
if "application/octet-stream" in ctype:
|
||||
return self.handle_post_binary()
|
||||
|
||||
@@ -1157,9 +1250,27 @@ class HttpCli(object):
|
||||
plain = zb.decode("utf-8", "replace")
|
||||
if buf.startswith(b"msg="):
|
||||
plain = plain[4:]
|
||||
vfs, rem = self.asrv.vfs.get(
|
||||
self.vpath, self.uname, False, False
|
||||
)
|
||||
xm = vfs.flags.get("xm")
|
||||
if xm:
|
||||
runhook(
|
||||
self.log,
|
||||
xm,
|
||||
vfs.canonical(rem),
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
self.ip,
|
||||
time.time(),
|
||||
len(xm),
|
||||
plain,
|
||||
)
|
||||
|
||||
t = "urlform_dec {} @ {}\n {}\n"
|
||||
self.log(t.format(len(plain), self.vpath, plain))
|
||||
|
||||
except Exception as ex:
|
||||
self.log(repr(ex))
|
||||
|
||||
@@ -1200,7 +1311,7 @@ class HttpCli(object):
|
||||
# post_sz, sha_hex, sha_b64, remains, path, url
|
||||
reader, remains = self.get_body_reader()
|
||||
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||
rnd, _, lifetime = self.upload_flags(vfs)
|
||||
rnd, _, lifetime, xbu, xau = self.upload_flags(vfs)
|
||||
lim = vfs.get_dbv(rem)[0].lim
|
||||
fdir = vfs.canonical(rem)
|
||||
if lim:
|
||||
@@ -1280,6 +1391,8 @@ class HttpCli(object):
|
||||
if rnd and not self.args.nw:
|
||||
fn = self.rand_name(fdir, fn, rnd)
|
||||
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
if is_put and not self.args.no_dav:
|
||||
# allow overwrite if...
|
||||
# * volflag 'daw' is set
|
||||
@@ -1288,18 +1401,35 @@ class HttpCli(object):
|
||||
# * file exists and is empty
|
||||
# * and there is no .PARTIAL
|
||||
|
||||
path = os.path.join(fdir, fn)
|
||||
tnam = fn + ".PARTIAL"
|
||||
if self.args.dotpart:
|
||||
tnam = "." + tnam
|
||||
|
||||
if ("daw" in vfs.flags and self.can_delete) or (
|
||||
if (vfs.flags.get("daw") and self.can_delete) or (
|
||||
not bos.path.exists(os.path.join(fdir, tnam))
|
||||
and bos.path.exists(path)
|
||||
and not bos.path.getsize(path)
|
||||
):
|
||||
params["overwrite"] = "a"
|
||||
|
||||
if xbu:
|
||||
at = time.time() - lifetime
|
||||
if not runhook(
|
||||
self.log,
|
||||
xbu,
|
||||
path,
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
self.ip,
|
||||
at,
|
||||
remains,
|
||||
"",
|
||||
):
|
||||
t = "upload denied by xbu"
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
with ren_open(fn, *open_a, **params) as zfw:
|
||||
f, fn = zfw["orz"]
|
||||
path = os.path.join(fdir, fn)
|
||||
@@ -1339,6 +1469,24 @@ class HttpCli(object):
|
||||
fn = fn2
|
||||
path = path2
|
||||
|
||||
at = time.time() - lifetime
|
||||
if xau and not runhook(
|
||||
self.log,
|
||||
xau,
|
||||
path,
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
self.ip,
|
||||
at,
|
||||
post_sz,
|
||||
"",
|
||||
):
|
||||
t = "upload denied by xau"
|
||||
self.log(t, 1)
|
||||
os.unlink(path)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
vfs, rem = vfs.get_dbv(rem)
|
||||
self.conn.hsrv.broker.say(
|
||||
"up2k.hash_file",
|
||||
@@ -1347,7 +1495,7 @@ class HttpCli(object):
|
||||
rem,
|
||||
fn,
|
||||
self.ip,
|
||||
time.time() - lifetime,
|
||||
at,
|
||||
)
|
||||
|
||||
vsuf = ""
|
||||
@@ -1364,8 +1512,8 @@ class HttpCli(object):
|
||||
|
||||
url = "{}://{}/{}".format(
|
||||
"https" if self.is_https else "http",
|
||||
self.headers.get("host") or "{}:{}".format(*list(self.s.getsockname()[:2])),
|
||||
vpath + vsuf,
|
||||
self.host,
|
||||
self.args.RS + vpath + vsuf,
|
||||
)
|
||||
|
||||
return post_sz, sha_hex, sha_b64, remains, path, url
|
||||
@@ -1530,11 +1678,6 @@ class HttpCli(object):
|
||||
if "delete" in self.uparam:
|
||||
return self.handle_rm(body)
|
||||
|
||||
# up2k-php compat
|
||||
for k in "chunkpit.php", "handshake.php":
|
||||
if self.vpath.endswith(k):
|
||||
self.vpath = self.vpath[: -len(k)]
|
||||
|
||||
name = undot(body["name"])
|
||||
if "/" in name:
|
||||
raise Pebkac(400, "your client is old; press CTRL-SHIFT-R and try again")
|
||||
@@ -1545,9 +1688,14 @@ class HttpCli(object):
|
||||
body["vtop"] = dbv.vpath
|
||||
body["ptop"] = dbv.realpath
|
||||
body["prel"] = vrem
|
||||
body["host"] = self.host
|
||||
body["user"] = self.uname
|
||||
body["addr"] = self.ip
|
||||
body["vcfg"] = dbv.flags
|
||||
|
||||
if not self.can_delete:
|
||||
body.pop("replace", None)
|
||||
|
||||
if rem:
|
||||
dst = vfs.canonical(rem)
|
||||
try:
|
||||
@@ -1568,6 +1716,10 @@ class HttpCli(object):
|
||||
|
||||
x = self.conn.hsrv.broker.ask("up2k.handle_json", body)
|
||||
ret = x.get()
|
||||
if self.is_vproxied:
|
||||
if "purl" in ret:
|
||||
ret["purl"] = self.args.SR + ret["purl"]
|
||||
|
||||
ret = json.dumps(ret)
|
||||
self.log(ret)
|
||||
self.reply(ret.encode("utf-8"), mime="application/json")
|
||||
@@ -1626,6 +1778,10 @@ class HttpCli(object):
|
||||
if t not in order:
|
||||
order.append(t)
|
||||
|
||||
if self.is_vproxied:
|
||||
for hit in hits:
|
||||
hit["rp"] = self.args.RS + hit["rp"]
|
||||
|
||||
r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
|
||||
self.reply(r, mime="application/json")
|
||||
return True
|
||||
@@ -1753,21 +1909,19 @@ class HttpCli(object):
|
||||
self.parser.drop()
|
||||
|
||||
self.out_headerlist = [
|
||||
x
|
||||
for x in self.out_headerlist
|
||||
if x[0] != "Set-Cookie" or "cppwd=" not in x[1]
|
||||
x for x in self.out_headerlist if x[0] != "Set-Cookie" or "cppw" != x[1][:4]
|
||||
]
|
||||
|
||||
dst = "/"
|
||||
dst = self.args.SRS
|
||||
if self.vpath:
|
||||
dst += quotep(self.vpath)
|
||||
|
||||
ck, msg = self.get_pwd_cookie(pwd)
|
||||
msg = self.get_pwd_cookie(pwd)
|
||||
html = self.j2s("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
|
||||
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
||||
self.reply(html.encode("utf-8"))
|
||||
return True
|
||||
|
||||
def get_pwd_cookie(self, pwd: str) -> tuple[str, str]:
|
||||
def get_pwd_cookie(self, pwd: str) -> str:
|
||||
if pwd in self.asrv.iacct:
|
||||
msg = "login ok"
|
||||
dur = int(60 * 60 * self.args.logout)
|
||||
@@ -1784,11 +1938,18 @@ class HttpCli(object):
|
||||
pwd = "x" # nosec
|
||||
dur = None
|
||||
|
||||
r = gencookie("cppwd", pwd, dur)
|
||||
if self.is_ancient:
|
||||
r = r.rsplit(" ", 1)[0]
|
||||
if pwd == "x":
|
||||
# reset both plaintext and tls
|
||||
# (only affects active tls cookies when tls)
|
||||
for k in ("cppwd", "cppws") if self.is_https else ("cppwd",):
|
||||
ck = gencookie(k, pwd, self.args.R, False, dur)
|
||||
self.out_headerlist.append(("Set-Cookie", ck))
|
||||
else:
|
||||
k = "cppws" if self.is_https else "cppwd"
|
||||
ck = gencookie(k, pwd, self.args.R, self.is_https, dur)
|
||||
self.out_headerlist.append(("Set-Cookie", ck))
|
||||
|
||||
return r, msg
|
||||
return msg
|
||||
|
||||
def handle_mkdir(self) -> bool:
|
||||
assert self.parser
|
||||
@@ -1855,7 +2016,7 @@ class HttpCli(object):
|
||||
self.redirect(vpath, "?edit")
|
||||
return True
|
||||
|
||||
def upload_flags(self, vfs: VFS) -> tuple[int, bool, int]:
|
||||
def upload_flags(self, vfs: VFS) -> tuple[int, bool, int, list[str], list[str]]:
|
||||
srnd = self.uparam.get("rand", self.headers.get("rand", ""))
|
||||
rnd = int(srnd) if srnd and not self.args.nw else 0
|
||||
ac = self.uparam.get(
|
||||
@@ -1869,7 +2030,13 @@ class HttpCli(object):
|
||||
else:
|
||||
lifetime = 0
|
||||
|
||||
return rnd, want_url, lifetime
|
||||
return (
|
||||
rnd,
|
||||
want_url,
|
||||
lifetime,
|
||||
vfs.flags.get("xbu") or [],
|
||||
vfs.flags.get("xau") or [],
|
||||
)
|
||||
|
||||
def handle_plain_upload(self) -> bool:
|
||||
assert self.parser
|
||||
@@ -1886,7 +2053,7 @@ class HttpCli(object):
|
||||
if not nullwrite:
|
||||
bos.makedirs(fdir_base)
|
||||
|
||||
rnd, want_url, lifetime = self.upload_flags(vfs)
|
||||
rnd, want_url, lifetime, xbu, xau = self.upload_flags(vfs)
|
||||
|
||||
files: list[tuple[int, str, str, str, str, str]] = []
|
||||
# sz, sha_hex, sha_b64, p_file, fname, abspath
|
||||
@@ -1928,6 +2095,24 @@ class HttpCli(object):
|
||||
tnam = fname = os.devnull
|
||||
fdir = abspath = ""
|
||||
|
||||
if xbu:
|
||||
at = time.time() - lifetime
|
||||
if not runhook(
|
||||
self.log,
|
||||
xbu,
|
||||
abspath,
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
self.ip,
|
||||
at,
|
||||
0,
|
||||
"",
|
||||
):
|
||||
t = "upload denied by xbu"
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
if lim:
|
||||
lim.chk_bup(self.ip)
|
||||
lim.chk_nup(self.ip)
|
||||
@@ -1970,6 +2155,24 @@ class HttpCli(object):
|
||||
files.append(
|
||||
(sz, sha_hex, sha_b64, p_file or "(discarded)", fname, abspath)
|
||||
)
|
||||
at = time.time() - lifetime
|
||||
if xau and not runhook(
|
||||
self.log,
|
||||
xau,
|
||||
abspath,
|
||||
self.vpath,
|
||||
self.host,
|
||||
self.uname,
|
||||
self.ip,
|
||||
at,
|
||||
sz,
|
||||
"",
|
||||
):
|
||||
t = "upload denied by xau"
|
||||
self.log(t, 1)
|
||||
os.unlink(abspath)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
dbv, vrem = vfs.get_dbv(rem)
|
||||
self.conn.hsrv.broker.say(
|
||||
"up2k.hash_file",
|
||||
@@ -1978,7 +2181,7 @@ class HttpCli(object):
|
||||
vrem,
|
||||
fname,
|
||||
self.ip,
|
||||
time.time() - lifetime,
|
||||
at,
|
||||
)
|
||||
self.conn.nbyte += sz
|
||||
|
||||
@@ -2024,7 +2227,7 @@ class HttpCli(object):
|
||||
)[: vfs.flags["fk"]]
|
||||
|
||||
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
|
||||
rel_url = quotep(vpath) + vsuf
|
||||
rel_url = quotep(self.args.RS + vpath) + vsuf
|
||||
msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
|
||||
sha_hex[:56],
|
||||
sha_b64,
|
||||
@@ -2038,8 +2241,7 @@ class HttpCli(object):
|
||||
jpart = {
|
||||
"url": "{}://{}/{}".format(
|
||||
"https" if self.is_https else "http",
|
||||
self.headers.get("host")
|
||||
or "{}:{}".format(*list(self.s.getsockname()[:2])),
|
||||
self.host,
|
||||
rel_url,
|
||||
),
|
||||
"sha512": sha_hex[:56],
|
||||
@@ -2168,7 +2370,9 @@ class HttpCli(object):
|
||||
mdir, mfile = os.path.split(fp)
|
||||
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
||||
try:
|
||||
bos.mkdir(os.path.join(mdir, ".hist"))
|
||||
dp = os.path.join(mdir, ".hist")
|
||||
bos.mkdir(dp)
|
||||
hidedir(dp)
|
||||
except:
|
||||
pass
|
||||
bos.rename(fp, os.path.join(mdir, ".hist", mfile2))
|
||||
@@ -2240,8 +2444,17 @@ class HttpCli(object):
|
||||
if stat.S_ISDIR(st.st_mode):
|
||||
continue
|
||||
|
||||
if stat.S_ISBLK(st.st_mode):
|
||||
fd = bos.open(fs_path, os.O_RDONLY)
|
||||
try:
|
||||
sz = os.lseek(fd, 0, os.SEEK_END)
|
||||
finally:
|
||||
os.close(fd)
|
||||
else:
|
||||
sz = st.st_size
|
||||
|
||||
file_ts = max(file_ts, int(st.st_mtime))
|
||||
editions[ext or "plain"] = (fs_path, st.st_size)
|
||||
editions[ext or "plain"] = (fs_path, sz)
|
||||
except:
|
||||
pass
|
||||
if not self.vpath.startswith(".cpr/"):
|
||||
@@ -2418,7 +2631,7 @@ class HttpCli(object):
|
||||
if fn:
|
||||
fn = fn.rstrip("/").split("/")[-1]
|
||||
else:
|
||||
fn = self.headers.get("host", "hey")
|
||||
fn = self.host.split(":")[0]
|
||||
|
||||
safe = (string.ascii_letters + string.digits).replace("%", "")
|
||||
afn = "".join([x if x in safe.replace('"', "") else "_" for x in fn])
|
||||
@@ -2527,6 +2740,7 @@ class HttpCli(object):
|
||||
|
||||
boundary = "\roll\tide"
|
||||
targs = {
|
||||
"r": self.args.SR if self.is_vproxied else "",
|
||||
"ts": self.conn.hsrv.cachebuster(),
|
||||
"svcname": self.args.doctitle,
|
||||
"html_head": self.html_head,
|
||||
@@ -2572,7 +2786,7 @@ class HttpCli(object):
|
||||
|
||||
def tx_svcs(self) -> bool:
|
||||
aname = re.sub("[^0-9a-zA-Z]+", "", self.args.name) or "a"
|
||||
ep = self.headers["host"]
|
||||
ep = self.host
|
||||
host = ep.split(":")[0]
|
||||
hport = ep[ep.find(":") :] if ":" in ep else ""
|
||||
rip = (
|
||||
@@ -2580,6 +2794,7 @@ class HttpCli(object):
|
||||
if self.args.rclone_mdns or not self.args.zm
|
||||
else self.conn.hsrv.nm.map(self.ip) or host
|
||||
)
|
||||
vp = (self.uparam["hc"] or "").lstrip("/")
|
||||
html = self.j2s(
|
||||
"svcs",
|
||||
args=self.args,
|
||||
@@ -2587,7 +2802,8 @@ class HttpCli(object):
|
||||
s="s" if self.is_https else "",
|
||||
rip=rip,
|
||||
ep=ep,
|
||||
vp=(self.uparam["hc"] or "").lstrip("/"),
|
||||
vp=vp,
|
||||
rvp=vjoin(self.args.R, vp),
|
||||
host=host,
|
||||
hport=hport,
|
||||
aname=aname,
|
||||
@@ -2618,7 +2834,11 @@ class HttpCli(object):
|
||||
"dbwt": None,
|
||||
}
|
||||
|
||||
if self.uparam.get("ls") in ["v", "t", "txt"]:
|
||||
fmt = self.uparam.get("ls", "")
|
||||
if not fmt and self.ua.startswith("curl/"):
|
||||
fmt = "v"
|
||||
|
||||
if fmt in ["v", "t", "txt"]:
|
||||
if self.uname == "*":
|
||||
txt = "howdy stranger (you're not logged in)"
|
||||
else:
|
||||
@@ -2663,21 +2883,22 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
def set_k304(self) -> bool:
|
||||
ck = gencookie("k304", self.uparam["k304"], 60 * 60 * 24 * 299)
|
||||
ck = gencookie("k304", self.uparam["k304"], self.args.R, False, 86400 * 299)
|
||||
self.out_headerlist.append(("Set-Cookie", ck))
|
||||
self.redirect("", "?h#cc")
|
||||
return True
|
||||
|
||||
def set_am_js(self) -> bool:
|
||||
v = "n" if self.uparam["am_js"] == "n" else "y"
|
||||
ck = gencookie("js", v, 60 * 60 * 24 * 299)
|
||||
ck = gencookie("js", v, self.args.R, False, 86400 * 299)
|
||||
self.out_headerlist.append(("Set-Cookie", ck))
|
||||
self.reply(b"promoted\n")
|
||||
return True
|
||||
|
||||
def set_cfg_reset(self) -> bool:
|
||||
for k in ("k304", "js", "cppwd"):
|
||||
self.out_headerlist.append(("Set-Cookie", gencookie(k, "x", None)))
|
||||
for k in ("k304", "js", "cppwd", "cppws"):
|
||||
cookie = gencookie(k, "x", self.args.R, False, None)
|
||||
self.out_headerlist.append(("Set-Cookie", cookie))
|
||||
|
||||
self.redirect("", "?h#cc")
|
||||
return True
|
||||
@@ -2685,13 +2906,14 @@ class HttpCli(object):
|
||||
def tx_404(self, is_403: bool = False) -> bool:
|
||||
rc = 404
|
||||
if self.args.vague_403:
|
||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
|
||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p id="o">or maybe you don\'t have access -- try logging in or <a href="{}/?h">go home</a></p>'
|
||||
elif is_403:
|
||||
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">you\'ll have to log in or <a href="/?h">go home</a></p>'
|
||||
t = '<h1 id="p">403 forbiddena ~┻━┻</h1><p id="q">you\'ll have to log in or <a href="{}/?h">go home</a></p>'
|
||||
rc = 403
|
||||
else:
|
||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p><a id="r" href="/?h">go home</a></p>'
|
||||
t = '<h1 id="n">404 not found ┐( ´ -`)┌</h1><p><a id="r" href="{}/?h">go home</a></p>'
|
||||
|
||||
t = t.format(self.args.SR)
|
||||
html = self.j2s("splash", this=self, qvpath=quotep(self.vpath), msg=t)
|
||||
self.reply(html.encode("utf-8"), status=rc)
|
||||
return True
|
||||
@@ -2755,6 +2977,11 @@ class HttpCli(object):
|
||||
dst = dst[len(top) + 1 :]
|
||||
|
||||
ret = self.gen_tree(top, dst)
|
||||
if self.is_vproxied:
|
||||
parents = self.args.R.split("/")
|
||||
for parent in parents[::-1]:
|
||||
ret = {"k{}".format(parent): ret, "a": []}
|
||||
|
||||
zs = json.dumps(ret)
|
||||
self.reply(zs.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
@@ -2809,6 +3036,7 @@ class HttpCli(object):
|
||||
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
|
||||
|
||||
filt = self.uparam.get("filter")
|
||||
filt = unquotep(filt or "")
|
||||
lm = "ups [{}]".format(filt)
|
||||
self.log(lm)
|
||||
|
||||
@@ -2865,6 +3093,11 @@ class HttpCli(object):
|
||||
break
|
||||
|
||||
ret = ret[:2000]
|
||||
|
||||
if self.is_vproxied:
|
||||
for v in ret:
|
||||
v["vp"] = self.args.SR + v["vp"]
|
||||
|
||||
jtxt = json.dumps(ret, indent=2, sort_keys=True).encode("utf-8", "replace")
|
||||
self.log("{} #{} {:.2f}sec".format(lm, len(ret), time.time() - t0))
|
||||
self.reply(jtxt, mime="application/json")
|
||||
@@ -2879,6 +3112,8 @@ class HttpCli(object):
|
||||
|
||||
if not req:
|
||||
req = [self.vpath]
|
||||
elif self.is_vproxied:
|
||||
req = [x[len(self.args.SR) :] for x in req]
|
||||
|
||||
nlim = int(self.uparam.get("lim") or 0)
|
||||
lim = [nlim, nlim] if nlim else []
|
||||
@@ -2890,6 +3125,10 @@ class HttpCli(object):
|
||||
def handle_mv(self) -> bool:
|
||||
# full path of new loc (incl filename)
|
||||
dst = self.uparam.get("move")
|
||||
|
||||
if self.is_vproxied and dst and dst.startswith(self.args.SR):
|
||||
dst = dst[len(self.args.RS) :]
|
||||
|
||||
if not dst:
|
||||
raise Pebkac(400, "need dst vpath")
|
||||
|
||||
@@ -2920,7 +3159,7 @@ class HttpCli(object):
|
||||
biggest = 0
|
||||
|
||||
if arg == "v":
|
||||
fmt = "\033[0;7;36m{{}} {{:>{}}}\033[0m {{}}"
|
||||
fmt = "\033[0;7;36m{{}}{{:>{}}}\033[0m {{}}"
|
||||
nfmt = "{}"
|
||||
biggest = 0
|
||||
f2 = "".join(
|
||||
@@ -2940,7 +3179,7 @@ class HttpCli(object):
|
||||
a = x["dt"].replace("-", " ").replace(":", " ").split(" ")
|
||||
x["dt"] = f2.format(*list(a))
|
||||
sz = humansize(x["sz"], True)
|
||||
x["sz"] = "\033[0;3{}m{:>5}".format(ctab.get(sz[-1:], 0), sz)
|
||||
x["sz"] = "\033[0;3{}m {:>5}".format(ctab.get(sz[-1:], 0), sz)
|
||||
else:
|
||||
fmt = "{{}} {{:{},}} {{}}"
|
||||
nfmt = "{:,}"
|
||||
@@ -3091,6 +3330,10 @@ class HttpCli(object):
|
||||
is_ls = "ls" in self.uparam
|
||||
is_js = self.args.force_js or self.cookies.get("js") == "y"
|
||||
|
||||
if not is_ls and self.ua.startswith("curl/"):
|
||||
self.uparam["ls"] = "v"
|
||||
is_ls = True
|
||||
|
||||
tpl = "browser"
|
||||
if "b" in self.uparam:
|
||||
tpl = "browser2"
|
||||
@@ -3113,6 +3356,7 @@ class HttpCli(object):
|
||||
readme = f.read().decode("utf-8")
|
||||
break
|
||||
|
||||
vf = vn.flags
|
||||
ls_ret = {
|
||||
"dirs": [],
|
||||
"files": [],
|
||||
@@ -3145,6 +3389,8 @@ class HttpCli(object):
|
||||
"have_zip": (not self.args.no_zip),
|
||||
"have_unpost": int(self.args.unpost),
|
||||
"have_b_u": (self.can_write and self.uparam.get("b") == "u"),
|
||||
"sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
|
||||
"sb_lg": "" if "no_sb_lg" in vf else (vf.get("lg_sbf") or "y"),
|
||||
"url_suf": url_suf,
|
||||
"logues": logues,
|
||||
"readme": readme,
|
||||
|
||||
@@ -65,6 +65,7 @@ class HttpConn(object):
|
||||
self.ico: Ico = Ico(self.args) # mypy404
|
||||
|
||||
self.t0: float = time.time() # mypy404
|
||||
self.freshen_pwd: float = 0.0
|
||||
self.stopping = False
|
||||
self.nreq: int = -1 # mypy404
|
||||
self.nbyte: int = 0 # mypy404
|
||||
|
||||
@@ -28,7 +28,7 @@ except ImportError:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import MACOS, TYPE_CHECKING, EnvParams
|
||||
from .__init__ import ANYWIN, MACOS, TYPE_CHECKING, EnvParams
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
from .util import (
|
||||
@@ -81,8 +81,7 @@ class HttpSrv(object):
|
||||
self.bans: dict[str, int] = {}
|
||||
self.aclose: dict[str, int] = {}
|
||||
|
||||
self.ip = ""
|
||||
self.port = 0
|
||||
self.bound: set[tuple[str, int]] = set()
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
@@ -110,6 +109,11 @@ class HttpSrv(object):
|
||||
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
|
||||
self.prism = os.path.exists(zs)
|
||||
|
||||
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
|
||||
if not self.args.no_dav:
|
||||
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
|
||||
self.mallow += zs.split()
|
||||
|
||||
if self.args.zs:
|
||||
from .ssdp import SSDPr
|
||||
|
||||
@@ -142,7 +146,11 @@ class HttpSrv(object):
|
||||
pass
|
||||
|
||||
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
|
||||
self.nm = NetMap([self.ip], netdevs)
|
||||
ips = set()
|
||||
for ip, _ in self.bound:
|
||||
ips.add(ip)
|
||||
|
||||
self.nm = NetMap(list(ips), netdevs)
|
||||
|
||||
def start_threads(self, n: int) -> None:
|
||||
self.tp_nthr += n
|
||||
@@ -178,20 +186,19 @@ class HttpSrv(object):
|
||||
def listen(self, sck: socket.socket, nlisteners: int) -> None:
|
||||
if self.args.j != 1:
|
||||
# lost in the pickle; redefine
|
||||
try:
|
||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
||||
except:
|
||||
pass
|
||||
if not ANYWIN or self.args.reuseaddr:
|
||||
sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
sck.settimeout(None) # < does not inherit, ^ does
|
||||
|
||||
self.ip, self.port = sck.getsockname()[:2]
|
||||
sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
sck.settimeout(None) # < does not inherit, ^ opts above do
|
||||
|
||||
ip, port = sck.getsockname()[:2]
|
||||
self.srvs.append(sck)
|
||||
self.bound.add((ip, port))
|
||||
self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
|
||||
Daemon(
|
||||
self.thr_listen,
|
||||
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", self.ip, self.port),
|
||||
"httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
|
||||
(sck,),
|
||||
)
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ from .stolen.dnslib import (
|
||||
DNSQuestion,
|
||||
DNSRecord,
|
||||
)
|
||||
from .util import CachedSet, Daemon, Netdev, min_ex
|
||||
from .util import CachedSet, Daemon, Netdev, list_ips, min_ex
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -55,10 +55,11 @@ class MDNS_Sck(MC_Sck):
|
||||
self.bp_bye = b""
|
||||
|
||||
self.last_tx = 0.0
|
||||
self.tx_ex = False
|
||||
|
||||
|
||||
class MDNS(MCast):
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
def __init__(self, hub: "SvcHub", ngen: int) -> None:
|
||||
al = hub.args
|
||||
grp4 = "" if al.zm6 else MDNS4
|
||||
grp6 = "" if al.zm4 else MDNS6
|
||||
@@ -66,7 +67,8 @@ class MDNS(MCast):
|
||||
hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
|
||||
)
|
||||
self.srv: dict[socket.socket, MDNS_Sck] = {}
|
||||
|
||||
self.logsrc = "mDNS-{}".format(ngen)
|
||||
self.ngen = ngen
|
||||
self.ttl = 300
|
||||
|
||||
zs = self.args.name + ".local."
|
||||
@@ -89,7 +91,7 @@ class MDNS(MCast):
|
||||
self.defend: dict[MDNS_Sck, float] = {} # server -> deadline
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("mDNS", msg, c)
|
||||
self.log_func(self.logsrc, msg, c)
|
||||
|
||||
def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]:
|
||||
zms = self.args.zms
|
||||
@@ -287,12 +289,15 @@ class MDNS(MCast):
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
self.rx4.cln()
|
||||
self.rx6.cln()
|
||||
buf = b""
|
||||
addr = ("0", 0)
|
||||
for sck in rx:
|
||||
buf, addr = sck.recvfrom(4096)
|
||||
try:
|
||||
buf, addr = sck.recvfrom(4096)
|
||||
self.eat(buf, addr, sck)
|
||||
except:
|
||||
if not self.running:
|
||||
self.log("stopped", 2)
|
||||
return
|
||||
|
||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||
@@ -309,29 +314,38 @@ class MDNS(MCast):
|
||||
self.log(t.format(self.hn[:-1]), 2)
|
||||
self.probing = 0
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def stop(self, panic=False) -> None:
|
||||
self.running = False
|
||||
if not panic:
|
||||
for srv in self.srv.values():
|
||||
try:
|
||||
if panic:
|
||||
srv.sck.close()
|
||||
else:
|
||||
srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
|
||||
except:
|
||||
pass
|
||||
|
||||
self.srv = {}
|
||||
|
||||
def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
|
||||
cip = addr[0]
|
||||
v6 = ":" in cip
|
||||
if cip.startswith("169.254") or v6 and not cip.startswith("fe80"):
|
||||
if (cip.startswith("169.254") and not self.ll_ok) or (
|
||||
v6 and not cip.startswith("fe80")
|
||||
):
|
||||
return
|
||||
|
||||
cache = self.rx6 if v6 else self.rx4
|
||||
if buf in cache.c:
|
||||
return
|
||||
|
||||
cache.add(buf)
|
||||
srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip) # type: ignore
|
||||
if not srv:
|
||||
return
|
||||
|
||||
cache.add(buf)
|
||||
now = time.time()
|
||||
|
||||
if self.args.zmv and cip != srv.ip and cip not in srv.ips:
|
||||
@@ -369,6 +383,14 @@ class MDNS(MCast):
|
||||
# avahi broadcasting 127.0.0.1-only packets
|
||||
return
|
||||
|
||||
# check if we've been given additional IPs
|
||||
for ip in list_ips():
|
||||
if ip in cips:
|
||||
self.sips.add(ip)
|
||||
|
||||
if not self.sips.isdisjoint(cips):
|
||||
return
|
||||
|
||||
t = "mdns zeroconf: "
|
||||
if self.probing:
|
||||
t += "Cannot start; hostname '{}' is occupied"
|
||||
@@ -502,6 +524,15 @@ class MDNS(MCast):
|
||||
if now < srv.last_tx + cooldown:
|
||||
return False
|
||||
|
||||
try:
|
||||
srv.sck.sendto(msg, (srv.grp, 5353))
|
||||
srv.last_tx = now
|
||||
except Exception as ex:
|
||||
if srv.tx_ex:
|
||||
return True
|
||||
|
||||
srv.tx_ex = True
|
||||
t = "tx({},|{}|,{}): {}"
|
||||
self.log(t.format(srv.ip, len(msg), cooldown, ex), 3)
|
||||
|
||||
return True
|
||||
|
||||
@@ -5,10 +5,17 @@ import socket
|
||||
import time
|
||||
|
||||
import ipaddress
|
||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||
from ipaddress import (
|
||||
IPv4Address,
|
||||
IPv4Network,
|
||||
IPv6Address,
|
||||
IPv6Network,
|
||||
ip_address,
|
||||
ip_network,
|
||||
)
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .util import MACOS, Netdev, min_ex, spack
|
||||
from .__init__ import MACOS, TYPE_CHECKING
|
||||
from .util import Netdev, find_prefix, min_ex, spack
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .svchub import SvcHub
|
||||
@@ -75,6 +82,7 @@ class MCast(object):
|
||||
|
||||
self.srv: dict[socket.socket, MC_Sck] = {} # listening sockets
|
||||
self.sips: set[str] = set() # all listening ips (including failed attempts)
|
||||
self.ll_ok: set[str] = set() # fallback linklocal IPv4 and IPv6 addresses
|
||||
self.b2srv: dict[bytes, MC_Sck] = {} # binary-ip -> server socket
|
||||
self.b4: list[bytes] = [] # sorted list of binary-ips
|
||||
self.b6: list[bytes] = [] # sorted list of binary-ips
|
||||
@@ -102,15 +110,23 @@ class MCast(object):
|
||||
)
|
||||
|
||||
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
||||
|
||||
# ip -> ip/prefix
|
||||
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
|
||||
ips = find_prefix(ips, netdevs)
|
||||
|
||||
on = self.on[:]
|
||||
off = self.off[:]
|
||||
for lst in (on, off):
|
||||
for av in list(lst):
|
||||
try:
|
||||
arg_net = ip_network(av, False)
|
||||
except:
|
||||
arg_net = None
|
||||
|
||||
for sk, sv in netdevs.items():
|
||||
if arg_net:
|
||||
net_ip = ip_address(sk.split("/")[0])
|
||||
if net_ip in arg_net and sk not in lst:
|
||||
lst.append(sk)
|
||||
|
||||
if (av == str(sv.idx) or av == sv.name) and sk not in lst:
|
||||
lst.append(sk)
|
||||
|
||||
@@ -166,9 +182,21 @@ class MCast(object):
|
||||
srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)
|
||||
|
||||
# gvfs breaks if a linklocal ip appears in a dns reply
|
||||
srv.ips = {k: v for k, v in srv.ips.items() if not k.startswith("fe80")}
|
||||
ll = {
|
||||
k: v
|
||||
for k, v in srv.ips.items()
|
||||
if k.startswith("169.254") or k.startswith("fe80")
|
||||
}
|
||||
rt = {k: v for k, v in srv.ips.items() if k not in ll}
|
||||
|
||||
if self.args.ll or not rt:
|
||||
self.ll_ok.update(list(ll))
|
||||
|
||||
if not self.args.ll:
|
||||
srv.ips = rt or ll
|
||||
|
||||
if not srv.ips:
|
||||
self.log("no routable IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
||||
self.log("no IPs on {}; skipping [{}]".format(netdev, ip), 3)
|
||||
continue
|
||||
|
||||
try:
|
||||
@@ -318,6 +346,16 @@ class MCast(object):
|
||||
# just give it something
|
||||
ret = list(self.srv.values())[0]
|
||||
|
||||
if not ret and cip.startswith("169.254"):
|
||||
# idk how to map LL IPv4 msgs to nics;
|
||||
# just pick one and hope for the best
|
||||
lls = (
|
||||
x
|
||||
for x in self.srv.values()
|
||||
if next((y for y in x.ips if y in self.ll_ok), None)
|
||||
)
|
||||
ret = next(lls, None)
|
||||
|
||||
if ret:
|
||||
t = "new client on {} ({}): {}"
|
||||
self.log(t.format(ret.name, ret.net, cip), 6)
|
||||
|
||||
@@ -8,7 +8,7 @@ from email.utils import formatdate
|
||||
|
||||
from .__init__ import TYPE_CHECKING
|
||||
from .multicast import MC_Sck, MCast
|
||||
from .util import CachedSet, min_ex
|
||||
from .util import CachedSet, min_ex, html_escape
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .broker_util import BrokerCli
|
||||
@@ -73,13 +73,15 @@ class SSDPr(object):
|
||||
</device>
|
||||
</root>"""
|
||||
|
||||
c = html_escape
|
||||
sip, sport = hc.s.getsockname()[:2]
|
||||
sip = sip.replace("::ffff:", "")
|
||||
proto = "https" if self.args.https_only else "http"
|
||||
ubase = "{}://{}:{}".format(proto, sip, sport)
|
||||
zsl = self.args.zsl
|
||||
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
|
||||
name = "{} @ {}".format(self.args.doctitle, self.args.name)
|
||||
zs = zs.strip().format(ubase, url, name, self.args.zsid)
|
||||
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
|
||||
hc.reply(zs.encode("utf-8", "replace"))
|
||||
return False # close connectino
|
||||
|
||||
@@ -87,19 +89,22 @@ class SSDPr(object):
|
||||
class SSDPd(MCast):
|
||||
"""communicates with ssdp clients over multicast"""
|
||||
|
||||
def __init__(self, hub: "SvcHub") -> None:
|
||||
def __init__(self, hub: "SvcHub", ngen: int) -> None:
|
||||
al = hub.args
|
||||
vinit = al.zsv and not al.zmv
|
||||
super(SSDPd, self).__init__(
|
||||
hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
|
||||
)
|
||||
self.srv: dict[socket.socket, SSDP_Sck] = {}
|
||||
self.logsrc = "SSDP-{}".format(ngen)
|
||||
self.ngen = ngen
|
||||
|
||||
self.rxc = CachedSet(0.7)
|
||||
self.txc = CachedSet(5) # win10: every 3 sec
|
||||
self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)
|
||||
|
||||
def log(self, msg: str, c: Union[int, str] = 0) -> None:
|
||||
self.log_func("SSDP", msg, c)
|
||||
self.log_func(self.logsrc, msg, c)
|
||||
|
||||
def run(self) -> None:
|
||||
try:
|
||||
@@ -125,41 +130,51 @@ class SSDPd(MCast):
|
||||
|
||||
self.log("listening")
|
||||
while self.running:
|
||||
rdy = select.select(self.srv, [], [], 180)
|
||||
rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
|
||||
rx: list[socket.socket] = rdy[0] # type: ignore
|
||||
self.rxc.cln()
|
||||
buf = b""
|
||||
addr = ("0", 0)
|
||||
for sck in rx:
|
||||
buf, addr = sck.recvfrom(4096)
|
||||
try:
|
||||
buf, addr = sck.recvfrom(4096)
|
||||
self.eat(buf, addr)
|
||||
except:
|
||||
if not self.running:
|
||||
return
|
||||
break
|
||||
|
||||
t = "{} {} \033[33m|{}| {}\n{}".format(
|
||||
self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
|
||||
)
|
||||
self.log(t, 6)
|
||||
|
||||
self.log("stopped", 2)
|
||||
|
||||
def stop(self) -> None:
|
||||
self.running = False
|
||||
for srv in self.srv.values():
|
||||
try:
|
||||
srv.sck.close()
|
||||
except:
|
||||
pass
|
||||
|
||||
self.srv = {}
|
||||
|
||||
def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
|
||||
cip = addr[0]
|
||||
if cip.startswith("169.254"):
|
||||
if cip.startswith("169.254") and not self.ll_ok:
|
||||
return
|
||||
|
||||
if buf in self.rxc.c:
|
||||
return
|
||||
|
||||
self.rxc.add(buf)
|
||||
srv: Optional[SSDP_Sck] = self.map_client(cip) # type: ignore
|
||||
if not srv:
|
||||
return
|
||||
|
||||
self.rxc.add(buf)
|
||||
if not buf.startswith(b"M-SEARCH * HTTP/1."):
|
||||
raise Exception("not an ssdp message")
|
||||
return
|
||||
|
||||
if not self.ptn_st.search(buf):
|
||||
return
|
||||
@@ -183,7 +198,8 @@ BOOTID.UPNP.ORG: 0
|
||||
CONFIGID.UPNP.ORG: 1
|
||||
|
||||
"""
|
||||
zs = zs.format(formatdate(usegmt=True), srv.ip, srv.hport, self.args.zsid)
|
||||
v4 = srv.ip.replace("::ffff:", "")
|
||||
zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
|
||||
zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
|
||||
srv.sck.sendto(zb, addr[:2])
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import stat
|
||||
import tarfile
|
||||
|
||||
from queue import Queue
|
||||
@@ -79,6 +80,9 @@ class StreamTar(StreamArc):
|
||||
src = f["ap"]
|
||||
fsi = f["st"]
|
||||
|
||||
if stat.S_ISDIR(fsi.st_mode):
|
||||
return
|
||||
|
||||
inf = tarfile.TarInfo(name=name)
|
||||
inf.mode = fsi.st_mode
|
||||
inf.size = fsi.st_size
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import argparse
|
||||
import base64
|
||||
import calendar
|
||||
import errno
|
||||
import gzip
|
||||
import logging
|
||||
import os
|
||||
@@ -96,13 +97,13 @@ class SvcHub(object):
|
||||
if args.sss or args.s >= 3:
|
||||
args.ss = True
|
||||
args.no_dav = True
|
||||
args.no_logues = True
|
||||
args.no_readme = True
|
||||
args.lo = args.lo or "cpp-%Y-%m%d-%H%M%S.txt.xz"
|
||||
args.ls = args.ls or "**,*,ln,p,r"
|
||||
|
||||
if args.ss or args.s >= 2:
|
||||
args.s = True
|
||||
args.no_logues = True
|
||||
args.no_readme = True
|
||||
args.unpost = 0
|
||||
args.no_del = True
|
||||
args.no_mv = True
|
||||
@@ -150,9 +151,6 @@ class SvcHub(object):
|
||||
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
|
||||
args.theme = "{0}{1} {0} {1}".format(ch, bri)
|
||||
|
||||
if not args.hardlink and args.never_symlink:
|
||||
args.no_dedup = True
|
||||
|
||||
if args.log_fk:
|
||||
args.log_fk = re.compile(args.log_fk)
|
||||
|
||||
@@ -236,6 +234,7 @@ class SvcHub(object):
|
||||
if not args.zms:
|
||||
args.zms = zms
|
||||
|
||||
self.zc_ngen = 0
|
||||
self.mdns: Optional["MDNS"] = None
|
||||
self.ssdp: Optional["SSDPd"] = None
|
||||
|
||||
@@ -295,11 +294,35 @@ class SvcHub(object):
|
||||
al.zs_on = al.zs_on or al.z_on
|
||||
al.zm_off = al.zm_off or al.z_off
|
||||
al.zs_off = al.zs_off or al.z_off
|
||||
for n in ("zm_on", "zm_off", "zs_on", "zs_off"):
|
||||
vs = getattr(al, n).replace(" ", ",").split(",")
|
||||
ns = "zm_on zm_off zs_on zs_off acao acam"
|
||||
for n in ns.split(" "):
|
||||
vs = getattr(al, n).split(",")
|
||||
vs = [x.strip() for x in vs]
|
||||
vs = [x for x in vs if x]
|
||||
setattr(al, n, vs)
|
||||
|
||||
ns = "acao acam"
|
||||
for n in ns.split(" "):
|
||||
vs = getattr(al, n)
|
||||
vd = {zs: 1 for zs in vs}
|
||||
setattr(al, n, vd)
|
||||
|
||||
ns = "acao"
|
||||
for n in ns.split(" "):
|
||||
vs = getattr(al, n)
|
||||
vs = [x.lower() for x in vs]
|
||||
setattr(al, n, vs)
|
||||
|
||||
R = al.rp_loc
|
||||
if "//" in R or ":" in R:
|
||||
t = "found URL in --rp-loc; it should be just the location, for example /foo/bar"
|
||||
raise Exception(t)
|
||||
|
||||
al.R = R = R.strip("/")
|
||||
al.SR = "/" + R if R else ""
|
||||
al.RS = R + "/" if R else ""
|
||||
al.SRS = "/" + R + "/" if R else "/"
|
||||
|
||||
return True
|
||||
|
||||
def _setlimits(self) -> None:
|
||||
@@ -392,24 +415,10 @@ class SvcHub(object):
|
||||
|
||||
def run(self) -> None:
|
||||
self.tcpsrv.run()
|
||||
|
||||
if getattr(self.args, "zm", False):
|
||||
try:
|
||||
from .mdns import MDNS
|
||||
|
||||
self.mdns = MDNS(self)
|
||||
Daemon(self.mdns.run, "mdns")
|
||||
except:
|
||||
self.log("root", "mdns startup failed;\n" + min_ex(), 3)
|
||||
|
||||
if getattr(self.args, "zs", False):
|
||||
try:
|
||||
from .ssdp import SSDPd
|
||||
|
||||
self.ssdp = SSDPd(self)
|
||||
Daemon(self.ssdp.run, "ssdp")
|
||||
except:
|
||||
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
||||
if getattr(self.args, "z_chk", 0) and (
|
||||
getattr(self.args, "zm", False) or getattr(self.args, "zs", False)
|
||||
):
|
||||
Daemon(self.tcpsrv.netmon, "netmon")
|
||||
|
||||
Daemon(self.thr_httpsrv_up, "sig-hsrv-up2")
|
||||
|
||||
@@ -441,6 +450,33 @@ class SvcHub(object):
|
||||
else:
|
||||
self.stop_thr()
|
||||
|
||||
def start_zeroconf(self) -> None:
|
||||
self.zc_ngen += 1
|
||||
|
||||
if getattr(self.args, "zm", False):
|
||||
try:
|
||||
from .mdns import MDNS
|
||||
|
||||
if self.mdns:
|
||||
self.mdns.stop(True)
|
||||
|
||||
self.mdns = MDNS(self, self.zc_ngen)
|
||||
Daemon(self.mdns.run, "mdns")
|
||||
except:
|
||||
self.log("root", "mdns startup failed;\n" + min_ex(), 3)
|
||||
|
||||
if getattr(self.args, "zs", False):
|
||||
try:
|
||||
from .ssdp import SSDPd
|
||||
|
||||
if self.ssdp:
|
||||
self.ssdp.stop()
|
||||
|
||||
self.ssdp = SSDPd(self, self.zc_ngen)
|
||||
Daemon(self.ssdp.run, "ssdp")
|
||||
except:
|
||||
self.log("root", "ssdp startup failed;\n" + min_ex(), 3)
|
||||
|
||||
def reload(self) -> str:
|
||||
if self.reloading:
|
||||
return "cannot reload; already in progress"
|
||||
@@ -625,13 +661,20 @@ class SvcHub(object):
|
||||
print(msg.encode("utf-8", "replace").decode(), end="")
|
||||
except:
|
||||
print(msg.encode("ascii", "replace").decode(), end="")
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
if self.logf:
|
||||
self.logf.write(msg)
|
||||
|
||||
def pr(self, *a: Any, **ka: Any) -> None:
|
||||
try:
|
||||
with self.log_mutex:
|
||||
print(*a, **ka)
|
||||
except OSError as ex:
|
||||
if ex.errno != errno.EPIPE:
|
||||
raise
|
||||
|
||||
def check_mp_support(self) -> str:
|
||||
if MACOS:
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import calendar
|
||||
import time
|
||||
import stat
|
||||
import zlib
|
||||
|
||||
from .bos import bos
|
||||
@@ -238,6 +239,9 @@ class StreamZip(StreamArc):
|
||||
src = f["ap"]
|
||||
st = f["st"]
|
||||
|
||||
if stat.S_ISDIR(st.st_mode):
|
||||
return
|
||||
|
||||
sz = st.st_size
|
||||
ts = st.st_mtime
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
|
||||
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, unicode
|
||||
from .stolen.qrcodegen import QrCode
|
||||
@@ -28,6 +29,9 @@ if TYPE_CHECKING:
|
||||
if not hasattr(socket, "IPPROTO_IPV6"):
|
||||
setattr(socket, "IPPROTO_IPV6", 41)
|
||||
|
||||
if not hasattr(socket, "IP_FREEBIND"):
|
||||
setattr(socket, "IP_FREEBIND", 15)
|
||||
|
||||
|
||||
class TcpSrv(object):
|
||||
"""
|
||||
@@ -46,6 +50,8 @@ class TcpSrv(object):
|
||||
self.stopping = False
|
||||
self.srv: list[socket.socket] = []
|
||||
self.bound: list[tuple[str, int]] = []
|
||||
self.netdevs: dict[str, Netdev] = {}
|
||||
self.netlist = ""
|
||||
self.nsrv = 0
|
||||
self.qr = ""
|
||||
pad = False
|
||||
@@ -121,6 +127,20 @@ class TcpSrv(object):
|
||||
else:
|
||||
self.netdevs = {}
|
||||
|
||||
# keep IPv6 LL-only nics
|
||||
ll_ok: set[str] = set()
|
||||
for ip, nd in self.netdevs.items():
|
||||
if not ip.startswith("fe80"):
|
||||
continue
|
||||
|
||||
just_ll = True
|
||||
for ip2, nd2 in self.netdevs.items():
|
||||
if nd == nd2 and ":" in ip2 and not ip2.startswith("fe80"):
|
||||
just_ll = False
|
||||
|
||||
if just_ll or self.args.ll:
|
||||
ll_ok.add(ip.split("/")[0])
|
||||
|
||||
qr1: dict[str, list[int]] = {}
|
||||
qr2: dict[str, list[int]] = {}
|
||||
msgs = []
|
||||
@@ -128,7 +148,7 @@ class TcpSrv(object):
|
||||
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
|
||||
t = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
if ip.startswith("fe80"):
|
||||
if ip.startswith("fe80") and ip not in ll_ok:
|
||||
continue
|
||||
|
||||
for port in sorted(self.args.p):
|
||||
@@ -195,21 +215,28 @@ class TcpSrv(object):
|
||||
def _listen(self, ip: str, port: int) -> None:
|
||||
ipv = socket.AF_INET6 if ":" in ip else socket.AF_INET
|
||||
srv = socket.socket(ipv, socket.SOCK_STREAM)
|
||||
try:
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
|
||||
except:
|
||||
pass
|
||||
|
||||
if not ANYWIN or self.args.reuseaddr:
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
srv.settimeout(None) # < does not inherit, ^ does
|
||||
srv.settimeout(None) # < does not inherit, ^ opts above do
|
||||
|
||||
try:
|
||||
srv.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
|
||||
except:
|
||||
pass # will create another ipv4 socket instead
|
||||
|
||||
if not ANYWIN and self.args.freebind:
|
||||
srv.setsockopt(socket.SOL_IP, socket.IP_FREEBIND, 1)
|
||||
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
sport = srv.getsockname()[1]
|
||||
if port != sport:
|
||||
# linux 6.0.16 lets you bind a port which is in use
|
||||
# except it just gives you a random port instead
|
||||
raise OSError(E_ADDR_IN_USE[0], "")
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno in E_ADDR_IN_USE:
|
||||
@@ -228,6 +255,14 @@ class TcpSrv(object):
|
||||
ip, port = srv.getsockname()[:2]
|
||||
try:
|
||||
srv.listen(self.args.nc)
|
||||
try:
|
||||
ok = srv.getsockopt(socket.SOL_SOCKET, socket.SO_ACCEPTCONN)
|
||||
except:
|
||||
ok = 1 # macos
|
||||
|
||||
if not ok:
|
||||
# some linux don't throw on listen(0.0.0.0) after listen(::)
|
||||
raise Exception("failed to listen on {}".format(srv.getsockname()))
|
||||
except:
|
||||
if ip == "0.0.0.0" and ("::", port) in bound:
|
||||
# dualstack
|
||||
@@ -255,7 +290,11 @@ class TcpSrv(object):
|
||||
self.srv = srvs
|
||||
self.bound = bound
|
||||
self.nsrv = len(srvs)
|
||||
self._distribute_netdevs()
|
||||
|
||||
def _distribute_netdevs(self):
|
||||
self.hub.broker.say("set_netdevs", self.netdevs)
|
||||
self.hub.start_zeroconf()
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self.stopping = True
|
||||
@@ -267,6 +306,27 @@ class TcpSrv(object):
|
||||
|
||||
self.log("tcpsrv", "ok bye")
|
||||
|
||||
def netmon(self):
|
||||
while not self.stopping:
|
||||
time.sleep(self.args.z_chk)
|
||||
netdevs = self.detect_interfaces(self.args.i)
|
||||
if not netdevs:
|
||||
continue
|
||||
|
||||
added = "nothing"
|
||||
removed = "nothing"
|
||||
for k, v in netdevs.items():
|
||||
if k not in self.netdevs:
|
||||
added = "{} = {}".format(k, v)
|
||||
for k, v in self.netdevs.items():
|
||||
if k not in netdevs:
|
||||
removed = "{} = {}".format(k, v)
|
||||
|
||||
t = "network change detected:\n added {}\nremoved {}"
|
||||
self.log("tcpsrv", t.format(added, removed), 3)
|
||||
self.netdevs = netdevs
|
||||
self._distribute_netdevs()
|
||||
|
||||
def detect_interfaces(self, listen_ips: list[str]) -> dict[str, Netdev]:
|
||||
from .stolen.ifaddr import get_adapters
|
||||
|
||||
@@ -276,10 +336,6 @@ class TcpSrv(object):
|
||||
for nip in nic.ips:
|
||||
ipa = nip.ip[0] if ":" in str(nip.ip) else nip.ip
|
||||
sip = "{}/{}".format(ipa, nip.network_prefix)
|
||||
if sip.startswith("169.254"):
|
||||
# browsers dont impl linklocal
|
||||
continue
|
||||
|
||||
nd = Netdev(sip, nic.index or 0, nic.nice_name, "")
|
||||
eps[sip] = nd
|
||||
try:
|
||||
@@ -291,6 +347,12 @@ class TcpSrv(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
netlist = str(sorted(eps.items()))
|
||||
if netlist == self.netlist and self.netdevs:
|
||||
return {}
|
||||
|
||||
self.netlist = netlist
|
||||
|
||||
if "0.0.0.0" not in listen_ips and "::" not in listen_ips:
|
||||
eps = {k: v for k, v in eps.items() if k.split("/")[0] in listen_ips}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
@@ -61,12 +62,16 @@ try:
|
||||
HAVE_AVIF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
logging.getLogger("PIL").setLevel(logging.WARNING)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
HAVE_VIPS = True
|
||||
import pyvips
|
||||
|
||||
logging.getLogger("pyvips").setLevel(logging.WARNING)
|
||||
except:
|
||||
HAVE_VIPS = False
|
||||
|
||||
@@ -242,38 +247,38 @@ class ThumbSrv(object):
|
||||
abspath, tpath = task
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
png_ok = False
|
||||
fun = None
|
||||
funs = []
|
||||
if not bos.path.exists(tpath):
|
||||
for lib in self.args.th_dec:
|
||||
if fun:
|
||||
break
|
||||
elif lib == "pil" and ext in self.fmt_pil:
|
||||
fun = self.conv_pil
|
||||
if lib == "pil" and ext in self.fmt_pil:
|
||||
funs.append(self.conv_pil)
|
||||
elif lib == "vips" and ext in self.fmt_vips:
|
||||
fun = self.conv_vips
|
||||
funs.append(self.conv_vips)
|
||||
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
|
||||
fun = self.conv_ffmpeg
|
||||
funs.append(self.conv_ffmpeg)
|
||||
elif lib == "ff" and ext in self.fmt_ffa:
|
||||
if tpath.endswith(".opus") or tpath.endswith(".caf"):
|
||||
fun = self.conv_opus
|
||||
funs.append(self.conv_opus)
|
||||
elif tpath.endswith(".png"):
|
||||
fun = self.conv_waves
|
||||
funs.append(self.conv_waves)
|
||||
png_ok = True
|
||||
else:
|
||||
fun = self.conv_spec
|
||||
funs.append(self.conv_spec)
|
||||
|
||||
if not png_ok and tpath.endswith(".png"):
|
||||
raise Pebkac(400, "png only allowed for waveforms")
|
||||
|
||||
if fun:
|
||||
for fun in funs:
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
break
|
||||
except Exception as ex:
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
msg = msg.format(fun.__name__, abspath, min_ex())
|
||||
c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
|
||||
self.log(msg, c)
|
||||
if getattr(ex, "returncode", 0) != 321:
|
||||
if fun == funs[-1]:
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
else:
|
||||
@@ -363,7 +368,8 @@ class ThumbSrv(object):
|
||||
img = pyvips.Image.thumbnail(abspath, w, **kw)
|
||||
break
|
||||
except:
|
||||
pass
|
||||
if c == crops[-1]:
|
||||
raise
|
||||
|
||||
img.write_to_file(tpath, Q=40)
|
||||
|
||||
|
||||
@@ -97,14 +97,17 @@ class U2idx(object):
|
||||
return None
|
||||
|
||||
cur = None
|
||||
if ANYWIN:
|
||||
if ANYWIN and not bos.path.exists(db_path + "-wal"):
|
||||
uri = ""
|
||||
try:
|
||||
uri = "{}?mode=ro&nolock=1".format(Path(db_path).as_uri())
|
||||
cur = sqlite3.connect(uri, 2, uri=True).cursor()
|
||||
cur.execute('pragma table_info("up")').fetchone()
|
||||
self.log("ro: {}".format(db_path))
|
||||
except:
|
||||
self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
|
||||
# may not fail until the pragma so unset it
|
||||
cur = None
|
||||
|
||||
if not cur:
|
||||
# on windows, this steals the write-lock from up2k.deferred_init --
|
||||
|
||||
@@ -38,11 +38,13 @@ from .util import (
|
||||
db_ex_chk,
|
||||
djoin,
|
||||
fsenc,
|
||||
hidedir,
|
||||
min_ex,
|
||||
quotep,
|
||||
ren_open,
|
||||
rmdirs,
|
||||
rmdirs_up,
|
||||
runhook,
|
||||
s2hms,
|
||||
s3dec,
|
||||
s3enc,
|
||||
@@ -152,6 +154,7 @@ class Up2k(object):
|
||||
if ANYWIN:
|
||||
# usually fails to set lastmod too quickly
|
||||
self.lastmod_q: list[tuple[str, int, tuple[int, int], bool]] = []
|
||||
self.lastmod_q2 = self.lastmod_q[:]
|
||||
Daemon(self._lastmodder, "up2k-lastmod")
|
||||
|
||||
self.fstab = Fstab(self.log_func)
|
||||
@@ -180,6 +183,17 @@ class Up2k(object):
|
||||
all_vols = self.asrv.vfs.all_vols
|
||||
have_e2d = self.init_indexes(all_vols, [])
|
||||
|
||||
if self.stop:
|
||||
# up-mt consistency not guaranteed if init is interrupted;
|
||||
# drop caches for a full scan on next boot
|
||||
self._drop_caches()
|
||||
|
||||
if self.pp:
|
||||
self.pp.end = True
|
||||
self.pp = None
|
||||
|
||||
return
|
||||
|
||||
if not self.pp and self.args.exit == "idx":
|
||||
return self.hub.sigterm()
|
||||
|
||||
@@ -428,6 +442,7 @@ class Up2k(object):
|
||||
# only need to protect register_vpath but all in one go feels right
|
||||
for vol in vols:
|
||||
try:
|
||||
bos.makedirs(vol.realpath) # gonna happen at snap anyways
|
||||
bos.listdir(vol.realpath)
|
||||
except:
|
||||
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
|
||||
@@ -463,6 +478,10 @@ class Up2k(object):
|
||||
if next((zv for zv in vols if "e2ds" in zv.flags), None):
|
||||
self._block("indexing")
|
||||
|
||||
if self.args.re_dhash:
|
||||
self.args.re_dhash = False
|
||||
self._drop_caches()
|
||||
|
||||
for vol in vols:
|
||||
if self.stop:
|
||||
break
|
||||
@@ -550,6 +569,34 @@ class Up2k(object):
|
||||
if self.stop:
|
||||
return False
|
||||
|
||||
for vol in all_vols.values():
|
||||
if vol.flags["dbd"] == "acid":
|
||||
continue
|
||||
|
||||
reg = self.register_vpath(vol.realpath, vol.flags)
|
||||
try:
|
||||
assert reg
|
||||
cur, db_path = reg
|
||||
if bos.path.getsize(db_path + "-wal") < 1024 * 1024 * 5:
|
||||
continue
|
||||
except:
|
||||
continue
|
||||
|
||||
try:
|
||||
with self.mutex:
|
||||
cur.execute("pragma wal_checkpoint(truncate)")
|
||||
try:
|
||||
cur.execute("commit") # absolutely necessary! for some reason
|
||||
except:
|
||||
pass
|
||||
|
||||
cur.connection.commit() # this one maybe not
|
||||
except Exception as ex:
|
||||
self.log("checkpoint failed: {}".format(ex), 3)
|
||||
|
||||
if self.stop:
|
||||
return False
|
||||
|
||||
self.pp.end = True
|
||||
|
||||
msg = "{} volumes in {:.2f} sec"
|
||||
@@ -597,9 +644,22 @@ class Up2k(object):
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[90m{}"
|
||||
fx = set(("html_head",))
|
||||
fd = {
|
||||
"dbd": "dbd",
|
||||
"lg_sbf": "lg_sbf",
|
||||
"md_sbf": "md_sbf",
|
||||
"mte": "mte",
|
||||
"mth": "mth",
|
||||
"mtp": "mtp",
|
||||
}
|
||||
fl = {
|
||||
k: v
|
||||
for k, v in flags.items()
|
||||
if k not in fd or v != getattr(self.args, fd[k])
|
||||
}
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
for k, v in fl.items()
|
||||
if k not in fx
|
||||
]
|
||||
if a:
|
||||
@@ -654,11 +714,45 @@ class Up2k(object):
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
bos.makedirs(histpath)
|
||||
if bos.makedirs(histpath):
|
||||
hidedir(histpath)
|
||||
|
||||
try:
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
|
||||
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
|
||||
dbd = flags["dbd"]
|
||||
if dbd == "acid":
|
||||
# 217.5s; python-defaults
|
||||
zs = "delete"
|
||||
sync = "full"
|
||||
elif dbd == "swal":
|
||||
# 88.0s; still 99.9% safe (can lose a bit of on OS crash)
|
||||
zs = "wal"
|
||||
sync = "full"
|
||||
elif dbd == "yolo":
|
||||
# 2.7s; may lose entire db on OS crash
|
||||
zs = "wal"
|
||||
sync = "off"
|
||||
else:
|
||||
# 4.1s; corruption-safe but more likely to lose wal
|
||||
zs = "wal"
|
||||
sync = "normal"
|
||||
|
||||
try:
|
||||
amode = cur.execute("pragma journal_mode=" + zs).fetchone()[0]
|
||||
if amode.lower() != zs.lower():
|
||||
t = "sqlite failed to set journal_mode {}; got {}"
|
||||
raise Exception(t.format(zs, amode))
|
||||
except Exception as ex:
|
||||
if sync != "off":
|
||||
sync = "full"
|
||||
t = "reverting to sync={} because {}"
|
||||
self.log(t.format(sync, ex))
|
||||
|
||||
cur.execute("pragma synchronous=" + sync)
|
||||
cur.connection.commit()
|
||||
return cur, db_path
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
@@ -763,6 +857,7 @@ class Up2k(object):
|
||||
seen = seen + [rcdir]
|
||||
unreg: list[str] = []
|
||||
files: list[tuple[int, int, str]] = []
|
||||
fat32 = True
|
||||
|
||||
assert self.pp and self.mem_cur
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
@@ -787,6 +882,9 @@ class Up2k(object):
|
||||
|
||||
lmod = int(inf.st_mtime)
|
||||
sz = inf.st_size
|
||||
if fat32 and inf.st_mtime % 2:
|
||||
fat32 = False
|
||||
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
rap = absreal(abspath)
|
||||
if dev and inf.st_dev != dev:
|
||||
@@ -874,6 +972,9 @@ class Up2k(object):
|
||||
self.log(t.format(top, rp, len(in_db), rep_db))
|
||||
dts = -1
|
||||
|
||||
if fat32 and abs(dts - lmod) == 1:
|
||||
dts = lmod
|
||||
|
||||
if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
|
||||
continue
|
||||
|
||||
@@ -1179,6 +1280,18 @@ class Up2k(object):
|
||||
|
||||
return ret
|
||||
|
||||
def _drop_caches(self) -> None:
|
||||
self.log("dropping caches for a full filesystem scan")
|
||||
for vol in self.asrv.vfs.all_vols.values():
|
||||
reg = self.register_vpath(vol.realpath, vol.flags)
|
||||
if not reg:
|
||||
continue
|
||||
|
||||
cur, _ = reg
|
||||
self._set_tagscan(cur, True)
|
||||
cur.execute("delete from dh")
|
||||
cur.connection.commit()
|
||||
|
||||
def _set_tagscan(self, cur: "sqlite3.Cursor", need: bool) -> bool:
|
||||
if self.args.no_dhash:
|
||||
return False
|
||||
@@ -1389,6 +1502,10 @@ class Up2k(object):
|
||||
t0 = time.time()
|
||||
for ptop, flags in self.flags.items():
|
||||
if "mtp" in flags:
|
||||
if ptop not in self.entags:
|
||||
t = "skipping mtp for unavailable volume {}"
|
||||
self.log(t.format(ptop), 1)
|
||||
continue
|
||||
self._run_one_mtp(ptop, gid)
|
||||
|
||||
td = time.time() - t0
|
||||
@@ -1727,9 +1844,13 @@ class Up2k(object):
|
||||
self._set_tagscan(write_cur, True)
|
||||
return ret
|
||||
|
||||
def _trace(self, msg: str) -> None:
|
||||
self.log("ST: {}".format(msg))
|
||||
|
||||
def _orz(self, db_path: str) -> "sqlite3.Cursor":
|
||||
return sqlite3.connect(db_path, self.timeout, check_same_thread=False).cursor()
|
||||
# x.set_trace_callback(trace)
|
||||
c = sqlite3.connect(db_path, self.timeout, check_same_thread=False).cursor()
|
||||
# c.connection.set_trace_callback(self._trace)
|
||||
return c
|
||||
|
||||
def _open_db(self, db_path: str) -> "sqlite3.Cursor":
|
||||
existed = bos.path.exists(db_path)
|
||||
@@ -1951,6 +2072,8 @@ class Up2k(object):
|
||||
"sprs": sprs, # dontcare; finished anyways
|
||||
"size": dsize,
|
||||
"lmod": dtime,
|
||||
"host": cj["host"],
|
||||
"user": cj["user"],
|
||||
"addr": ip,
|
||||
"at": at,
|
||||
"hash": [],
|
||||
@@ -2043,12 +2166,13 @@ class Up2k(object):
|
||||
job[k] = cj[k]
|
||||
|
||||
pdir = djoin(cj["ptop"], cj["prel"])
|
||||
job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
|
||||
job["name"] = self._untaken(pdir, cj, now)
|
||||
dst = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if not self.args.nw:
|
||||
bos.unlink(dst) # TODO ed pls
|
||||
try:
|
||||
self._symlink(src, dst, lmod=cj["lmod"])
|
||||
dst_flags = self.flags[job["ptop"]]
|
||||
self._symlink(src, dst, dst_flags, lmod=cj["lmod"])
|
||||
except:
|
||||
if not n4g:
|
||||
raise
|
||||
@@ -2079,6 +2203,8 @@ class Up2k(object):
|
||||
}
|
||||
# client-provided, sanitized by _get_wark: name, size, lmod
|
||||
for k in [
|
||||
"host",
|
||||
"user",
|
||||
"addr",
|
||||
"vtop",
|
||||
"ptop",
|
||||
@@ -2090,7 +2216,7 @@ class Up2k(object):
|
||||
]:
|
||||
job[k] = cj[k]
|
||||
|
||||
for k in ["life"]:
|
||||
for k in ["life", "replace"]:
|
||||
if k in cj:
|
||||
job[k] = cj[k]
|
||||
|
||||
@@ -2122,10 +2248,18 @@ class Up2k(object):
|
||||
"wark": wark,
|
||||
}
|
||||
|
||||
def _untaken(self, fdir: str, fname: str, ts: float, ip: str) -> str:
|
||||
def _untaken(self, fdir: str, job: dict[str, Any], ts: float) -> str:
|
||||
fname = job["name"]
|
||||
ip = job["addr"]
|
||||
|
||||
if self.args.nw:
|
||||
return fname
|
||||
|
||||
fp = os.path.join(fdir, fname)
|
||||
if job.get("replace") and bos.path.exists(fp):
|
||||
self.log("replacing existing file at {}".format(fp))
|
||||
bos.unlink(fp)
|
||||
|
||||
if self.args.plain_ip:
|
||||
dip = ip.replace(":", ".")
|
||||
else:
|
||||
@@ -2136,7 +2270,12 @@ class Up2k(object):
|
||||
return zfw["orz"][1]
|
||||
|
||||
def _symlink(
|
||||
self, src: str, dst: str, verbose: bool = True, lmod: float = 0
|
||||
self,
|
||||
src: str,
|
||||
dst: str,
|
||||
flags: dict[str, Any],
|
||||
verbose: bool = True,
|
||||
lmod: float = 0,
|
||||
) -> None:
|
||||
if verbose:
|
||||
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
|
||||
@@ -2146,7 +2285,7 @@ class Up2k(object):
|
||||
|
||||
linked = False
|
||||
try:
|
||||
if self.args.no_dedup:
|
||||
if "copydupes" in flags:
|
||||
raise Exception("disabled in config")
|
||||
|
||||
lsrc = src
|
||||
@@ -2176,12 +2315,12 @@ class Up2k(object):
|
||||
ldst = ldst.replace("/", "\\")
|
||||
|
||||
try:
|
||||
if self.args.hardlink:
|
||||
if "hardlink" in flags:
|
||||
os.link(fsenc(src), fsenc(dst))
|
||||
linked = True
|
||||
except Exception as ex:
|
||||
self.log("cannot hardlink: " + repr(ex))
|
||||
if self.args.never_symlink:
|
||||
if "neversymlink" in flags:
|
||||
raise Exception("symlink-fallback disabled in cfg")
|
||||
|
||||
if not linked:
|
||||
@@ -2300,6 +2439,26 @@ class Up2k(object):
|
||||
# self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
|
||||
atomic_move(src, dst)
|
||||
|
||||
upt = job.get("at") or time.time()
|
||||
xau = self.flags[ptop].get("xau")
|
||||
if xau and not runhook(
|
||||
self.log,
|
||||
xau,
|
||||
dst,
|
||||
djoin(job["vtop"], job["prel"], job["name"]),
|
||||
job["host"],
|
||||
job["user"],
|
||||
job["addr"],
|
||||
upt,
|
||||
job["size"],
|
||||
"",
|
||||
):
|
||||
t = "upload blocked by xau"
|
||||
self.log(t, 1)
|
||||
bos.unlink(dst)
|
||||
self.registry[ptop].pop(wark, None)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
times = (int(time.time()), int(job["lmod"]))
|
||||
if ANYWIN:
|
||||
z1 = (dst, job["size"], times, job["sprs"])
|
||||
@@ -2311,7 +2470,6 @@ class Up2k(object):
|
||||
pass
|
||||
|
||||
z2 = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||
upt = job.get("at") or time.time()
|
||||
wake_sr = False
|
||||
try:
|
||||
flt = job["life"]
|
||||
@@ -2345,7 +2503,7 @@ class Up2k(object):
|
||||
if os.path.exists(d2):
|
||||
continue
|
||||
|
||||
self._symlink(dst, d2, lmod=lmod)
|
||||
self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
|
||||
if cur:
|
||||
self.db_rm(cur, rd, fn)
|
||||
self.db_add(cur, wark, rd, fn, *z2[-4:])
|
||||
@@ -2507,6 +2665,8 @@ class Up2k(object):
|
||||
self.log("rm: skip type-{:x} file [{}]".format(st.st_mode, atop))
|
||||
return 0, [], []
|
||||
|
||||
xbd = vn.flags.get("xbd")
|
||||
xad = vn.flags.get("xad")
|
||||
n_files = 0
|
||||
for dbv, vrem, _, adir, files, rd, vd in g:
|
||||
for fn in [x[0] for x in files]:
|
||||
@@ -2522,6 +2682,12 @@ class Up2k(object):
|
||||
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
|
||||
self.log("rm {}\n {}".format(vpath, abspath))
|
||||
_ = dbv.get(volpath, uname, *permsets[0])
|
||||
if xbd and not runhook(
|
||||
self.log, xbd, abspath, vpath, "", uname, "", 0, 0, ""
|
||||
):
|
||||
self.log("delete blocked by xbd: {}".format(abspath), 1)
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
cur = None
|
||||
try:
|
||||
@@ -2533,6 +2699,8 @@ class Up2k(object):
|
||||
cur.connection.commit()
|
||||
|
||||
bos.unlink(abspath)
|
||||
if xad:
|
||||
runhook(self.log, xad, abspath, vpath, "", uname, "", 0, 0, "")
|
||||
|
||||
ok: list[str] = []
|
||||
ng: list[str] = []
|
||||
@@ -2625,6 +2793,13 @@ class Up2k(object):
|
||||
if bos.path.exists(dabs):
|
||||
raise Pebkac(400, "mv2: target file exists")
|
||||
|
||||
xbr = svn.flags.get("xbr")
|
||||
xar = dvn.flags.get("xar")
|
||||
if xbr and not runhook(self.log, xbr, sabs, svp, "", uname, "", 0, 0, ""):
|
||||
t = "move blocked by xbr: {}".format(svp)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(405, t)
|
||||
|
||||
bos.makedirs(os.path.dirname(dabs))
|
||||
|
||||
if bos.path.islink(sabs):
|
||||
@@ -2633,7 +2808,7 @@ class Up2k(object):
|
||||
self.log(t.format(sabs, dabs, dlabs))
|
||||
mt = bos.path.getmtime(sabs, False)
|
||||
bos.unlink(sabs)
|
||||
self._symlink(dlabs, dabs, False, lmod=mt)
|
||||
self._symlink(dlabs, dabs, dvn.flags, False, lmod=mt)
|
||||
|
||||
# folders are too scary, schedule rescan of both vols
|
||||
self.need_rescan.add(svn.vpath)
|
||||
@@ -2641,6 +2816,9 @@ class Up2k(object):
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.notify_all()
|
||||
|
||||
if xar:
|
||||
runhook(self.log, xar, dabs, dvp, "", uname, "", 0, 0, "")
|
||||
|
||||
return "k"
|
||||
|
||||
c1, w, ftime_, fsize_, ip, at = self._find_from_vpath(svn.realpath, srem)
|
||||
@@ -2654,21 +2832,6 @@ class Up2k(object):
|
||||
ftime = ftime_
|
||||
fsize = fsize_ or 0
|
||||
|
||||
if w:
|
||||
assert c1
|
||||
if c2 and c2 != c1:
|
||||
self._copy_tags(c1, c2, w)
|
||||
|
||||
self._forget_file(svn.realpath, srem, c1, w, c1 != c2)
|
||||
self._relink(w, svn.realpath, srem, dabs)
|
||||
curs.add(c1)
|
||||
|
||||
if c2:
|
||||
self.db_add(c2, w, drd, dfn, ftime, fsize, ip or "", at or 0)
|
||||
curs.add(c2)
|
||||
else:
|
||||
self.log("not found in src db: [{}]".format(svp))
|
||||
|
||||
try:
|
||||
atomic_move(sabs, dabs)
|
||||
except OSError as ex:
|
||||
@@ -2685,6 +2848,24 @@ class Up2k(object):
|
||||
|
||||
os.unlink(b1)
|
||||
|
||||
if w:
|
||||
assert c1
|
||||
if c2 and c2 != c1:
|
||||
self._copy_tags(c1, c2, w)
|
||||
|
||||
self._forget_file(svn.realpath, srem, c1, w, c1 != c2)
|
||||
self._relink(w, svn.realpath, srem, dabs)
|
||||
curs.add(c1)
|
||||
|
||||
if c2:
|
||||
self.db_add(c2, w, drd, dfn, ftime, fsize, ip or "", at or 0)
|
||||
curs.add(c2)
|
||||
else:
|
||||
self.log("not found in src db: [{}]".format(svp))
|
||||
|
||||
if xar:
|
||||
runhook(self.log, xar, dabs, dvp, "", uname, "", 0, 0, "")
|
||||
|
||||
return "k"
|
||||
|
||||
def _copy_tags(
|
||||
@@ -2809,14 +2990,14 @@ class Up2k(object):
|
||||
bos.unlink(slabs)
|
||||
bos.rename(sabs, slabs)
|
||||
bos.utime(slabs, (int(time.time()), int(mt)), False)
|
||||
self._symlink(slabs, sabs, False)
|
||||
self._symlink(slabs, sabs, self.flags.get(ptop) or {}, False)
|
||||
full[slabs] = (ptop, rem)
|
||||
sabs = slabs
|
||||
|
||||
if not dabs:
|
||||
dabs = list(sorted(full.keys()))[0]
|
||||
|
||||
for alink in links:
|
||||
for alink, parts in links.items():
|
||||
lmod = None
|
||||
try:
|
||||
if alink != sabs and absreal(alink) != sabs:
|
||||
@@ -2828,7 +3009,8 @@ class Up2k(object):
|
||||
except:
|
||||
pass
|
||||
|
||||
self._symlink(dabs, alink, False, lmod=lmod or 0)
|
||||
flags = self.flags.get(parts[0]) or {}
|
||||
self._symlink(dabs, alink, flags, False, lmod=lmod or 0)
|
||||
|
||||
return len(full) + len(links)
|
||||
|
||||
@@ -2900,10 +3082,29 @@ class Up2k(object):
|
||||
return
|
||||
|
||||
self.registry[job["ptop"]][job["wark"]] = job
|
||||
job["name"] = self._untaken(pdir, job["name"], job["t0"], job["addr"])
|
||||
job["name"] = self._untaken(pdir, job, job["t0"])
|
||||
# if len(job["name"].split(".")) > 8:
|
||||
# raise Exception("aaa")
|
||||
|
||||
xbu = self.flags[job["ptop"]].get("xbu")
|
||||
ap_chk = djoin(pdir, job["name"])
|
||||
vp_chk = djoin(job["vtop"], job["prel"], job["name"])
|
||||
if xbu and not runhook(
|
||||
self.log,
|
||||
xbu,
|
||||
ap_chk,
|
||||
vp_chk,
|
||||
job["host"],
|
||||
job["user"],
|
||||
job["addr"],
|
||||
job["t0"],
|
||||
job["size"],
|
||||
"",
|
||||
):
|
||||
t = "upload blocked by xbu: {}".format(vp_chk)
|
||||
self.log(t, 1)
|
||||
raise Pebkac(403, t)
|
||||
|
||||
tnam = job["name"] + ".PARTIAL"
|
||||
if self.args.dotpart:
|
||||
tnam = "." + tnam
|
||||
@@ -2968,11 +3169,11 @@ class Up2k(object):
|
||||
|
||||
def _lastmodder(self) -> None:
|
||||
while True:
|
||||
ready = self.lastmod_q
|
||||
ready = self.lastmod_q2
|
||||
self.lastmod_q2 = self.lastmod_q
|
||||
self.lastmod_q = []
|
||||
|
||||
# self.log("lmod: got {}".format(len(ready)))
|
||||
time.sleep(5)
|
||||
time.sleep(1)
|
||||
for path, sz, times, sparse in ready:
|
||||
self.log("lmod: setting times {} on {}".format(times, path))
|
||||
try:
|
||||
@@ -3064,7 +3265,8 @@ class Up2k(object):
|
||||
if etag == self.snap_prev.get(ptop):
|
||||
return
|
||||
|
||||
bos.makedirs(histpath)
|
||||
if bos.makedirs(histpath):
|
||||
hidedir(histpath)
|
||||
|
||||
path2 = "{}.{}".format(path, os.getpid())
|
||||
body = {"droppable": self.droppable[ptop], "registry": reg}
|
||||
@@ -3117,7 +3319,7 @@ class Up2k(object):
|
||||
continue
|
||||
|
||||
# TODO is undef if vol 404 on startup
|
||||
entags = self.entags[ptop]
|
||||
entags = self.entags.get(ptop)
|
||||
if not entags:
|
||||
self.log("no entags okay.jpg", c=3)
|
||||
continue
|
||||
@@ -3177,6 +3379,7 @@ class Up2k(object):
|
||||
if self.mth:
|
||||
self.mth.stop = True
|
||||
|
||||
# in case we're killed early
|
||||
for x in list(self.spools):
|
||||
self._unspool(x)
|
||||
|
||||
@@ -3184,6 +3387,16 @@ class Up2k(object):
|
||||
self.log("writing snapshot")
|
||||
self.do_snapshot()
|
||||
|
||||
t0 = time.time()
|
||||
while self.pp:
|
||||
time.sleep(0.1)
|
||||
if time.time() - t0 >= 1:
|
||||
break
|
||||
|
||||
# if there is time
|
||||
for x in list(self.spools):
|
||||
self._unspool(x)
|
||||
|
||||
|
||||
def up2k_chunksize(filesize: int) -> int:
|
||||
chunksize = 1024 * 1024
|
||||
|
||||
@@ -6,6 +6,7 @@ import contextlib
|
||||
import errno
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import logging
|
||||
import math
|
||||
import mimetypes
|
||||
@@ -148,6 +149,7 @@ HTTPCODE = {
|
||||
204: "No Content",
|
||||
206: "Partial Content",
|
||||
207: "Multi-Status",
|
||||
301: "Moved Permanently",
|
||||
302: "Found",
|
||||
304: "Not Modified",
|
||||
400: "Bad Request",
|
||||
@@ -227,6 +229,7 @@ application msi=x-ms-installer cab=vnd.ms-cab-compressed rpm=x-rpm crx=x-chrome-
|
||||
application epub=epub+zip mobi=x-mobipocket-ebook lit=x-ms-reader rss=rss+xml atom=atom+xml torrent=x-bittorrent
|
||||
application p7s=pkcs7-signature dcm=dicom shx=vnd.shx shp=vnd.shp dbf=x-dbf gml=gml+xml gpx=gpx+xml amf=x-amf
|
||||
application swf=x-shockwave-flash m3u=vnd.apple.mpegurl db3=vnd.sqlite3 sqlite=vnd.sqlite3
|
||||
text ass=plain ssa=plain
|
||||
image jpg=jpeg xpm=x-xpixmap psd=vnd.adobe.photoshop jpf=jpx tif=tiff ico=x-icon djvu=vnd.djvu
|
||||
image heic=heic-sequence heif=heif-sequence hdr=vnd.radiance svg=svg+xml
|
||||
audio caf=x-caf mp3=mpeg m4a=mp4 mid=midi mpc=musepack aif=aiff au=basic qcp=qcelp
|
||||
@@ -360,8 +363,11 @@ class Daemon(threading.Thread):
|
||||
name: Optional[str] = None,
|
||||
a: Optional[Iterable[Any]] = None,
|
||||
r: bool = True,
|
||||
ka: Optional[dict[Any, Any]] = None,
|
||||
) -> None:
|
||||
threading.Thread.__init__(self, target=target, name=name, args=a or ())
|
||||
threading.Thread.__init__(
|
||||
self, target=target, name=name, args=a or (), kwargs=ka
|
||||
)
|
||||
self.daemon = True
|
||||
if r:
|
||||
self.start()
|
||||
@@ -377,6 +383,9 @@ class Netdev(object):
|
||||
def __str__(self):
|
||||
return "{}-{}{}".format(self.idx, self.name, self.desc)
|
||||
|
||||
def __repr__(self):
|
||||
return "'{}-{}'".format(self.idx, self.name)
|
||||
|
||||
def __lt__(self, rhs):
|
||||
return str(self) < str(rhs)
|
||||
|
||||
@@ -436,9 +445,7 @@ class HLog(logging.Handler):
|
||||
else:
|
||||
c = 1
|
||||
|
||||
if record.name.startswith("PIL") and lv < logging.WARNING:
|
||||
return
|
||||
elif record.name == "pyftpdlib":
|
||||
if record.name == "pyftpdlib":
|
||||
m = self.ptn_ftp.match(msg)
|
||||
if m:
|
||||
ip = m.group(1)
|
||||
@@ -468,7 +475,7 @@ class NetMap(object):
|
||||
)
|
||||
|
||||
ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
|
||||
ips = [[x for x in netdevs if x.startswith(y + "/")][0] for y in ips]
|
||||
ips = find_prefix(ips, netdevs)
|
||||
|
||||
self.cache: dict[str, str] = {}
|
||||
self.b2sip: dict[bytes, str] = {}
|
||||
@@ -1186,7 +1193,7 @@ def ren_open(
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fsenc(fpath)):
|
||||
if suffix and os.path.lexists(fsenc(fpath)):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
@@ -1548,14 +1555,16 @@ def gen_filekey_dbg(
|
||||
return ret
|
||||
|
||||
|
||||
def gencookie(k: str, v: str, dur: Optional[int]) -> str:
|
||||
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str:
|
||||
v = v.replace(";", "")
|
||||
if dur:
|
||||
exp = formatdate(time.time() + dur, usegmt=True)
|
||||
else:
|
||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||
|
||||
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
|
||||
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format(
|
||||
k, v, r, exp, "; Secure" if tls else ""
|
||||
)
|
||||
|
||||
|
||||
def humansize(sz: float, terse: bool = False) -> str:
|
||||
@@ -1711,6 +1720,15 @@ def ipnorm(ip: str) -> str:
|
||||
return ip
|
||||
|
||||
|
||||
def find_prefix(ips: list[str], netdevs: dict[str, Netdev]) -> list[str]:
|
||||
ret = []
|
||||
for ip in ips:
|
||||
hit = next((x for x in netdevs if x.startswith(ip + "/")), None)
|
||||
if hit:
|
||||
ret.append(hit)
|
||||
return ret
|
||||
|
||||
|
||||
def html_escape(s: str, quot: bool = False, crlf: bool = False) -> str:
|
||||
"""html.escape but also newlines"""
|
||||
s = s.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
@@ -2007,6 +2025,20 @@ def read_socket_chunked(
|
||||
raise Pebkac(400, t.format(x))
|
||||
|
||||
|
||||
def list_ips() -> list[str]:
|
||||
from .stolen.ifaddr import get_adapters
|
||||
|
||||
ret: set[str] = set()
|
||||
for nic in get_adapters():
|
||||
for ipo in nic.ips:
|
||||
if len(ipo.ip) < 7:
|
||||
ret.add(ipo.ip[0]) # ipv6 is (ip,0,0)
|
||||
else:
|
||||
ret.add(ipo.ip)
|
||||
|
||||
return list(ret)
|
||||
|
||||
|
||||
def yieldfile(fn: str) -> Generator[bytes, None, None]:
|
||||
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
@@ -2427,6 +2459,124 @@ def retchk(
|
||||
raise Exception(t)
|
||||
|
||||
|
||||
def _runhook(
|
||||
log: "NamedLogger",
|
||||
cmd: str,
|
||||
ap: str,
|
||||
vp: str,
|
||||
host: str,
|
||||
uname: str,
|
||||
ip: str,
|
||||
at: float,
|
||||
sz: int,
|
||||
txt: str,
|
||||
) -> bool:
|
||||
chk = False
|
||||
fork = False
|
||||
jtxt = False
|
||||
wait = 0
|
||||
tout = 0
|
||||
kill = "t"
|
||||
cap = 0
|
||||
ocmd = cmd
|
||||
while "," in cmd[:6]:
|
||||
arg, cmd = cmd.split(",", 1)
|
||||
if arg == "c":
|
||||
chk = True
|
||||
elif arg == "f":
|
||||
fork = True
|
||||
elif arg == "j":
|
||||
jtxt = True
|
||||
elif arg.startswith("w"):
|
||||
wait = float(arg[1:])
|
||||
elif arg.startswith("t"):
|
||||
tout = float(arg[1:])
|
||||
elif arg.startswith("c"):
|
||||
cap = int(arg[1:]) # 0=none 1=stdout 2=stderr 3=both
|
||||
elif arg.startswith("k"):
|
||||
kill = arg[1:] # [t]ree [m]ain [n]one
|
||||
else:
|
||||
t = "hook: invalid flag {} in {}"
|
||||
log(t.format(arg, ocmd))
|
||||
|
||||
env = os.environ.copy()
|
||||
# try:
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
zsl = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(zsl))
|
||||
env["PYTHONPATH"] = pypath
|
||||
# except: if not E.ox: raise
|
||||
|
||||
ka = {
|
||||
"env": env,
|
||||
"timeout": tout,
|
||||
"kill": kill,
|
||||
"capture": cap,
|
||||
}
|
||||
|
||||
if jtxt:
|
||||
ja = {
|
||||
"ap": ap,
|
||||
"vp": vp,
|
||||
"ip": ip,
|
||||
"host": host,
|
||||
"user": uname,
|
||||
"at": at or time.time(),
|
||||
"sz": sz,
|
||||
"txt": txt,
|
||||
}
|
||||
arg = json.dumps(ja)
|
||||
else:
|
||||
arg = txt or ap
|
||||
|
||||
acmd = [cmd, arg]
|
||||
if cmd.endswith(".py"):
|
||||
acmd = [sys.executable] + acmd
|
||||
|
||||
bcmd = [fsenc(x) for x in acmd]
|
||||
|
||||
t0 = time.time()
|
||||
if fork:
|
||||
Daemon(runcmd, ocmd, [acmd], ka=ka)
|
||||
else:
|
||||
rc, v, err = runcmd(bcmd, **ka) # type: ignore
|
||||
if chk and rc:
|
||||
retchk(rc, bcmd, err, log, 5)
|
||||
return False
|
||||
|
||||
wait -= time.time() - t0
|
||||
if wait > 0:
|
||||
time.sleep(wait)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def runhook(
|
||||
log: "NamedLogger",
|
||||
cmds: list[str],
|
||||
ap: str,
|
||||
vp: str,
|
||||
host: str,
|
||||
uname: str,
|
||||
ip: str,
|
||||
at: float,
|
||||
sz: int,
|
||||
txt: str,
|
||||
) -> bool:
|
||||
vp = vp.replace("\\", "/")
|
||||
for cmd in cmds:
|
||||
try:
|
||||
if not _runhook(log, cmd, ap, vp, host, uname, ip, at, sz, txt):
|
||||
return False
|
||||
except Exception as ex:
|
||||
log("hook: {}".format(ex))
|
||||
if ",c," in "," + cmd:
|
||||
return False
|
||||
break
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def gzip_orig_sz(fn: str) -> int:
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
f.seek(-4, 2)
|
||||
@@ -2536,7 +2686,7 @@ def termsize() -> tuple[int, int]:
|
||||
def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
|
||||
try:
|
||||
cr = sunpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA"))
|
||||
return int(cr[1]), int(cr[0])
|
||||
return cr[::-1]
|
||||
except:
|
||||
return None
|
||||
|
||||
@@ -2549,15 +2699,23 @@ def termsize() -> tuple[int, int]:
|
||||
except:
|
||||
pass
|
||||
|
||||
if cr:
|
||||
return cr
|
||||
|
||||
try:
|
||||
return int(env["COLUMNS"]), int(env["LINES"])
|
||||
return cr or (int(env["COLUMNS"]), int(env["LINES"]))
|
||||
except:
|
||||
return 80, 25
|
||||
|
||||
|
||||
def hidedir(dp) -> None:
|
||||
if ANYWIN:
|
||||
try:
|
||||
k32 = ctypes.WinDLL("kernel32")
|
||||
attrs = k32.GetFileAttributesW(dp)
|
||||
if attrs >= 0:
|
||||
k32.SetFileAttributesW(dp, attrs | 2)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
class Pebkac(Exception):
|
||||
def __init__(self, code: int, msg: Optional[str] = None) -> None:
|
||||
super(Pebkac, self).__init__(msg or HTTPCODE[code])
|
||||
|
||||
@@ -27,7 +27,7 @@ window.baguetteBox = (function () {
|
||||
isOverlayVisible = false,
|
||||
touch = {}, // start-pos
|
||||
touchFlag = false, // busy
|
||||
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
|
||||
re_i = /.+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
|
||||
re_v = /.+\.(webm|mkv|mp4)(\?|$)/i,
|
||||
anims = ['slideIn', 'fadeIn', 'none'],
|
||||
data = {}, // all galleries
|
||||
@@ -277,8 +277,8 @@ window.baguetteBox = (function () {
|
||||
playpause();
|
||||
else if (k == "KeyU" || k == "KeyO")
|
||||
relseek(k == "KeyU" ? -10 : 10);
|
||||
else if (k.indexOf('Digit') === 0)
|
||||
vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
|
||||
else if (k.indexOf('Digit') === 0 && v)
|
||||
v.currentTime = v.duration * parseInt(k.slice(-1)) * 0.1;
|
||||
else if (k == "KeyM" && v) {
|
||||
v.muted = vmute = !vmute;
|
||||
mp_ctl();
|
||||
|
||||
@@ -572,6 +572,11 @@ html.dy {
|
||||
* {
|
||||
line-height: 1.2em;
|
||||
}
|
||||
::selection {
|
||||
color: var(--bg-d1);
|
||||
background: var(--fg);
|
||||
text-shadow: none;
|
||||
}
|
||||
html,body,tr,th,td,#files,a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
@@ -754,8 +759,9 @@ html.y #files thead th {
|
||||
display: inline;
|
||||
}
|
||||
#path a {
|
||||
margin: 0 0 0 -.2em;
|
||||
padding: 0 0 0 .4em;
|
||||
padding: 0 .35em;
|
||||
position: relative;
|
||||
z-index: 1;
|
||||
/* ie: */
|
||||
border-bottom: .1em solid #777\9;
|
||||
margin-right: 1em\9;
|
||||
@@ -763,18 +769,17 @@ html.y #files thead th {
|
||||
#path a:first-child {
|
||||
padding-left: .8em;
|
||||
}
|
||||
#path a:not(:last-child):after {
|
||||
content: '';
|
||||
#path i {
|
||||
width: 1.05em;
|
||||
height: 1.05em;
|
||||
margin: -.2em .3em -.2em -.4em;
|
||||
margin: -.5em .15em -.15em -.7em;
|
||||
display: inline-block;
|
||||
border: 1px solid rgba(255,224,192,0.3);
|
||||
border-width: .05em .05em 0 0;
|
||||
transform: rotate(45deg);
|
||||
background: linear-gradient(45deg, rgba(0,0,0,0) 40%, rgba(0,0,0,0.25) 75%, rgba(0,0,0,0.35));
|
||||
}
|
||||
html.y #path a:not(:last-child)::after {
|
||||
html.y #path i {
|
||||
background: none;
|
||||
border-color: rgba(0,0,0,0.2);
|
||||
border-width: .1em .1em 0 0;
|
||||
@@ -793,6 +798,17 @@ html.y #path a:hover {
|
||||
.logue:empty {
|
||||
display: none;
|
||||
}
|
||||
.logue>iframe {
|
||||
background: var(--bgg);
|
||||
border-radius: .3em;
|
||||
visibility: hidden;
|
||||
border: none;
|
||||
width: 100%;
|
||||
height: 0;
|
||||
}
|
||||
.logue>iframe.focus {
|
||||
box-shadow: 0 0 .1em .1em var(--a);
|
||||
}
|
||||
#pro.logue {
|
||||
margin-bottom: .8em;
|
||||
}
|
||||
@@ -817,6 +833,9 @@ html.y #path a:hover {
|
||||
.mdo {
|
||||
max-width: 52em;
|
||||
}
|
||||
.mdo.sb {
|
||||
max-width: unset;
|
||||
}
|
||||
.mdo,
|
||||
.mdo * {
|
||||
line-height: 1.4em;
|
||||
@@ -1075,18 +1094,18 @@ html.y #widget.open {
|
||||
top: -.12em;
|
||||
}
|
||||
#wtico {
|
||||
cursor: url(/.cpr/dd/4.png), pointer;
|
||||
cursor: url(dd/4.png), pointer;
|
||||
animation: cursor 500ms;
|
||||
}
|
||||
#wtico:hover {
|
||||
animation: cursor 500ms infinite;
|
||||
}
|
||||
@keyframes cursor {
|
||||
0% {cursor: url(/.cpr/dd/2.png), pointer}
|
||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
0% {cursor: url(dd/2.png), pointer}
|
||||
30% {cursor: url(dd/3.png), pointer}
|
||||
50% {cursor: url(dd/4.png), pointer}
|
||||
75% {cursor: url(dd/5.png), pointer}
|
||||
85% {cursor: url(dd/4.png), pointer}
|
||||
}
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
@@ -2557,7 +2576,6 @@ html.b #u2conf a.b:hover {
|
||||
#u2conf input[type="checkbox"]:checked+label:hover {
|
||||
background: var(--u2-o-1h-bg);
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
background: #777;
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8, minimum-scale=0.6">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/browser.css?_={{ ts }}">
|
||||
{%- if css %}
|
||||
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
|
||||
{%- endif %}
|
||||
@@ -71,7 +71,7 @@
|
||||
<h1 id="path">
|
||||
<a href="#" id="entree">🌲</a>
|
||||
{%- for n in vpnodes %}
|
||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||
<a href="{{ r }}/{{ n[0] }}">{{ n[1] }}</a>
|
||||
{%- endfor %}
|
||||
</h1>
|
||||
|
||||
@@ -85,7 +85,7 @@
|
||||
<div id="bdoc"></div>
|
||||
{%- endif %}
|
||||
|
||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||
<div id="pro" class="logue">{{ "" if sb_lg else logues[0] }}</div>
|
||||
|
||||
<table id="files">
|
||||
<thead>
|
||||
@@ -119,9 +119,9 @@
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||
<div id="epi" class="logue">{{ "" if sb_lg else logues[1] }}</div>
|
||||
|
||||
<h2><a href="/?h" id="goh">control-panel</a></h2>
|
||||
<h2><a href="{{ r }}/?h" id="goh">control-panel</a></h2>
|
||||
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
@@ -134,7 +134,8 @@
|
||||
<div id="widget"></div>
|
||||
|
||||
<script>
|
||||
var acct = "{{ acct }}",
|
||||
var SR = {{ r|tojson }},
|
||||
acct = "{{ acct }}",
|
||||
perms = {{ perms }},
|
||||
themes = {{ themes }},
|
||||
dtheme = "{{ dtheme }}",
|
||||
@@ -149,21 +150,23 @@
|
||||
have_del = {{ have_del|tojson }},
|
||||
have_unpost = {{ have_unpost }},
|
||||
have_zip = {{ have_zip|tojson }},
|
||||
sb_md = "{{ sb_md }}",
|
||||
sb_lg = "{{ sb_lg }}",
|
||||
lifetime = {{ lifetime }},
|
||||
turbolvl = {{ turbolvl }},
|
||||
u2sort = "{{ u2sort }}",
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
txt_ext = "{{ txt_ext }}",
|
||||
{% if no_prism %}no_prism = 1,{% endif %}
|
||||
logues = {{ logues|tojson if sb_lg else "[]" }},
|
||||
readme = {{ readme|tojson }},
|
||||
ls0 = {{ ls0|tojson }};
|
||||
|
||||
document.documentElement.className = localStorage.theme || dtheme;
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/baguettebox.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/browser.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/up2k.js?_={{ ts }}"></script>
|
||||
{%- if js %}
|
||||
<script src="{{ js }}?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
|
||||
@@ -94,6 +94,9 @@ var Ls = {
|
||||
"ht_and": " and ",
|
||||
|
||||
"goh": "control-panel",
|
||||
"gop": 'previous sibling">prev',
|
||||
"gou": 'parent folder">up',
|
||||
"gon": 'next folder">next',
|
||||
"logout": "Logout ",
|
||||
"access": " access",
|
||||
"ot_close": "close submenu",
|
||||
@@ -257,6 +260,8 @@ var Ls = {
|
||||
"fbd_more": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_more">show {2}</a> or <a href="#" id="bd_all">show all</a></div>',
|
||||
"fbd_all": '<div id="blazy">showing <code>{0}</code> of <code>{1}</code> files; <a href="#" id="bd_all">show all</a></div>',
|
||||
|
||||
"f_dls": 'the file links in the current folder have\nbeen changed into download links',
|
||||
|
||||
"ft_paste": "paste {0} items$NHotkey: ctrl-V",
|
||||
"fr_eperm": 'cannot rename:\nyou do not have “move” permission in this folder',
|
||||
"fd_eperm": 'cannot delete:\nyou do not have “delete” permission in this folder',
|
||||
@@ -534,6 +539,9 @@ var Ls = {
|
||||
"ht_and": " og ",
|
||||
|
||||
"goh": "kontrollpanel",
|
||||
"gop": 'naviger til mappen før denne">forr.',
|
||||
"gou": 'naviger ett nivå opp">opp',
|
||||
"gon": 'naviger til mappen etter denne">neste',
|
||||
"logout": "Logg ut ",
|
||||
"access": " tilgang",
|
||||
"ot_close": "lukk verktøy",
|
||||
@@ -697,6 +705,8 @@ var Ls = {
|
||||
"fbd_more": '<div id="blazy">viser <code>{0}</code> av <code>{1}</code> filer; <a href="#" id="bd_more">vis {2}</a> eller <a href="#" id="bd_all">vis alle</a></div>',
|
||||
"fbd_all": '<div id="blazy">viser <code>{0}</code> av <code>{1}</code> filer; <a href="#" id="bd_all">vis alle</a></div>',
|
||||
|
||||
"f_dls": 'linkene i denne mappen er nå\nomgjort til nedlastningsknapper',
|
||||
|
||||
"ft_paste": "Lim inn {0} filer$NSnarvei: ctrl-V",
|
||||
"fr_eperm": 'kan ikke endre navn:\ndu har ikke “move”-rettigheten i denne mappen',
|
||||
"fd_eperm": 'kan ikke slette:\ndu har ikke “delete”-rettigheten i denne mappen',
|
||||
@@ -841,7 +851,7 @@ var Ls = {
|
||||
"u_hashdone": 'befaring ferdig',
|
||||
"u_hashing": 'les',
|
||||
"u_fixed": "OK! Løste seg 👍",
|
||||
"u_cuerr": "kunne ikke laste opp del {0} av {1};\nsikkert harmløst, fortsetter\n\nfil: {2}",
|
||||
"u_cuerr": "kunne ikke laste opp del {0} av {1};\nsikkert greit, fortsetter\n\nfil: {2}",
|
||||
"u_cuerr2": "server nektet opplastningen (del {0} av {1});\nprøver igjen senere\n\nfil: {2}\n\nerror ",
|
||||
"u_ehstmp": "prøver igjen; se mld nederst",
|
||||
"u_ehsfin": "server nektet forespørselen om å ferdigstille filen; prøver igjen...",
|
||||
@@ -943,7 +953,7 @@ ebi('op_up2k').innerHTML = (
|
||||
|
||||
'<table id="u2conf">\n' +
|
||||
' <tr>\n' +
|
||||
' <td class="c"><br />' + L.ul_par + '</td>\n' +
|
||||
' <td class="c" data-perm="read"><br />' + L.ul_par + '</td>\n' +
|
||||
' <td class="c" rowspan="2">\n' +
|
||||
' <input type="checkbox" id="multitask" />\n' +
|
||||
' <label for="multitask" tt="' + L.ut_mt + '">🏃</label>\n' +
|
||||
@@ -964,7 +974,7 @@ ebi('op_up2k').innerHTML = (
|
||||
' <td data-perm="read" rowspan="2" id="u2c3w"></td>\n' +
|
||||
' </tr>\n' +
|
||||
' <tr>\n' +
|
||||
' <td class="c">\n' +
|
||||
' <td class="c" data-perm="read">\n' +
|
||||
' <a href="#" class="b" id="nthread_sub">–</a><input\n' +
|
||||
' class="txtbox" id="nthread" value="2" tt="' + L.ut_par + '"/><a\n' +
|
||||
' href="#" class="b" id="nthread_add">+</a><br /> \n' +
|
||||
@@ -2078,8 +2088,13 @@ function prev_song(e) {
|
||||
return song_skip(-1);
|
||||
}
|
||||
function dl_song() {
|
||||
if (!mp || !mp.au)
|
||||
return;
|
||||
if (!mp || !mp.au) {
|
||||
var o = QSA('#files a[id]');
|
||||
for (var a = 0; a < o.length; a++)
|
||||
o[a].setAttribute('download', '');
|
||||
|
||||
return toast.inf(10, L.f_dls);
|
||||
}
|
||||
|
||||
var url = mp.tracks[mp.au.tid];
|
||||
url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
|
||||
@@ -2865,7 +2880,7 @@ function eval_hash() {
|
||||
|
||||
// folder nav
|
||||
ebi('goh').parentElement.appendChild(mknod('span', null,
|
||||
'<a href="#" id="gop">prev</a>/<a href="#" id="gou">up</a>/<a href="#" id="gon">next</a>'));
|
||||
'<a href="#" id="gop" tt="' + L.gop + '</a>/<a href="#" id="gou" tt="' + L.gou + '</a>/<a href="#" id="gon" tt="' + L.gon + '</a>'));
|
||||
ebi('gop').onclick = function () { tree_neigh(-1); }
|
||||
ebi('gon').onclick = function () { tree_neigh(1); }
|
||||
ebi('gou').onclick = function () { tree_up(true); }
|
||||
@@ -3377,7 +3392,7 @@ var fileman = (function () {
|
||||
}
|
||||
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', f[0].src + '?move=' + dst, true);
|
||||
xhr.open('POST', f[0].src + '?move=' + dst, true);
|
||||
xhr.onload = xhr.onerror = rename_cb;
|
||||
xhr.send();
|
||||
}
|
||||
@@ -3408,7 +3423,7 @@ var fileman = (function () {
|
||||
}
|
||||
toast.show('inf r', 0, esc(L.fd_busy.format(vps.length + 1, vp)), 'r');
|
||||
|
||||
xhr.open('GET', vp + '?delete', true);
|
||||
xhr.open('POST', vp + '?delete', true);
|
||||
xhr.onload = xhr.onerror = delete_cb;
|
||||
xhr.send();
|
||||
}
|
||||
@@ -3516,7 +3531,7 @@ var fileman = (function () {
|
||||
|
||||
var dst = get_evpath() + vp.split('/').pop();
|
||||
|
||||
xhr.open('GET', vp + '?move=' + dst, true);
|
||||
xhr.open('POST', vp + '?move=' + dst, true);
|
||||
xhr.onload = xhr.onerror = paste_cb;
|
||||
xhr.send();
|
||||
}
|
||||
@@ -3641,7 +3656,7 @@ var showfile = (function () {
|
||||
qsr('#prism_css');
|
||||
var el = mknod('link', 'prism_css');
|
||||
el.rel = 'stylesheet';
|
||||
el.href = '/.cpr/deps/prism' + (light ? '' : 'd') + '.css';
|
||||
el.href = SR + '/.cpr/deps/prism' + (light ? '' : 'd') + '.css';
|
||||
document.head.appendChild(el);
|
||||
};
|
||||
|
||||
@@ -3769,7 +3784,7 @@ var showfile = (function () {
|
||||
if (!defer)
|
||||
fun(el.firstChild);
|
||||
else
|
||||
import_js('/.cpr/deps/prism.js', function () { fun(); });
|
||||
import_js(SR + '/.cpr/deps/prism.js', function () { fun(); });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4053,7 +4068,7 @@ var thegrid = (function () {
|
||||
var oth = ebi(this.getAttribute('ref')),
|
||||
href = noq_href(this),
|
||||
aplay = ebi('a' + oth.getAttribute('id')),
|
||||
is_img = /\.(gif|jpe?g|png|webp|webm|mkv|mp4)(\?|$)/i.test(href),
|
||||
is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href),
|
||||
is_dir = href.endsWith('/'),
|
||||
in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)),
|
||||
have_sel = QS('#files tr.sel'),
|
||||
@@ -4175,10 +4190,10 @@ var thegrid = (function () {
|
||||
if (r.thumbs) {
|
||||
ihref += '?th=' + (have_webp ? 'w' : 'j');
|
||||
if (href == "#")
|
||||
ihref = '/.cpr/ico/⏏️';
|
||||
ihref = SR + '/.cpr/ico/⏏️';
|
||||
}
|
||||
else if (isdir) {
|
||||
ihref = '/.cpr/ico/folder';
|
||||
ihref = SR + '/.cpr/ico/folder';
|
||||
}
|
||||
else {
|
||||
var ar = href.split('.');
|
||||
@@ -4203,7 +4218,7 @@ var thegrid = (function () {
|
||||
else
|
||||
ext = "unk";
|
||||
}
|
||||
ihref = '/.cpr/ico/' + ext;
|
||||
ihref = SR + '/.cpr/ico/' + ext;
|
||||
}
|
||||
ihref += (ihref.indexOf('?') > 0 ? '&' : '?') + 'cache=i';
|
||||
|
||||
@@ -4778,7 +4793,7 @@ document.onkeydown = function (e) {
|
||||
clearTimeout(search_timeout);
|
||||
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', '/?srch', true);
|
||||
xhr.open('POST', SR + '/?srch', true);
|
||||
xhr.setRequestHeader('Content-Type', 'text/plain');
|
||||
xhr.onload = xhr.onerror = xhr_search_results;
|
||||
xhr.ts = Date.now();
|
||||
@@ -5172,8 +5187,8 @@ var treectl = (function () {
|
||||
|
||||
function rendertree(res, ts, top0, dst, rst) {
|
||||
var cur = ebi('treeul').getAttribute('ts');
|
||||
if (cur && parseInt(cur) > ts) {
|
||||
console.log("reject tree");
|
||||
if (cur && parseInt(cur) > ts + 20 && QS('#treeul>li>a+a')) {
|
||||
console.log("reject tree; " + cur + " / " + (ts - cur));
|
||||
return;
|
||||
}
|
||||
ebi('treeul').setAttribute('ts', ts);
|
||||
@@ -5317,7 +5332,12 @@ var treectl = (function () {
|
||||
treegrow.call(this.previousSibling, e);
|
||||
return;
|
||||
}
|
||||
r.reqls(this.getAttribute('href'), true);
|
||||
var href = this.getAttribute('href');
|
||||
if (R && !href.startsWith(SR)) {
|
||||
window.location = href;
|
||||
return;
|
||||
}
|
||||
r.reqls(href, true);
|
||||
r.dir_cb = tree_scrollto;
|
||||
thegrid.setvis(true);
|
||||
}
|
||||
@@ -5392,7 +5412,7 @@ var treectl = (function () {
|
||||
for (var a = 0; a < res.dirs.length; a++)
|
||||
dirs.push(res.dirs[a].href.split('/')[0].split('?')[0]);
|
||||
|
||||
rendertree({ "a": dirs }, Date.now(), ".", get_evpath());
|
||||
rendertree({ "a": dirs }, this.ts, ".", get_evpath());
|
||||
}
|
||||
|
||||
r.gentab(this.top, res);
|
||||
@@ -5400,8 +5420,8 @@ var treectl = (function () {
|
||||
despin('#files');
|
||||
despin('#gfiles');
|
||||
|
||||
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
|
||||
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
|
||||
sandbox(ebi('pro'), sb_lg, '', res.logues ? res.logues[0] || "" : "");
|
||||
sandbox(ebi('epi'), sb_lg, '', res.logues ? res.logues[1] || "" : "");
|
||||
|
||||
clmod(ebi('epi'), 'mdo');
|
||||
if (res.readme)
|
||||
@@ -5541,7 +5561,7 @@ var treectl = (function () {
|
||||
qsr('#bbsw');
|
||||
if (ls0 === null) {
|
||||
var xhr = new XHR();
|
||||
xhr.open('GET', '/?am_js', true);
|
||||
xhr.open('GET', SR + '/?am_js', true);
|
||||
xhr.send();
|
||||
|
||||
r.ls_cb = showfile.addlinks;
|
||||
@@ -5739,7 +5759,8 @@ function apply_perms(newperms) {
|
||||
|
||||
ebi('acc_info').innerHTML = '<span id="srv_info2"><span>' + srvinf +
|
||||
'</span></span><span' + aclass + axs + L.access + '</span>' + (acct != '*' ?
|
||||
'<a href="/?pw=x">' + L.logout + acct + '</a>' : '<a href="/?h">Login</a>');
|
||||
'<a href="' + SR + '/?pw=x">' + L.logout + acct + '</a>' :
|
||||
'<a href="' + SR + '/?h">Login</a>');
|
||||
|
||||
var o = QSA('#ops>a[data-perm]');
|
||||
for (var a = 0; a < o.length; a++) {
|
||||
@@ -6534,6 +6555,37 @@ var msel = (function () {
|
||||
})();
|
||||
|
||||
|
||||
var globalcss = (function () {
|
||||
var ret = '';
|
||||
return function () {
|
||||
if (ret)
|
||||
return ret;
|
||||
|
||||
var dcs = document.styleSheets;
|
||||
for (var a = 0; a < dcs.length; a++) {
|
||||
var base = dcs[a].href,
|
||||
ds = dcs[a].cssRules;
|
||||
|
||||
if (!base)
|
||||
continue;
|
||||
|
||||
base = base.replace(/[^/]+$/, '');
|
||||
for (var b = 0; b < ds.length; b++) {
|
||||
var css = ds[b].cssText.split(/\burl\(/g);
|
||||
ret += css[0];
|
||||
for (var c = 1; c < css.length; c++) {
|
||||
var delim = (/^["']/.exec(css[c])) ? css[c].slice(0, 1) : '';
|
||||
ret += 'url(' + delim + ((css[c].slice(0, 8).indexOf('://') + 1 || css[c].startsWith('/')) ? '' : base) +
|
||||
css[c].slice(delim ? 1 : 0);
|
||||
}
|
||||
ret += '\n';
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
function show_md(md, name, div, url, depth) {
|
||||
var errmsg = L.md_eshow + name + ':\n\n',
|
||||
now = get_evpath();
|
||||
@@ -6546,14 +6598,14 @@ function show_md(md, name, div, url, depth) {
|
||||
if (depth)
|
||||
return toast.warn(10, errmsg + 'failed to load marked.js')
|
||||
|
||||
return import_js('/.cpr/deps/marked.js', function () {
|
||||
return import_js(SR + '/.cpr/deps/marked.js', function () {
|
||||
show_md(md, name, div, url, 1);
|
||||
});
|
||||
}
|
||||
|
||||
md_plug = {}
|
||||
md = load_md_plug(md, 'pre');
|
||||
md = load_md_plug(md, 'post');
|
||||
md = load_md_plug(md, 'post', sb_md);
|
||||
|
||||
var marked_opts = {
|
||||
headerPrefix: 'md-',
|
||||
@@ -6566,7 +6618,8 @@ function show_md(md, name, div, url, depth) {
|
||||
|
||||
try {
|
||||
clmod(div, 'mdo', 1);
|
||||
div.innerHTML = marked.parse(md, marked_opts);
|
||||
if (sandbox(div, sb_md, 'mdo', marked.parse(md, marked_opts)))
|
||||
return;
|
||||
|
||||
ext = md_plug.post;
|
||||
ext = ext ? [ext[0].render, ext[0].render2] : [];
|
||||
@@ -6620,6 +6673,86 @@ if (readme)
|
||||
show_readme(readme);
|
||||
|
||||
|
||||
function sandbox(tgt, rules, cls, html) {
|
||||
if (!rules || (html || '').indexOf('<') == -1) {
|
||||
tgt.innerHTML = html;
|
||||
clmod(tgt, 'sb');
|
||||
return false;
|
||||
}
|
||||
clmod(tgt, 'sb', 1);
|
||||
var tid = tgt.getAttribute('id'),
|
||||
hash = location.hash,
|
||||
want = '';
|
||||
|
||||
if (hash.startsWith('#md-'))
|
||||
want = hash.slice(1);
|
||||
|
||||
var env = '', tags = QSA('script');
|
||||
for (var a = 0; a < tags.length; a++) {
|
||||
var js = tags[a].innerHTML;
|
||||
if (js && js.indexOf('have_up2k_idx') + 1)
|
||||
env = js.split(/\blogues *=/)[0] + 'a;';
|
||||
}
|
||||
|
||||
html = '<html class="iframe ' + document.documentElement.className + '"><head><style>' + globalcss() +
|
||||
'</style><base target="_parent"></head><body id="b" class="logue ' + cls + '">' + html +
|
||||
'<script>' + env + '</script>' +
|
||||
'<script src="' + SR + '/.cpr/util.js?_={{ ts }}"></script>' +
|
||||
'<script>var ebi=document.getElementById.bind(document),d=document.documentElement,' +
|
||||
'loc=new URL("' + location.href.split('?')[0] + '");' +
|
||||
'function say(m){window.parent.postMessage(m,"*")};' +
|
||||
'setTimeout(function(){var its=0,pih=-1,f=function(){' +
|
||||
'var ih=2+Math.min(parseInt(getComputedStyle(d).height),d.scrollHeight);' +
|
||||
'if(ih!=pih){pih=ih;say("iheight #' + tid + ' "+ih,"*")}' +
|
||||
'if(++its<20)return setTimeout(f,20);if(its==20)setInterval(f,200)' +
|
||||
'};f();' +
|
||||
'window.onfocus=function(){say("igot #' + tid + '")};' +
|
||||
'window.onblur=function(){say("ilost #' + tid + '")};' +
|
||||
'var el="' + want + '"&&ebi("' + want + '");' +
|
||||
'if(el)say("iscroll #' + tid + ' "+el.offsetTop);' +
|
||||
(cls == 'mdo' && md_plug.post ?
|
||||
'const x={' + md_plug.post + '};' +
|
||||
'if(x.render)x.render(ebi("b"));' +
|
||||
'if(x.render2)x.render2(ebi("b"));' : '') +
|
||||
'},1)</script></body></html>';
|
||||
|
||||
var fr = mknod('iframe');
|
||||
fr.setAttribute('sandbox', rules ? 'allow-' + rules.replace(/ /g, ' allow-') : '');
|
||||
fr.setAttribute('srcdoc', html);
|
||||
tgt.innerHTML = '';
|
||||
tgt.appendChild(fr);
|
||||
return true;
|
||||
}
|
||||
window.addEventListener("message", function (e) {
|
||||
try {
|
||||
console.log('msg:' + e.data);
|
||||
var t = e.data.split(/ /g);
|
||||
if (t[0] == 'iheight') {
|
||||
var el = QS(t[1] + '>iframe');
|
||||
el.style.height = t[2] + 'px';
|
||||
el.style.visibility = 'unset';
|
||||
}
|
||||
else if (t[0] == 'iscroll') {
|
||||
var y1 = QS(t[1]).offsetTop,
|
||||
y2 = parseInt(t[2]);
|
||||
console.log(y1, y2);
|
||||
document.documentElement.scrollTop = y1 + y2;
|
||||
}
|
||||
else if (t[0] == 'igot' || t[0] == 'ilost') {
|
||||
clmod(QS(t[1] + '>iframe'), 'focus', t[0] == 'igot');
|
||||
}
|
||||
} catch (ex) {
|
||||
console.log('msg-err: ' + ex);
|
||||
}
|
||||
}, false);
|
||||
|
||||
|
||||
if (sb_lg && logues.length) {
|
||||
sandbox(ebi('pro'), sb_lg, '', logues[0]);
|
||||
sandbox(ebi('epi'), sb_lg, '', logues[1]);
|
||||
}
|
||||
|
||||
|
||||
(function () {
|
||||
try {
|
||||
var tr = ebi('files').tBodies[0].rows;
|
||||
@@ -6678,7 +6811,7 @@ var unpost = (function () {
|
||||
else
|
||||
html.push('-- <em>' + (filt.value ? L.un_no2 : L.un_no1) + '</em>');
|
||||
|
||||
var mods = [1000, 100, 10];
|
||||
var mods = [10, 100, 1000];
|
||||
for (var a = 0; a < res.length; a++) {
|
||||
for (var b = 0; b < mods.length; b++)
|
||||
if (a % mods[b] == 0 && res.length > a + mods[b] / 10)
|
||||
@@ -6699,7 +6832,7 @@ var unpost = (function () {
|
||||
r.me = me;
|
||||
}
|
||||
|
||||
var q = '/?ups';
|
||||
var q = SR + '/?ups';
|
||||
if (filt.value)
|
||||
q += '&filter=' + uricom_enc(filt.value, true);
|
||||
|
||||
@@ -6765,7 +6898,7 @@ var unpost = (function () {
|
||||
var xhr = new XHR();
|
||||
xhr.n = n;
|
||||
xhr.n2 = n2;
|
||||
xhr.open('POST', '/?delete&lim=' + req.length, true);
|
||||
xhr.open('POST', SR + '/?delete&lim=' + req.length, true);
|
||||
xhr.onload = xhr.onerror = unpost_delete_cb;
|
||||
xhr.send(JSON.stringify(req));
|
||||
};
|
||||
@@ -6882,18 +7015,19 @@ function reload_browser() {
|
||||
filecols.set_style();
|
||||
|
||||
var parts = get_evpath().split('/'),
|
||||
rm = QSA('#path>a+a+a'),
|
||||
rm = ebi('entree'),
|
||||
ftab = ebi('files'),
|
||||
link = '/', o;
|
||||
link = '', o;
|
||||
|
||||
for (a = rm.length - 1; a >= 0; a--)
|
||||
rm[a].parentNode.removeChild(rm[a]);
|
||||
while (rm.nextSibling)
|
||||
rm.parentNode.removeChild(rm.nextSibling);
|
||||
|
||||
for (var a = 1; a < parts.length - 1; a++) {
|
||||
for (var a = 0; a < parts.length - 1; a++) {
|
||||
link += parts[a] + '/';
|
||||
o = mknod('a');
|
||||
o.setAttribute('href', link);
|
||||
o.textContent = uricom_dec(parts[a]);
|
||||
o.textContent = uricom_dec(parts[a]) || '/';
|
||||
ebi('path').appendChild(mknod('i'));
|
||||
ebi('path').appendChild(o);
|
||||
}
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
<div>{{ logues[1] }}</div><br />
|
||||
{%- endif %}
|
||||
|
||||
<h2><a href="/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||
<h2><a href="{{ r }}/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/md2.css?_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
@@ -128,7 +128,8 @@ write markdown (most html is 🙆 too)
|
||||
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }},
|
||||
var SR = {{ r|tojson }},
|
||||
last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
@@ -153,10 +154,10 @@ l.light = drk? 0:1;
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/md.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/md.js?_={{ ts }}"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/md2.js?_={{ ts }}"></script>
|
||||
{%- endif %}
|
||||
</body></html>
|
||||
|
||||
@@ -930,7 +930,9 @@ var set_lno = (function () {
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
ev = ev || window.event;
|
||||
var kc = ev.code || ev.keyCode || ev.which;
|
||||
var kc = ev.code || ev.keyCode || ev.which,
|
||||
editing = document.activeElement == dom_src;
|
||||
|
||||
//console.log(ev.key, ev.code, ev.keyCode, ev.which);
|
||||
if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
|
||||
save();
|
||||
@@ -941,12 +943,17 @@ var set_lno = (function () {
|
||||
if (d)
|
||||
d.click();
|
||||
}
|
||||
if (document.activeElement != dom_src)
|
||||
return true;
|
||||
|
||||
if (editing)
|
||||
set_lno();
|
||||
|
||||
if (ctrl(ev)) {
|
||||
if (ev.code == "KeyE") {
|
||||
dom_nsbs.click();
|
||||
return false;
|
||||
}
|
||||
if (!editing)
|
||||
return true;
|
||||
|
||||
if (ev.code == "KeyH" || kc == 72) {
|
||||
md_header(ev.shiftKey);
|
||||
return false;
|
||||
@@ -971,10 +978,6 @@ var set_lno = (function () {
|
||||
iter_uni();
|
||||
return false;
|
||||
}
|
||||
if (ev.code == "KeyE") {
|
||||
dom_nsbs.click();
|
||||
return false;
|
||||
}
|
||||
var up = ev.code == "ArrowUp" || kc == 38;
|
||||
var dn = ev.code == "ArrowDown" || kc == 40;
|
||||
if (up || dn) {
|
||||
@@ -987,6 +990,9 @@ var set_lno = (function () {
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (!editing)
|
||||
return true;
|
||||
|
||||
if (ev.code == "Tab" || kc == 9) {
|
||||
md_indent(ev.shiftKey);
|
||||
return false;
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="{{ r }}/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
</head>
|
||||
<body>
|
||||
<div id="mw">
|
||||
@@ -26,7 +26,8 @@
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }},
|
||||
var SR = {{ r|tojson }},
|
||||
last_modified = {{ lastmod }},
|
||||
have_emp = {{ have_emp|tojson }},
|
||||
dfavico = "{{ favico }}";
|
||||
|
||||
@@ -48,8 +49,8 @@ l.light = drk? 0:1;
|
||||
})();
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/mde.js?_={{ ts }}"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/msg.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
@@ -16,7 +16,8 @@ html {
|
||||
h1 {
|
||||
border-bottom: 1px solid #ccc;
|
||||
margin: 2em 0 .4em 0;
|
||||
padding: 0 0 .2em 0;
|
||||
padding: 0;
|
||||
line-height: 1em;
|
||||
font-weight: normal;
|
||||
}
|
||||
li {
|
||||
@@ -26,6 +27,7 @@ a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .6em;
|
||||
@@ -49,12 +51,30 @@ a.g {
|
||||
border-color: #3a0;
|
||||
box-shadow: 0 .3em 1em #4c0;
|
||||
}
|
||||
#repl {
|
||||
#repl,
|
||||
#pb a {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
}
|
||||
#repl {
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
left: .2em;
|
||||
}
|
||||
#pb {
|
||||
opacity: .5;
|
||||
position: fixed;
|
||||
bottom: .25em;
|
||||
right: .3em;
|
||||
}
|
||||
#pb span {
|
||||
opacity: .6;
|
||||
}
|
||||
#pb a {
|
||||
margin: 0;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
|
||||
@@ -8,19 +8,19 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<a id="a" href="/?h" class="af">refresh</a>
|
||||
<a id="v" href="/?hc" class="af">connect</a>
|
||||
<a id="a" href="{{ r }}/?h" class="af">refresh</a>
|
||||
<a id="v" href="{{ r }}/?hc" class="af">connect</a>
|
||||
|
||||
{%- if this.uname == '*' %}
|
||||
<p id="b">howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a id="c" href="/?pw=x" class="logout">logout</a>
|
||||
<a id="c" href="{{ r }}/?pw=x" class="logout">logout</a>
|
||||
<p><span id="m">welcome back,</span> <strong>{{ this.uname }}</strong></p>
|
||||
{%- endif %}
|
||||
|
||||
@@ -46,15 +46,15 @@
|
||||
<tbody>
|
||||
{% for mp in avol %}
|
||||
{%- if mp in vstate and vstate[mp] %}
|
||||
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
<tr><td><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a class="s" href="{{ r }}{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</td></tr></table>
|
||||
<div class="btns">
|
||||
<a id="d" href="/?stack">dump stack</a>
|
||||
<a id="e" href="/?reload=cfg">reload cfg</a>
|
||||
<a id="d" href="{{ r }}/?stack">dump stack</a>
|
||||
<a id="e" href="{{ r }}/?reload=cfg">reload cfg</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
@@ -62,7 +62,7 @@
|
||||
<h1 id="f">you can browse:</h1>
|
||||
<ul>
|
||||
{% for mp in rvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
@@ -71,7 +71,7 @@
|
||||
<h1 id="g">you can upload to:</h1>
|
||||
<ul>
|
||||
{% for mp in wvol %}
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
<li><a href="{{ r }}{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
@@ -79,18 +79,18 @@
|
||||
<h1 id="cc">client config:</h1>
|
||||
<ul>
|
||||
{% if k304 %}
|
||||
<li><a id="h" href="/?k304=n">disable k304</a> (currently enabled)
|
||||
<li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
|
||||
{%- else %}
|
||||
<li><a id="i" href="/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
<li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
|
||||
{% endif %}
|
||||
<blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
|
||||
|
||||
<li><a id="k" href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
|
||||
</ul>
|
||||
|
||||
<h1 id="l">login for more:</h1>
|
||||
<ul>
|
||||
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||
<form method="post" enctype="multipart/form-data" action="{{ r }}/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
@@ -98,15 +98,19 @@
|
||||
</ul>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
{%- if not this.args.nb %}
|
||||
<span id="pb"><span>powered by</span> <a href="{{ this.args.pb_url }}">copyparty</a></span>
|
||||
{%- endif %}
|
||||
<script>
|
||||
|
||||
var lang="{{ lang }}",
|
||||
var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ this.args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/splash.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/splash.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -17,9 +17,9 @@ var Ls = {
|
||||
"l1": "logg inn:",
|
||||
"m1": "velkommen tilbake,",
|
||||
"n1": "404: filen finnes ikke ┐( ´ -`)┌",
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="/?h">gå hjem</a>',
|
||||
"o1": 'eller kanskje du ikke har tilgang? prøv å logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"p1": "403: tilgang nektet ~┻━┻",
|
||||
"q1": 'du må logge inn eller <a href="/?h">gå hjem</a>',
|
||||
"q1": 'du må logge inn eller <a href="' + SR + '/?h">gå hjem</a>',
|
||||
"r1": "gå hjem",
|
||||
".s1": "kartlegg",
|
||||
"t1": "handling",
|
||||
|
||||
@@ -8,14 +8,14 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<meta name="theme-color" content="#333">
|
||||
{{ html_head }}
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap" class="w">
|
||||
<div class="cn">
|
||||
<p class="btns"><a href="/">browse files</a> // <a href="/?h">control panel</a></p>
|
||||
<p class="btns"><a href="/{{ rvp }}">browse files</a> // <a href="{{ r }}/?h">control panel</a></p>
|
||||
<p>or choose your OS for cooler alternatives:</p>
|
||||
<div class="ossel">
|
||||
<a id="swin" href="#">Windows</a>
|
||||
@@ -28,7 +28,7 @@
|
||||
make this server appear on your computer as a regular HDD!<br />
|
||||
pick your favorite below (sorted by performance, best first) and lets 🎉<br />
|
||||
<br />
|
||||
placeholders:
|
||||
<span class="os win lin mac">placeholders:</span>
|
||||
<span class="os win">
|
||||
{% if accs %}<code><b>{{ pw }}</b></code>=password, {% endif %}<code><b>W:</b></code>=mountpoint
|
||||
</span>
|
||||
@@ -47,27 +47,27 @@
|
||||
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ vp }} <b>W:</b>
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
{% endif %}
|
||||
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
|
||||
<pre>
|
||||
net use <b>w:</b> http{{ s }}://{{ ep }}/{{ vp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
|
||||
net use <b>w:</b> http{{ s }}://{{ ep }}/{{ rvp }}{% if accs %} k /user:<b>{{ pw }}</b>{% endif %}
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
<div class="os lin">
|
||||
<pre>
|
||||
yum install davfs2
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ vp }} <b>mp</b>
|
||||
{% if accs %}printf '%s\n' <b>{{ pw }}</b> k | {% endif %}mount -t davfs -ouid=1000 http{{ s }}://{{ ep }}/{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>or you can use rclone instead, which is much slower but doesn't require root:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=other{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ vp }} <b>mp</b>
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p>
|
||||
@@ -77,20 +77,20 @@
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
{%- if accs %}
|
||||
echo <b>{{ pw }}</b> | gio mount dav{{ s }}://k@{{ ep }}/{{ vp }}
|
||||
echo <b>{{ pw }}</b> | gio mount dav{{ s }}://k@{{ ep }}/{{ rvp }}
|
||||
{%- else %}
|
||||
gio mount -a dav{{ s }}://{{ ep }}/{{ vp }}
|
||||
gio mount -a dav{{ s }}://{{ ep }}/{{ rvp }}
|
||||
{%- endif %}
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
<div class="os mac">
|
||||
<pre>
|
||||
osascript -e ' mount volume "http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ vp }}" '
|
||||
osascript -e ' mount volume "http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}" '
|
||||
</pre>
|
||||
<p>or you can open up a Finder, press command-K and paste this instead:</p>
|
||||
<pre>
|
||||
http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ vp }}
|
||||
http{{ s }}://k:<b>{{ pw }}</b>@{{ ep }}/{{ rvp }}
|
||||
</pre>
|
||||
|
||||
{% if s %}
|
||||
@@ -108,26 +108,26 @@
|
||||
<p>if you can, install <a href="https://winfsp.dev/rel/">winfsp</a>+<a href="https://downloads.rclone.org/rclone-current-windows-amd64.zip">rclone</a> and then paste this in cmd:</p>
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ vp }} <b>W:</b>
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>W:</b>
|
||||
</pre>
|
||||
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
|
||||
<pre>
|
||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
|
||||
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
</pre>
|
||||
</div>
|
||||
|
||||
<div class="os lin">
|
||||
<pre>
|
||||
rclone config create {{ aname }}-ftp ftp host={{ rip }} port={{ args.ftp or args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls={{ "false" if args.ftp else "true" }}
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ vp }} <b>mp</b>
|
||||
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftp:{{ rvp }} <b>mp</b>
|
||||
</pre>
|
||||
<p>emergency alternative (gnome/gui-only):</p>
|
||||
<!-- gnome-bug: ignores vp -->
|
||||
<pre>
|
||||
{%- if accs %}
|
||||
echo <b>{{ pw }}</b> | gio mount ftp{{ "" if args.ftp else "s" }}://k@{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
|
||||
echo <b>{{ pw }}</b> | gio mount ftp{{ "" if args.ftp else "s" }}://k@{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
{%- else %}
|
||||
gio mount -a ftp{{ "" if args.ftp else "s" }}://{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
|
||||
gio mount -a ftp{{ "" if args.ftp else "s" }}://{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
{%- endif %}
|
||||
</pre>
|
||||
</div>
|
||||
@@ -135,7 +135,7 @@
|
||||
<div class="os mac">
|
||||
<p>note: FTP is read-only on macos; please use WebDAV instead</p>
|
||||
<pre>
|
||||
open {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}k:<b>{{ pw }}</b>@{% else %}anonymous:@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ vp }}
|
||||
open {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}k:<b>{{ pw }}</b>@{% else %}anonymous:@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
|
||||
</pre>
|
||||
</div>
|
||||
{% endif %}
|
||||
@@ -144,18 +144,18 @@
|
||||
|
||||
<h1>partyfuse</h1>
|
||||
<p>
|
||||
<a href="/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
|
||||
<a href="{{ r }}/.cpr/a/partyfuse.py">partyfuse.py</a> -- fast, read-only,
|
||||
<span class="os win">needs <a href="https://winfsp.dev/rel/">winfsp</a></span>
|
||||
<span class="os lin">doesn't need root</span>
|
||||
</p>
|
||||
<pre>
|
||||
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ vp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
||||
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
|
||||
</pre>
|
||||
{% if s %}
|
||||
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p>
|
||||
{% endif %}
|
||||
<p>
|
||||
you can use <a href="/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
|
||||
you can use <a href="{{ r }}/.cpr/a/up2k.py">up2k.py</a> to upload (sometimes faster than web-browsers)
|
||||
</p>
|
||||
|
||||
|
||||
@@ -188,13 +188,14 @@
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var lang="{{ lang }}",
|
||||
var SR = {{ r|tojson }},
|
||||
lang="{{ lang }}",
|
||||
dfavico="{{ favico }}";
|
||||
|
||||
document.documentElement.className=localStorage.theme||"{{ args.theme }}";
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="{{ r }}/.cpr/svcs.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -39,4 +39,4 @@ function setos(os) {
|
||||
clmod(oa[a], 'g', oa[a].id.slice(1) == os);
|
||||
}
|
||||
|
||||
setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : '');
|
||||
setos(WINDOWS ? 'win' : LINUX ? 'lin' : MACOS ? 'mac' : 'idk');
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
font-display: swap;
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
@@ -69,6 +69,7 @@ html {
|
||||
#toastb {
|
||||
max-height: 70vh;
|
||||
overflow-y: auto;
|
||||
padding: 1px;
|
||||
}
|
||||
#toast.scroll #toastb {
|
||||
overflow-y: scroll;
|
||||
|
||||
@@ -672,7 +672,7 @@ function Donut(uc, st) {
|
||||
favico.upd();
|
||||
wintitle();
|
||||
if (document.visibilityState == 'hidden')
|
||||
tenstrobe = setTimeout(enstrobe, 500); //debounce
|
||||
tenstrobe = setTimeout(r.enstrobe, 500); //debounce
|
||||
}
|
||||
};
|
||||
|
||||
@@ -709,7 +709,7 @@ function Donut(uc, st) {
|
||||
}
|
||||
};
|
||||
|
||||
function enstrobe() {
|
||||
r.enstrobe = function () {
|
||||
strobes = ['████████████████', '________________', '████████████████'];
|
||||
tstrober = setInterval(strobe, 300);
|
||||
|
||||
@@ -779,7 +779,7 @@ function up2k_init(subtle) {
|
||||
|
||||
setTimeout(function () {
|
||||
if (window.WebAssembly && !hws.length)
|
||||
fetch('/.cpr/w.hash.js' + CB);
|
||||
fetch(SR + '/.cpr/w.hash.js' + CB);
|
||||
}, 1000);
|
||||
|
||||
function showmodal(msg) {
|
||||
@@ -809,7 +809,7 @@ function up2k_init(subtle) {
|
||||
m = L.u_https1 + ' <a href="' + (window.location + '').replace(':', 's:') + '">' + L.u_https2 + '</a> ' + L.u_https3;
|
||||
|
||||
showmodal('<h1>loading ' + fn + '</h1>');
|
||||
import_js('/.cpr/deps/' + fn, unmodal);
|
||||
import_js(SR + '/.cpr/deps/' + fn, unmodal);
|
||||
|
||||
if (HTTPS) {
|
||||
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
||||
@@ -867,7 +867,7 @@ function up2k_init(subtle) {
|
||||
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort);
|
||||
bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly && (!subtle || !CHROME || MOBILE || VCHROME >= 107), set_hashw);
|
||||
bcfg_bind(uc, 'upnag', 'upnag', false, set_upnag);
|
||||
bcfg_bind(uc, 'upsfx', 'upsfx', false);
|
||||
bcfg_bind(uc, 'upsfx', 'upsfx', false, set_upsfx);
|
||||
|
||||
var st = {
|
||||
"files": [],
|
||||
@@ -895,15 +895,25 @@ function up2k_init(subtle) {
|
||||
"finished": 0
|
||||
},
|
||||
"time": {
|
||||
"hashing": 0,
|
||||
"uploading": 0,
|
||||
"busy": 0
|
||||
"hashing": 0.01,
|
||||
"uploading": 0.01,
|
||||
"busy": 0.01
|
||||
},
|
||||
"eta": {
|
||||
"h": "",
|
||||
"u": "",
|
||||
"t": ""
|
||||
},
|
||||
"etaw": {
|
||||
"h": [['', 0, 0, 0]],
|
||||
"u": [['', 0, 0, 0]],
|
||||
"t": [['', 0, 0, 0]]
|
||||
},
|
||||
"etac": {
|
||||
"h": 0,
|
||||
"u": 0,
|
||||
"t": 0
|
||||
},
|
||||
"car": 0,
|
||||
"slow_io": null,
|
||||
"oserr": false,
|
||||
@@ -1312,7 +1322,7 @@ function up2k_init(subtle) {
|
||||
|
||||
if (window.WebAssembly && !hws.length) {
|
||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||
hws.push(new Worker('/.cpr/w.hash.js' + CB));
|
||||
hws.push(new Worker(SR + '/.cpr/w.hash.js' + CB));
|
||||
|
||||
console.log(hws.length + " hashers");
|
||||
}
|
||||
@@ -1480,10 +1490,20 @@ function up2k_init(subtle) {
|
||||
}
|
||||
}
|
||||
for (var a = 0; a < t.length; a++) {
|
||||
var rem = st.bytes.total - t[a][2],
|
||||
bps = t[a][1] / t[a][3],
|
||||
hid = t[a][0],
|
||||
var hid = t[a][0],
|
||||
eid = hid.slice(-1),
|
||||
etaw = st.etaw[eid];
|
||||
|
||||
if (st.etac[eid] > 100) { // num chunks
|
||||
st.etac[eid] = 0;
|
||||
etaw.push(jcp(t[a]));
|
||||
if (etaw.length > 5)
|
||||
etaw.shift();
|
||||
}
|
||||
|
||||
var h = etaw[0],
|
||||
rem = st.bytes.total - t[a][2],
|
||||
bps = (t[a][1] - h[1]) / Math.max(0.1, t[a][3] - h[3]),
|
||||
eta = Math.floor(rem / bps);
|
||||
|
||||
if (t[a][1] < 1024 || t[a][3] < 0.1) {
|
||||
@@ -1535,11 +1555,11 @@ function up2k_init(subtle) {
|
||||
st.busy.handshake.length)
|
||||
return false;
|
||||
|
||||
if (t.n - st.car > 8)
|
||||
if (t.n - st.car > Math.max(8, parallel_uploads))
|
||||
// prevent runahead from a stuck upload (slow server hdd)
|
||||
return false;
|
||||
|
||||
if ((uc.multitask ? 1 : 0) <
|
||||
if ((uc.multitask ? parallel_uploads : 0) <
|
||||
st.todo.upload.length +
|
||||
st.busy.upload.length)
|
||||
return false;
|
||||
@@ -1551,21 +1571,22 @@ function up2k_init(subtle) {
|
||||
if (!parallel_uploads)
|
||||
return false;
|
||||
|
||||
var nhs = st.todo.handshake.length + st.busy.handshake.length,
|
||||
nup = st.todo.upload.length + st.busy.upload.length;
|
||||
|
||||
if (uc.multitask) {
|
||||
if (nhs + nup < parallel_uploads)
|
||||
return true;
|
||||
|
||||
if (!uc.az)
|
||||
return st.todo.handshake.length + st.busy.handshake.length < 2;
|
||||
return nhs < 2;
|
||||
|
||||
var ahead = st.bytes.hashed - st.bytes.finished,
|
||||
nmax = ahead < biggest_file / 8 ? 32 : 16;
|
||||
|
||||
return ahead < biggest_file &&
|
||||
st.todo.handshake.length + st.busy.handshake.length < nmax;
|
||||
return ahead < biggest_file && nhs < nmax;
|
||||
}
|
||||
return handshakes_permitted() && 0 ==
|
||||
st.todo.handshake.length +
|
||||
st.busy.handshake.length +
|
||||
st.todo.upload.length +
|
||||
st.busy.upload.length;
|
||||
return handshakes_permitted() && 0 == nhs + nup;
|
||||
}
|
||||
|
||||
var tasker = (function () {
|
||||
@@ -1730,20 +1751,22 @@ function up2k_init(subtle) {
|
||||
var sr = uc.fsearch,
|
||||
ok = pvis.ctr.ok,
|
||||
ng = pvis.ctr.ng,
|
||||
spd = Math.floor(st.bytes.finished / st.time.busy),
|
||||
suf = '\n\n{0} @ {1}/s'.format(shumantime(st.time.busy), humansize(spd)),
|
||||
t = uc.ask_up ? 0 : 10;
|
||||
|
||||
console.log('toast', ok, ng);
|
||||
|
||||
if (ok && ng)
|
||||
toast.warn(t, uc.nagtxt = (sr ? L.ur_sm : L.ur_um).format(ok, ng));
|
||||
toast.warn(t, uc.nagtxt = (sr ? L.ur_sm : L.ur_um).format(ok, ng) + suf);
|
||||
else if (ok > 1)
|
||||
toast.ok(t, uc.nagtxt = (sr ? L.ur_aso : L.ur_auo).format(ok));
|
||||
toast.ok(t, uc.nagtxt = (sr ? L.ur_aso : L.ur_auo).format(ok) + suf);
|
||||
else if (ok)
|
||||
toast.ok(t, uc.nagtxt = sr ? L.ur_1so : L.ur_1uo);
|
||||
toast.ok(t, uc.nagtxt = (sr ? L.ur_1so : L.ur_1uo) + suf);
|
||||
else if (ng > 1)
|
||||
toast.err(t, uc.nagtxt = (sr ? L.ur_asn : L.ur_aun).format(ng));
|
||||
toast.err(t, uc.nagtxt = (sr ? L.ur_asn : L.ur_aun).format(ng) + suf);
|
||||
else if (ng)
|
||||
toast.err(t, uc.nagtxt = sr ? L.ur_1sn : L.ur_1un);
|
||||
toast.err(t, uc.nagtxt = (sr ? L.ur_1sn : L.ur_1un) + suf);
|
||||
|
||||
timer.rm(etafun);
|
||||
timer.rm(donut.do);
|
||||
@@ -1854,6 +1877,7 @@ function up2k_init(subtle) {
|
||||
cdr = Math.min(chunksize + car, t.size);
|
||||
|
||||
st.bytes.hashed += cdr - car;
|
||||
st.etac.h++;
|
||||
|
||||
function orz(e) {
|
||||
bpend--;
|
||||
@@ -2298,9 +2322,10 @@ function up2k_init(subtle) {
|
||||
}
|
||||
|
||||
var err_pend = rsp.indexOf('partial upload exists at a different') + 1,
|
||||
err_plug = rsp.indexOf('upload blocked by x') + 1,
|
||||
err_dupe = rsp.indexOf('upload rejected, file already exists') + 1;
|
||||
|
||||
if (err_pend || err_dupe) {
|
||||
if (err_pend || err_plug || err_dupe) {
|
||||
err = rsp;
|
||||
ofs = err.indexOf('\n/');
|
||||
if (ofs !== -1) {
|
||||
@@ -2357,8 +2382,17 @@ function up2k_init(subtle) {
|
||||
function can_upload_next() {
|
||||
var upt = st.todo.upload[0],
|
||||
upf = st.files[upt.nfile],
|
||||
nhs = st.busy.handshake.length,
|
||||
hs = nhs && st.busy.handshake[0],
|
||||
now = Date.now();
|
||||
|
||||
if (nhs >= 16)
|
||||
return false;
|
||||
|
||||
if (hs && hs.t_uploaded && Date.now() - hs.t_busied > 10000)
|
||||
// verification HS possibly held back by uploads
|
||||
return false;
|
||||
|
||||
for (var a = 0, aa = st.busy.handshake.length; a < aa; a++) {
|
||||
var hs = st.busy.handshake[a];
|
||||
if (hs.n < upt.nfile && hs.t_busied > now - 10 * 1000 && !st.files[hs.n].bytes_uploaded)
|
||||
@@ -2398,11 +2432,21 @@ function up2k_init(subtle) {
|
||||
|
||||
function orz(xhr) {
|
||||
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + '';
|
||||
if (txt.indexOf('upload blocked by x') + 1) {
|
||||
apop(st.busy.upload, upt);
|
||||
apop(t.postlist, npart);
|
||||
pvis.seth(t.n, 1, "ERROR");
|
||||
pvis.seth(t.n, 2, txt.split(/\n/)[0]);
|
||||
pvis.move(t.n, 'ng');
|
||||
return;
|
||||
}
|
||||
if (xhr.status == 200) {
|
||||
pvis.prog(t, npart, cdr - car);
|
||||
st.bytes.finished += cdr - car;
|
||||
st.bytes.uploaded += cdr - car;
|
||||
t.bytes_uploaded += cdr - car;
|
||||
st.etac.u++;
|
||||
st.etac.t++;
|
||||
}
|
||||
else if (txt.indexOf('already got that') + 1 ||
|
||||
txt.indexOf('already being written') + 1) {
|
||||
@@ -2530,9 +2574,15 @@ function up2k_init(subtle) {
|
||||
if (dir.target) {
|
||||
clmod(obj, 'err', 1);
|
||||
var v = Math.floor(parseInt(obj.value));
|
||||
if (v < 0 || v > 64 || v !== v)
|
||||
if (v < 0 || v !== v)
|
||||
return;
|
||||
|
||||
if (v > 64) {
|
||||
var p = obj.selectionStart;
|
||||
v = obj.value = 64;
|
||||
obj.selectionStart = obj.selectionEnd = p;
|
||||
}
|
||||
|
||||
parallel_uploads = v;
|
||||
swrite('nthread', v);
|
||||
clmod(obj, 'err');
|
||||
@@ -2749,6 +2799,21 @@ function up2k_init(subtle) {
|
||||
|
||||
if (en && Notification.permission == 'default')
|
||||
Notification.requestPermission().then(chknag, chknag);
|
||||
|
||||
set_upsfx(en);
|
||||
}
|
||||
|
||||
function set_upsfx(en) {
|
||||
if (!en)
|
||||
return;
|
||||
|
||||
toast.inf(10, 'OK -- <a href="#" id="nagtest">test it!</a>')
|
||||
|
||||
ebi('nagtest').onclick = function () {
|
||||
start_actx();
|
||||
uc.nagtxt = ':^)';
|
||||
setTimeout(donut.enstrobe, 200);
|
||||
};
|
||||
}
|
||||
|
||||
if (uc.upnag && (!window.Notification || Notification.permission != 'granted'))
|
||||
|
||||
@@ -9,6 +9,8 @@ if (!window.console || !console.log)
|
||||
var wah = '',
|
||||
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
|
||||
CB = '?_=' + Date.now(),
|
||||
R = SR.slice(1),
|
||||
RS = R ? "/" + R : "",
|
||||
HALFMAX = 8192 * 8192 * 8192 * 8192,
|
||||
HTTPS = (window.location + '').indexOf('https:') === 0,
|
||||
TOUCH = 'ontouchstart' in window,
|
||||
@@ -193,8 +195,12 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
||||
var lsk = Object.keys(ls);
|
||||
lsk.sort();
|
||||
html.push('<p class="b">');
|
||||
for (var a = 0; a < lsk.length; a++)
|
||||
for (var a = 0; a < lsk.length; a++) {
|
||||
if (ls[lsk[a]].length > 9000)
|
||||
continue;
|
||||
|
||||
html.push(' <b>' + esc(lsk[a]) + '</b> <code>' + esc(ls[lsk[a]]) + '</code> ');
|
||||
}
|
||||
html.push('</p>');
|
||||
}
|
||||
catch (e) { }
|
||||
@@ -686,7 +692,9 @@ function noq_href(el) {
|
||||
|
||||
|
||||
function get_pwd() {
|
||||
var pwd = ('; ' + document.cookie).split('; cppwd=');
|
||||
var k = HTTPS ? 's=' : 'd=',
|
||||
pwd = ('; ' + document.cookie).split('; cppw' + k);
|
||||
|
||||
if (pwd.length < 2)
|
||||
return null;
|
||||
|
||||
@@ -1526,25 +1534,33 @@ var md_plug_err = function (ex, js) {
|
||||
if (ex)
|
||||
console.log(ex, js);
|
||||
};
|
||||
function load_md_plug(md_text, plug_type) {
|
||||
function load_md_plug(md_text, plug_type, defer) {
|
||||
if (defer)
|
||||
md_plug[plug_type] = null;
|
||||
|
||||
if (!have_emp)
|
||||
return md_text;
|
||||
|
||||
var find = '\n```copyparty_' + plug_type + '\n';
|
||||
var ofs = md_text.indexOf(find);
|
||||
if (ofs === -1)
|
||||
var find = '\n```copyparty_' + plug_type + '\n',
|
||||
md = md_text.replace(/\r/g, ''),
|
||||
ofs = md.indexOf(find),
|
||||
ofs2 = md.indexOf('\n```', ofs + 1);
|
||||
|
||||
if (ofs < 0 || ofs2 < 0)
|
||||
return md_text;
|
||||
|
||||
var ofs2 = md_text.indexOf('\n```', ofs + 1);
|
||||
if (ofs2 == -1)
|
||||
return md_text;
|
||||
var js = md.slice(ofs + find.length, ofs2 + 1);
|
||||
md = md.slice(0, ofs + 1) + md.slice(ofs2 + 4);
|
||||
md = md.replace(/$/g, '\r');
|
||||
|
||||
var js = md_text.slice(ofs + find.length, ofs2 + 1);
|
||||
var md = md_text.slice(0, ofs + 1) + md_text.slice(ofs2 + 4);
|
||||
if (defer) { // insert into sandbox
|
||||
md_plug[plug_type] = js;
|
||||
return md;
|
||||
}
|
||||
|
||||
var old_plug = md_plug[plug_type];
|
||||
if (!old_plug || old_plug[1] != js) {
|
||||
js = 'const x = { ' + js + ' }; x;';
|
||||
js = 'const loc = new URL("' + location.href + '"), x = { ' + js + ' }; x;';
|
||||
try {
|
||||
var x = eval(js);
|
||||
if (x['ctor']) {
|
||||
@@ -1678,7 +1694,7 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
|
||||
|
||||
qsr('#cf_frame');
|
||||
var fr = mknod('iframe', 'cf_frame');
|
||||
fr.src = '/?cf_challenge';
|
||||
fr.src = SR + '/?cf_challenge';
|
||||
document.body.appendChild(fr);
|
||||
}
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ catch (ex) {
|
||||
}
|
||||
function load_fb() {
|
||||
subtle = null;
|
||||
importScripts('/.cpr/deps/sha512.hw.js');
|
||||
importScripts('deps/sha512.hw.js');
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,3 +1,143 @@
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2023-0112-0515 `v1.5.6` many hands
|
||||
|
||||
hello from warsaw airport (goodbye japan ;_;)
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
|
||||
## new features
|
||||
* multiple upload handshakes in parallel
|
||||
* around **5x faster** when uploading small files
|
||||
* or **50x faster** if the server is on the other side of the planet
|
||||
* just crank up the `parallel uploads` like crazy (max is 64)
|
||||
* upload ui: total time and average speed is shown on completion
|
||||
|
||||
## bugfixes
|
||||
* browser ui didn't allow specifying number of threads for file search
|
||||
* dont panic if a digit key is pressed while viewing an image
|
||||
* workaround [linux kernel bug](https://utcc.utoronto.ca/~cks/space/blog/linux/KernelBindBugIn6016) causing log spam on dualstack
|
||||
* ~~related issue (also mostly harmless) will be fixed next relese 010770684db95bece206943768621f2c7c27bace~~
|
||||
* they fixed it in linux 6.1 so these workarounds will be gone too
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2022-1230-0754 `v1.5.5` made in japan
|
||||
|
||||
hello from tokyo
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
|
||||
## new features
|
||||
* image viewer now supports heif, avif, apng, svg
|
||||
* [partyfuse and up2k.py](https://github.com/9001/copyparty/tree/hovudstraum/bin): option to read password from textfile
|
||||
|
||||
## bugfixes
|
||||
* thumbnailing could fail if a primitive build of libvips is installed
|
||||
* ssdp was wonky on dualstack ipv6
|
||||
* mdns could crash on networks with invalid routes
|
||||
* support fat32 timestamp precisions
|
||||
* fixes spurious file reindexing in volumes located on SD cards on android tablets which lie about timestamps until the next device reboot or filesystem remount
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2022-1213-1956 `v1.5.3` folder-sync + turbo-rust
|
||||
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
|
||||
## new features
|
||||
* one-way folder sync (client to server) using [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/README.md#up2kpy) `-z --dr`
|
||||
* great rsync alternative when combined with `-e2ds --hardlink` deduplication on the server
|
||||
* **50x faster** when uploading small files to HDD, especially SMR
|
||||
* by switching sqlite to WAL which carries a small chance of temporarily forgetting the ~200 most recent uploads if you have a power outage or your OS crashes; see `--help-dbd` if you have `-mtp` plugins which produces metadata you can't afford to lose
|
||||
* location-based [reverse-proxying](https://github.com/9001/copyparty/#reverse-proxy) (but it's still recommended to use a dedicated domain/subdomain instead)
|
||||
* IPv6 link-local automatically enabled for TCP and zeroconf on NICs without a routable IPv6
|
||||
* zeroconf network filters now accept subnets too, for example `--z-on 192.168.0.0/16`
|
||||
* `.hist` folders are hidden on windows
|
||||
* ux:
|
||||
* more accurate total ETA on upload
|
||||
* sorting of batch-unpost links was unintuitive / dangerous
|
||||
* hotkey `Y` turns files into download links if nothing's selected
|
||||
* option to replace or disable the mediaplayer-toggle mouse cursor with `--mpmc`
|
||||
|
||||
## bugfixes
|
||||
* WAL probably/hopefully fixes #10 (we'll know in 6 months roughly)
|
||||
* repair db inconsistencies (which can happen if terminated during startup)
|
||||
* [davfs2](https://wiki.archlinux.org/title/Davfs2) did not approve of the authentication prompt
|
||||
* the `connect` button on the control-panel didn't work on phones
|
||||
* couldn't specify windows NICs in arguments `--z-on` / `--z-off` and friends
|
||||
* ssdp xml escaping for `--zsl` URL
|
||||
* no longer possible to accidentally launch multiple copyparty instances on the same port on windows
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2022-1203-2048 `v1.5.1` babel
|
||||
|
||||
named after [that other thing](https://en.wikipedia.org/wiki/Tower_of_Babel), not [the song](https://soundcloud.com/kanaze/babel-dimension-0-remix)
|
||||
* read-only demo server at https://a.ocv.me/pub/demo/
|
||||
|
||||
## new features
|
||||
* new protocols!
|
||||
* native IPv6 support, no longer requiring a reverse-proxy for that
|
||||
* [webdav server](https://github.com/9001/copyparty#webdav-server) -- read/write-access to copyparty straight from windows explorer, macos finder, kde/gnome
|
||||
* [smb/cifs server](https://github.com/9001/copyparty#smb-server) -- extremely buggy and unsafe, for when there is no other choice
|
||||
* [zeroconf](https://github.com/9001/copyparty#zeroconf) -- copyparty announces itself on the LAN, showing up in various file managers
|
||||
* [mdns](https://github.com/9001/copyparty#mdns) -- macos/kde/gnome + makes copyparty available at http://hostname.local/
|
||||
* [ssdp](https://github.com/9001/copyparty#ssdp) -- windows
|
||||
* commands to mount copyparty as a local disk are in the web-UI at control-panel --> `connect`
|
||||
* detect buggy / malicious clients spamming the server with idle connections
|
||||
* first tries to be nice with `Connection: close` (enough to fix windows-webdav)
|
||||
* eventually bans the IP for `--loris` minutes (default: 1 hour)
|
||||
* new arg `--xlink` for cross-volume detection of duplicate files on upload
|
||||
* new arg `--no-snap` to disable upload tracking on restart
|
||||
* will not create `.hist` folders unless required for thumbnails or markdown backups
|
||||
* [config includes](https://github.com/9001/copyparty/blob/hovudstraum/docs/example2.conf) -- split your config across multiple config files
|
||||
* ux improvements
|
||||
* hotkey `?` shows a summary of all the hotkeys
|
||||
* hotkey `Y` to download selected files
|
||||
* position indicator when hovering over the audio scrubber
|
||||
* textlabel on the volume slider
|
||||
* placeholder values in textboxes
|
||||
* options to hide scrollbars, compact media player, follow playing song
|
||||
* phone-specific
|
||||
* buttons for prev/next folder
|
||||
* much better ui for hiding folder columns
|
||||
|
||||
## bugfixes
|
||||
* now possible to upload files larger than 697 GiB
|
||||
* technically a [breaking change](https://github.com/9001/copyparty#breaking-changes) if you wrote your own up2k client
|
||||
* please let me know if you did because that's awesome
|
||||
* several macos issues due to hardcoded syscall numbers
|
||||
* sfx: fix python 3.12 support (forbids nullbytes in source code)
|
||||
* use ctypes to discover network config -- fixes grapheneos, non-english windows
|
||||
* detect firefox showing stale markdown documents in the editor
|
||||
* detect+ban password bruteforcing on ftp too
|
||||
* http 206 failing on empty files
|
||||
* incorrect header timestamps on non-english locales
|
||||
* remind ftp clients that you cannot cd into an image file -- fixes kde dolphin
|
||||
* ux fixes
|
||||
* uploader survives running into inaccessible folders
|
||||
* middleclick documents in the textviewer sidebar to open in a new tab
|
||||
* playing really long audio files (1 week or more) would spinlock the browser
|
||||
|
||||
## other changes
|
||||
* autodetect max number of clients based on OS limits
|
||||
* `-nc` is probably no longer necessary when running behind a reverse-proxy
|
||||
* allow/try playing mkv files in chrome
|
||||
* markdown documents returned as plaintext unless `?v`
|
||||
* only compress `-lo` logfiles if filename ends with `.xz`
|
||||
* changed sfx compression from bz2 to gz
|
||||
* startup is slightly faster
|
||||
* better compatibility with embedded linux
|
||||
* copyparty64.exe -- 64bit edition for [running inside WinPE](https://user-images.githubusercontent.com/241032/205454984-e6b550df-3c49-486d-9267-1614078dd0dd.png)
|
||||
* which was an actual feature request, believe it or not!
|
||||
* more attempts at avoiding the [firefox fd leak](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500)
|
||||
* if you are uploading many small files and the browser keeps crashing, use chrome instead
|
||||
* or the commandline client, which is now available for download straight from copyparty
|
||||
* control-panel --> `connect` --> `up2k.py`
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2022-1013-1937 `v1.4.6` wav2opus
|
||||
|
||||
|
||||
22
docs/cursed-usecases/README.md
Normal file
22
docs/cursed-usecases/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
insane ways to use copyparty
|
||||
|
||||
|
||||
## wireless keyboard
|
||||
|
||||
problem: you wanna control mpv or whatever software from the couch but you don't have a wireless keyboard
|
||||
|
||||
"solution": load some custom javascript which renders a virtual keyboard on the upload UI and each keystroke is actually an upload which gets picked up by a dummy metadata parser which forwards the keystrokes into xdotool
|
||||
|
||||
[no joke, this actually exists and it wasn't even my idea or handiwork (thx steen)](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js)
|
||||
|
||||
|
||||
## appxsvc tarpit
|
||||
|
||||
problem: `svchost.exe` is using 100% of a cpu core, and upon further inspection (`procmon`) it is `wsappx` desperately trying to install something, repeatedly reading a file named `AppxManifest.xml` and messing with an sqlite3 database
|
||||
|
||||
"solution": create a virtual filesystem which is intentionally slow and trick windows into reading it from there instead
|
||||
|
||||
* create a file called `AppxManifest.xml` and put something dumb in it
|
||||
* serve the file from a copyparty instance with `--rsp-slp=9` so every request will hang for 9 sec
|
||||
* `net use m: http://127.0.0.1:3993/` (mount copyparty using the windows-native webdav client)
|
||||
* `mklink /d c:\windows\systemapps\microsoftwindows.client.cbs_cw5n1h2txyewy\AppxManifest.xml m:\AppxManifest.xml`
|
||||
@@ -3,7 +3,13 @@
|
||||
* top
|
||||
* [future plans](#future-plans) - some improvement ideas
|
||||
* [design](#design)
|
||||
* [up2k](#up2k) - quick outline of the up2k protocol
|
||||
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
|
||||
* [http api](#http-api)
|
||||
* [read](#read)
|
||||
* [write](#write)
|
||||
* [admin](#admin)
|
||||
* [general](#general)
|
||||
* [assumptions](#assumptions)
|
||||
* [mdns](#mdns)
|
||||
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
|
||||
@@ -35,7 +41,7 @@ some improvement ideas
|
||||
|
||||
## up2k
|
||||
|
||||
quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
|
||||
quick outline of the up2k protocol, see [uploading](https://github.com/9001/copyparty#uploading) for the web-client
|
||||
* the up2k client splits a file into an "optimal" number of chunks
|
||||
* 1 MiB each, unless that becomes more than 256 chunks
|
||||
* tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
|
||||
@@ -121,7 +127,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
|
||||
| method | params | result |
|
||||
|--|--|--|
|
||||
| GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
|
||||
| POST | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |
|
||||
|
||||
| method | params | body | result |
|
||||
|--|--|--|--|
|
||||
@@ -131,7 +137,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
|
||||
| mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL |
|
||||
| mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json |
|
||||
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
|
||||
| GET | `?delete` | | delete URL recursively |
|
||||
| POST | `?delete` | | delete URL recursively |
|
||||
| jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively |
|
||||
| uPOST | | `msg=foo` | send message `foo` into server log |
|
||||
| mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL |
|
||||
|
||||
560
docs/versus.md
Normal file
560
docs/versus.md
Normal file
@@ -0,0 +1,560 @@
|
||||
# alternatives to copyparty
|
||||
|
||||
copyparty compared against all similar software i've bumped into
|
||||
|
||||
there is probably some unintentional bias so please submit corrections
|
||||
|
||||
currently up to date with [awesome-selfhosted](https://github.com/awesome-selfhosted/awesome-selfhosted) but that probably won't last
|
||||
|
||||
|
||||
## toc
|
||||
|
||||
* top
|
||||
* [recommendations](#recommendations)
|
||||
* [feature comparisons](#feature-comparisons)
|
||||
* [general](#general)
|
||||
* [file transfer](#file-transfer)
|
||||
* [protocols and client support](#protocols-and-client-support)
|
||||
* [server configuration](#server-configuration)
|
||||
* [server capabilities](#server-capabilities)
|
||||
* [client features](#client-features)
|
||||
* [integration](#integration)
|
||||
* [another matrix](#another-matrix)
|
||||
* [reviews](#reviews)
|
||||
* [copyparty](#copyparty)
|
||||
* [hfs2](#hfs2)
|
||||
* [hfs3](#hfs3)
|
||||
* [nextcloud](#nextcloud)
|
||||
* [seafile](#seafile)
|
||||
* [rclone](#rclone)
|
||||
* [dufs](#dufs)
|
||||
* [chibisafe](#chibisafe)
|
||||
* [kodbox](#kodbox)
|
||||
* [filebrowser](#filebrowser)
|
||||
* [filegator](#filegator)
|
||||
* [updog](#updog)
|
||||
* [goshs](#goshs)
|
||||
* [gimme-that](#gimme-that)
|
||||
* [ass](#ass)
|
||||
* [linx](#linx)
|
||||
* [briefly considered](#briefly-considered)
|
||||
|
||||
|
||||
# recommendations
|
||||
|
||||
* [kodbox](https://github.com/kalcaddle/kodbox) ([review](#kodbox)) appears to be a fantastic alternative if you're not worried about running chinese software, with several advantages over copyparty
|
||||
* but anything you want to share must be moved into the kodbox filesystem
|
||||
* [seafile](https://github.com/haiwen/seafile) ([review](#seafile)) and [nextcloud](https://github.com/nextcloud/server) ([review](#nextcloud)) could be decent alternatives if you need something heavier than copyparty
|
||||
* but their [license](https://snyk.io/learn/agpl-license/) is [problematic](https://opensource.google/documentation/reference/using/agpl-policy)
|
||||
* and copyparty is way better at uploads in particular (resumable, accelerated)
|
||||
* and anything you want to share must be moved into the respective filesystems
|
||||
* [filebrowser](https://github.com/filebrowser/filebrowser) ([review](#filebrowser)) and [dufs](https://github.com/sigoden/dufs) ([review](#dufs)) are simpler copyparties but with a settings gui
|
||||
* has some of the same strengths of copyparty, being portable and able to work with an existing folder structure
|
||||
* ...but copyparty is better at uploads + some other things
|
||||
|
||||
|
||||
# feature comparisons
|
||||
|
||||
```
|
||||
<&Kethsar> copyparty is very much bloat ed, so yeah
|
||||
```
|
||||
|
||||
the table headers in the matrixes below are the different softwares, with a quick review of each software in the next section
|
||||
|
||||
the softwares,
|
||||
* `a` = [copyparty](https://github.com/9001/copyparty)
|
||||
* `b` = [hfs2](https://github.com/rejetto/hfs2)
|
||||
* `c` = [hfs3](https://www.rejetto.com/hfs/)
|
||||
* `d` = [nextcloud](https://github.com/nextcloud/server)
|
||||
* `e` = [seafile](https://github.com/haiwen/seafile)
|
||||
* `f` = [rclone](https://github.com/rclone/rclone), specifically `rclone serve webdav .`
|
||||
* `g` = [dufs](https://github.com/sigoden/dufs)
|
||||
* `h` = [chibisafe](https://github.com/chibisafe/chibisafe)
|
||||
* `i` = [kodbox](https://github.com/kalcaddle/kodbox)
|
||||
* `j` = [filebrowser](https://github.com/filebrowser/filebrowser)
|
||||
* `k` = [filegator](https://github.com/filegator/filegator)
|
||||
|
||||
some softwares not in the matrixes,
|
||||
* [updog](#updog)
|
||||
* [goshs](#goshs)
|
||||
* [gimme-that](#gimme-that)
|
||||
* [ass](#ass)
|
||||
* [linx](#linx)
|
||||
|
||||
symbol legend,
|
||||
* `█` = absolutely
|
||||
* `╱` = partially
|
||||
* `•` = maybe?
|
||||
* ` ` = nope
|
||||
|
||||
|
||||
## general
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| intuitive UX | | ╱ | █ | █ | █ | | █ | █ | █ | █ | █ |
|
||||
| config GUI | | █ | █ | █ | █ | | | █ | █ | █ | |
|
||||
| good documentation | | | | █ | █ | █ | █ | | | █ | █ |
|
||||
| runs on iOS | ╱ | | | | | ╱ | | | | | |
|
||||
| runs on Android | █ | | | | | █ | | | | | |
|
||||
| runs on WinXP | █ | █ | | | | █ | | | | | |
|
||||
| runs on Windows | █ | █ | █ | █ | █ | █ | █ | ╱ | █ | █ | █ |
|
||||
| runs on Linux | █ | ╱ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| runs on Macos | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| runs on FreeBSD | █ | | | • | █ | █ | █ | • | █ | █ | |
|
||||
| portable binary | █ | █ | █ | | | █ | █ | | | █ | |
|
||||
| zero setup, just go | █ | █ | █ | | | ╱ | █ | | | █ | |
|
||||
| android app | ╱ | | | █ | █ | | | | | | |
|
||||
| iOS app | | | | █ | █ | | | | | | |
|
||||
|
||||
* `zero setup` = you can get a mostly working setup by just launching the app, without having to install any software or configure whatever
|
||||
* `a`/copyparty remarks:
|
||||
* no gui for server settings; only for client-side stuff
|
||||
* can theoretically run on iOS / iPads using [iSH](https://ish.app/), but only the iPad will offer sufficient multitasking i think
|
||||
* [android app](https://f-droid.org/en/packages/me.ocv.partyup/) is for uploading only
|
||||
* `b`/hfs2 runs on linux through wine
|
||||
* `f`/rclone must be started with the command `rclone serve webdav .` or similar
|
||||
* `h`/chibisafe has undocumented windows support
|
||||
|
||||
|
||||
## file transfer
|
||||
|
||||
*the thing that copyparty is actually kinda good at*
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| download folder as zip | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
|
||||
| download folder as tar | █ | | | | | | | | | █ | |
|
||||
| upload | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| parallel uploads | █ | | | █ | █ | | • | | █ | | █ |
|
||||
| resumable uploads | █ | | | | | | | | █ | | █ |
|
||||
| upload segmenting | █ | | | | | | | █ | █ | | █ |
|
||||
| upload acceleration | █ | | | | | | | | █ | | █ |
|
||||
| upload verification | █ | | | █ | █ | | | | █ | | |
|
||||
| upload deduplication | █ | | | | █ | | | | █ | | |
|
||||
| upload a 999 TiB file | █ | | | | █ | █ | • | | █ | | █ |
|
||||
| keep last-modified time | █ | | | █ | █ | █ | | | | | |
|
||||
| upload rules | ╱ | ╱ | ╱ | ╱ | ╱ | | | ╱ | ╱ | | ╱ |
|
||||
| ┗ max disk usage | █ | █ | | | █ | | | | █ | | |
|
||||
| ┗ max filesize | █ | | | | | | | █ | | | █ |
|
||||
| ┗ max items in folder | █ | | | | | | | | | | |
|
||||
| ┗ max file age | █ | | | | | | | | █ | | |
|
||||
| ┗ max uploads over time | █ | | | | | | | | | | |
|
||||
| ┗ compress before write | █ | | | | | | | | | | |
|
||||
| ┗ randomize filename | █ | | | | | | | █ | █ | | |
|
||||
| ┗ mimetype reject-list | ╱ | | | | | | | | • | ╱ | |
|
||||
| ┗ extension reject-list | ╱ | | | | | | | █ | • | ╱ | |
|
||||
| checksums provided | | | | █ | █ | | | | █ | ╱ | |
|
||||
| cloud storage backend | ╱ | ╱ | ╱ | █ | █ | █ | ╱ | | | ╱ | █ |
|
||||
|
||||
* `upload segmenting` = files are sliced into chunks, making it possible to upload files larger than 100 MiB on cloudflare for example
|
||||
|
||||
* `upload acceleration` = each file can be uploaded using several TCP connections, which can offer a huge speed boost over huge distances / on flaky connections -- like the good old [download accelerators](https://en.wikipedia.org/wiki/GetRight) except in reverse
|
||||
|
||||
* `upload verification` = uploads are checksummed or otherwise confirmed to have been transferred correctly
|
||||
|
||||
* `checksums provided` = when downloading a file from the server, the file's checksum is provided for verification client-side
|
||||
|
||||
* `cloud storage backend` = able to serve files from (and write to) s3 or similar cloud services; `╱` means the software can do this with some help from `rclone mount` as a bridge
|
||||
|
||||
* `a`/copyparty can reject uploaded files (based on complex conditions), for example [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py)
|
||||
* `j`/filebrowser remarks:
|
||||
* can provide checksums for single files on request
|
||||
* can probably do extension/mimetype rejection similar to copyparty
|
||||
* `k`/filegator download-as-zip is not streaming; it creates the full zipfile before download can start
|
||||
|
||||
|
||||
## protocols and client support
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| serve https | █ | | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| serve webdav | █ | | | █ | █ | █ | █ | | █ | | |
|
||||
| serve ftp | █ | | | | | █ | | | | | |
|
||||
| serve ftps | █ | | | | | █ | | | | | |
|
||||
| serve sftp | | | | | | █ | | | | | |
|
||||
| serve smb/cifs | ╱ | | | | | █ | | | | | |
|
||||
| serve dlna | | | | | | █ | | | | | |
|
||||
| listen on unix-socket | | | | █ | █ | | █ | █ | █ | | █ |
|
||||
| zeroconf | █ | | | | | | | | | | |
|
||||
| supports netscape 4 | ╱ | | | | | █ | | | | | • |
|
||||
| ...internet explorer 6 | ╱ | █ | | █ | | █ | | | | | • |
|
||||
| mojibake filenames | █ | | | • | • | █ | █ | • | • | • | |
|
||||
| undecodable filenames | █ | | | • | • | █ | | • | • | | |
|
||||
|
||||
* `webdav` = protocol convenient for mounting a remote server as a local filesystem; see zeroconf:
|
||||
* `zeroconf` = the server announces itself on the LAN, [automatically appearing](https://user-images.githubusercontent.com/241032/215344737-0eae8d98-9496-4256-9aa8-cd2f6971810d.png) on other zeroconf-capable devices
|
||||
* `mojibake filenames` = filenames decoded with the wrong codec and then reencoded (usually to utf-8), so `宇多田ヒカル` might look like `ëFæ╜ôcâqâJâï`
|
||||
* `undecodable filenames` = pure binary garbage which cannot be parsed as utf-8
|
||||
* you can successfully play `$'\355\221'` with mpv through mounting a remote copyparty server with rclone, pog
|
||||
* `a`/copyparty remarks:
|
||||
* extremely minimal samba/cifs server
|
||||
* netscape 4 / ie6 support is mostly listed as a joke altho some people have actually found it useful ([ie4 tho](https://user-images.githubusercontent.com/241032/118192791-fb31fe00-b446-11eb-9647-898ea8efc1f7.png))
|
||||
|
||||
|
||||
## server configuration
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| config from cmd args | █ | | | | | █ | █ | | | █ | |
|
||||
| config files | █ | █ | █ | ╱ | ╱ | █ | | █ | | █ | • |
|
||||
| runtime config reload | █ | █ | █ | | | | | █ | █ | █ | █ |
|
||||
| same-port http / https | █ | | | | | | | | | | |
|
||||
| listen multiple ports | █ | | | | | | | | | | |
|
||||
| virtual file system | █ | █ | █ | | | | █ | | | | |
|
||||
| reverse-proxy ok | █ | | █ | █ | █ | █ | █ | █ | • | • | • |
|
||||
| folder-rproxy ok | █ | | | | █ | █ | | • | • | • | • |
|
||||
|
||||
* `folder-rproxy` = reverse-proxying without dedicating an entire (sub)domain, using a subfolder instead
|
||||
|
||||
|
||||
## server capabilities
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| accounts | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ | █ |
|
||||
| single-sign-on | | | | █ | █ | | | | • | | |
|
||||
| token auth | | | | █ | █ | | | █ | | | |
|
||||
| per-volume permissions | █ | █ | █ | █ | █ | █ | █ | | █ | █ | ╱ |
|
||||
| per-folder permissions | ╱ | | | █ | █ | | █ | | █ | █ | ╱ |
|
||||
| per-file permissions | | | | █ | █ | | █ | | █ | | |
|
||||
| per-file passwords | █ | | | █ | █ | | █ | | █ | | |
|
||||
| unmap subfolders | █ | | | | | | █ | | | █ | ╱ |
|
||||
| index.html blocks list | | | | | | | █ | | | • | |
|
||||
| write-only folders | █ | | | | | | | | | | █ |
|
||||
| files stored as-is | █ | █ | █ | █ | | █ | █ | | | █ | █ |
|
||||
| file versioning | | | | █ | █ | | | | | | |
|
||||
| file encryption | | | | █ | █ | █ | | | | | |
|
||||
| file indexing | █ | | █ | █ | █ | | | █ | █ | █ | |
|
||||
| ┗ per-volume db | █ | | • | • | • | | | • | • | | |
|
||||
| ┗ db stored in folder | █ | | | | | | | • | • | █ | |
|
||||
| ┗ db stored out-of-tree | █ | | █ | █ | █ | | | • | • | █ | |
|
||||
| ┗ existing file tree | █ | | █ | | | | | | | █ | |
|
||||
| file action event hooks | █ | | | | | | | | | █ | |
|
||||
| one-way folder sync | █ | | | █ | █ | █ | | | | | |
|
||||
| full sync | | | | █ | █ | | | | | | |
|
||||
| speed throttle | | █ | █ | | | █ | | | █ | | |
|
||||
| anti-bruteforce | █ | █ | █ | █ | █ | | | | • | | |
|
||||
| dyndns updater | | █ | | | | | | | | | |
|
||||
| self-updater | | | █ | | | | | | | | |
|
||||
| log rotation | █ | | █ | █ | █ | | | • | █ | | |
|
||||
| upload tracking / log | █ | █ | • | █ | █ | | | █ | █ | | |
|
||||
| curl-friendly ls | █ | | | | | | | | | | |
|
||||
| curl-friendly upload | █ | | | | | █ | █ | • | | | |
|
||||
|
||||
* `unmap subfolders` = "shadowing"; mounting a local folder in the middle of an existing filesystem tree in order to disable access below that path
|
||||
* `files stored as-is` = uploaded files are trivially readable from the server HDD, not sliced into chunks or in weird folder structures or anything like that
|
||||
* `db stored in folder` = filesystem index can be written to a database file inside the folder itself
|
||||
* `db stored out-of-tree` = filesystem index can be stored some place else, not necessarily inside the shared folders
|
||||
* `existing file tree` = will index any existing files it finds
|
||||
* `file action event hooks` = run script before/after upload, move, rename, ...
|
||||
* `one-way folder sync` = like rsync, optionally deleting unexpected files at target
|
||||
* `full sync` = stateful, dropbox-like sync
|
||||
* `curl-friendly ls` = returns a [sortable plaintext folder listing](https://user-images.githubusercontent.com/241032/215322619-ea5fd606-3654-40ad-94ee-2bc058647bb2.png) when curled
|
||||
* `curl-friendly upload` = uploading with curl is just `curl -T some.bin http://.../`
|
||||
* `a`/copyparty remarks:
|
||||
* one-way folder sync from local to server can be done efficiently with [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py), or with webdav and conventional rsync
|
||||
* can hot-reload config files (with just a few exceptions)
|
||||
* can set per-folder permissions if that folder is made into a separate volume, so there is configuration overhead
|
||||
* [event hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) ([discord](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png), [desktop](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) inspired by filebrowser, as well as the more complex [media parser](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) alternative
|
||||
* upload history can be visualized using [partyjournal](https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py)
|
||||
* `k`/filegator remarks:
|
||||
* `per-* permissions` -- can limit a user to one folder and its subfolders
|
||||
* `unmap subfolders` -- can globally filter a list of paths
|
||||
|
||||
|
||||
## client features
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ---------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| single-page app | █ | | █ | █ | █ | | | █ | █ | █ | █ |
|
||||
| themes | █ | █ | | █ | | | | | █ | | |
|
||||
| directory tree nav | █ | ╱ | | | █ | | | | █ | | ╱ |
|
||||
| multi-column sorting | █ | | | | | | | | | | |
|
||||
| thumbnails | █ | | | ╱ | ╱ | | | █ | █ | ╱ | |
|
||||
| ┗ image thumbnails | █ | | | █ | █ | | | █ | █ | █ | |
|
||||
| ┗ video thumbnails | █ | | | █ | █ | | | | █ | | |
|
||||
| ┗ audio spectrograms | █ | | | | | | | | | | |
|
||||
| audio player | █ | | | █ | █ | | | | █ | ╱ | |
|
||||
| ┗ gapless playback | █ | | | | | | | | • | | |
|
||||
| ┗ audio equalizer | █ | | | | | | | | | | |
|
||||
| ┗ waveform seekbar | █ | | | | | | | | | | |
|
||||
| ┗ OS integration | █ | | | | | | | | | | |
|
||||
| ┗ transcode to lossy | █ | | | | | | | | | | |
|
||||
| video player | █ | | | █ | █ | | | | █ | █ | |
|
||||
| ┗ video transcoding | | | | | | | | | █ | | |
|
||||
| audio BPM detector | █ | | | | | | | | | | |
|
||||
| audio key detector | █ | | | | | | | | | | |
|
||||
| search by path / name | █ | █ | █ | █ | █ | | █ | | █ | █ | ╱ |
|
||||
| search by date / size | █ | | | | █ | | | █ | █ | | |
|
||||
| search by bpm / key | █ | | | | | | | | | | |
|
||||
| search by custom tags | | | | | | | | █ | █ | | |
|
||||
| search in file contents | | | | █ | █ | | | | █ | | |
|
||||
| search by custom parser | █ | | | | | | | | | | |
|
||||
| find local file | █ | | | | | | | | | | |
|
||||
| undo recent uploads | █ | | | | | | | | | | |
|
||||
| create directories | █ | | | █ | █ | ╱ | █ | █ | █ | █ | █ |
|
||||
| image viewer | █ | | | █ | █ | | | | █ | █ | █ |
|
||||
| markdown viewer | █ | | | | █ | | | | █ | ╱ | ╱ |
|
||||
| markdown editor | █ | | | | █ | | | | █ | ╱ | ╱ |
|
||||
| readme.md in listing | █ | | | █ | | | | | | | |
|
||||
| rename files | █ | █ | █ | █ | █ | ╱ | █ | | █ | █ | █ |
|
||||
| batch rename | █ | | | | | | | | █ | | |
|
||||
| cut / paste files | █ | █ | | █ | █ | | | | █ | | |
|
||||
| move files | █ | █ | | █ | █ | | █ | | █ | █ | █ |
|
||||
| delete files | █ | █ | | █ | █ | ╱ | █ | █ | █ | █ | █ |
|
||||
| copy files | | | | | █ | | | | █ | █ | █ |
|
||||
|
||||
* `single-page app` = multitasking; possible to continue navigating while uploading
|
||||
* `audio player » os-integration` = use the [lockscreen](https://user-images.githubusercontent.com/241032/142711926-0700be6c-3e31-47b3-9928-53722221f722.png) or [media hotkeys](https://user-images.githubusercontent.com/241032/215347492-b4250797-6c90-4e09-9a4c-721edf2fb15c.png) to play/pause, prev/next song
|
||||
* `search by custom tags` = ability to tag files through the UI and search by those
|
||||
* `find local file` = drop a file into the browser to see if it exists on the server
|
||||
* `a`/copyparty has teeny-tiny skips playing gapless albums depending on audio codec (opus best)
|
||||
* `b`/hfs2 has a very basic directory tree view, not showing sibling folders
|
||||
* `f`/rclone can do some file management (mkdir, rename, delete) when hosting through webdav
|
||||
* `j`/filebrowser has a plaintext viewer/editor
|
||||
* `k`/filegator directory tree is a modal window
|
||||
|
||||
|
||||
## integration
|
||||
|
||||
| feature / software | a | b | c | d | e | f | g | h | i | j | k |
|
||||
| ----------------------- | - | - | - | - | - | - | - | - | - | - | - |
|
||||
| OS alert on upload | █ | | | | | | | | | ╱ | |
|
||||
| discord | █ | | | | | | | | | ╱ | |
|
||||
| ┗ announce uploads | █ | | | | | | | | | | |
|
||||
| ┗ custom embeds | | | | | | | | | | | |
|
||||
| sharex | █ | | | █ | | █ | ╱ | █ | | | |
|
||||
| flameshot | | | | | | █ | | | | | |
|
||||
|
||||
* sharex `╱` = yes, but does not provide example sharex config
|
||||
* `a`/copyparty remarks:
|
||||
* `OS alert on upload` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
|
||||
* `discord » announce uploads` available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
|
||||
* `j`/filebrowser can probably pull those off with command runners similar to copyparty
|
||||
|
||||
|
||||
## another matrix
|
||||
|
||||
| software / feature | lang | lic | size |
|
||||
| ------------------ | ------ | ------ | ------ |
|
||||
| copyparty | python | █ mit | 0.6 MB |
|
||||
| hfs2 | delphi | ░ gpl3 | 2 MB |
|
||||
| hfs3 | ts | ░ gpl3 | 36 MB |
|
||||
| nextcloud | php | ‼ agpl | • |
|
||||
| seafile | c | ‼ agpl | • |
|
||||
| rclone | c | █ mit | 45 MB |
|
||||
| dufs | rust | █ apl2 | 2.5 MB |
|
||||
| chibisafe | ts | █ mit | • |
|
||||
| kodbox | php | ░ gpl3 | 92 MB |
|
||||
| filebrowser | go | █ apl2 | 20 MB |
|
||||
| filegator | php | █ mit | • |
|
||||
| updog | python | █ mit | 17 MB |
|
||||
| goshs | go | █ mit | 11 MB |
|
||||
| gimme-that | python | █ mit | 4.8 MB |
|
||||
| ass | ts | █ isc | • |
|
||||
| linx | go | ░ gpl3 | 20 MB |
|
||||
|
||||
* `size` = binary (if available) or installed size of program and its dependencies
|
||||
* copyparty size is for the [standalone python](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) file; the [windows exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is **6 MiB**
|
||||
|
||||
|
||||
# reviews
|
||||
|
||||
* ✅ are advantages over copyparty
|
||||
* ⚠️ are disadvantages
|
||||
|
||||
## [copyparty](https://github.com/9001/copyparty)
|
||||
* resumable uploads which are verified server-side
|
||||
* upload segmenting allows for potentially much faster uploads on some connections, and terabyte-sized files even on cloudflare
|
||||
* both of the above are surprisingly uncommon features
|
||||
* very cross-platform (python, no dependencies)
|
||||
|
||||
## [hfs2](https://github.com/rejetto/hfs2)
|
||||
* the OG, the legend
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ windows-only
|
||||
* ✅ config GUI
|
||||
* vfs with gui config, per-volume permissions
|
||||
* starting to show its age, hence the rewrite:
|
||||
|
||||
## [hfs3](https://www.rejetto.com/hfs/)
|
||||
* nodejs; cross-platform
|
||||
* vfs with gui config, per-volume permissions
|
||||
* still early development, let's revisit later
|
||||
|
||||
## [nextcloud](https://github.com/nextcloud/server)
|
||||
* php, mariadb
|
||||
* ⚠️ isolated on-disk file hierarchy in per-user folders
|
||||
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ no write-only / upload-only folders
|
||||
* ⚠️ http/webdav only; no ftp, zeroconf
|
||||
* ⚠️ less awesome music player
|
||||
* ⚠️ doesn't run on android or ipads
|
||||
* ✅ great ui/ux
|
||||
* ✅ config gui
|
||||
* ✅ apps (android / iphone)
|
||||
* copyparty: android upload-only app
|
||||
* ✅ more granular permissions (per-file)
|
||||
* ✅ search: fulltext indexing of file contents
|
||||
* ✅ webauthn passwordless authentication
|
||||
|
||||
## [seafile](https://github.com/haiwen/seafile)
|
||||
* c, mariadb
|
||||
* ⚠️ [isolated on-disk file hierarchy](https://manual.seafile.com/maintain/seafile_fsck/), incompatible with other software
|
||||
* *much worse than nextcloud* in that regard
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ no write-only / upload-only folders
|
||||
* ⚠️ http/webdav only; no ftp, zeroconf
|
||||
* ⚠️ less awesome music player
|
||||
* ⚠️ doesn't run on android or ipads
|
||||
* ✅ great ui/ux
|
||||
* ✅ config gui
|
||||
* ✅ apps (android / iphone)
|
||||
* copyparty: android upload-only app
|
||||
* ✅ more granular permissions (per-file)
|
||||
* ✅ search: fulltext indexing of file contents
|
||||
|
||||
## [rclone](https://github.com/rclone/rclone)
|
||||
* nice standalone c program
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ no web-ui, just a server / downloader / uploader utility
|
||||
* ✅ works with almost any protocol, cloud provider
|
||||
* ⚠️ copyparty's webdav server is slightly faster
|
||||
|
||||
## [dufs](https://github.com/sigoden/dufs)
|
||||
* rust; cross-platform (windows, linux, macos)
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ doesn't support crazy filenames
|
||||
* ✅ per-url access control (copyparty is per-volume)
|
||||
* basic but really snappy ui
|
||||
* upload, rename, delete, ... see feature matrix
|
||||
|
||||
## [chibisafe](https://github.com/chibisafe/chibisafe)
|
||||
* nodejs; recommends docker
|
||||
* *it has upload segmenting!*
|
||||
* ⚠️ but uploads are still not resumable / accelerated / integrity-checked
|
||||
* ⚠️ not portable
|
||||
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
|
||||
* ⚠️ http/webdav only; no ftp or zeroconf
|
||||
* ✅ pretty ui
|
||||
* ✅ control panel for server settings and user management
|
||||
* ✅ user registration
|
||||
* ✅ searchable image tags; delete by tag
|
||||
* ✅ browser extension to upload files to the server
|
||||
* ✅ reject uploads by file extension
|
||||
* copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
|
||||
* ✅ token auth (api keys)
|
||||
|
||||
## [kodbox](https://github.com/kalcaddle/kodbox)
|
||||
* this thing is insane
|
||||
* php; [docker](https://hub.docker.com/r/kodcloud/kodbox)
|
||||
* *upload segmenting, acceleration, and integrity checking!*
|
||||
* ⚠️ but uploads are not resumable(?)
|
||||
* ⚠️ not portable
|
||||
* ⚠️ isolated on-disk file hierarchy, incompatible with other software
|
||||
* ⚠️ http/webdav only; no ftp or zeroconf
|
||||
* ⚠️ some parts of the GUI are in chinese
|
||||
* ✅ fantastic ui/ux
|
||||
* ✅ control panel for server settings and user management
|
||||
* ✅ file tags; file discussions!?
|
||||
* ✅ video transcoding
|
||||
* ✅ unzip uploaded archives
|
||||
* ✅ IDE with syntax highlighting
|
||||
* ✅ wysiwyg editor for openoffice files
|
||||
|
||||
## [filebrowser](https://github.com/filebrowser/filebrowser)
|
||||
* go; cross-platform (windows, linux, mac)
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ http only; no webdav / ftp / zeroconf
|
||||
* ⚠️ doesn't support crazy filenames
|
||||
* ⚠️ no directory tree nav
|
||||
* ⚠️ limited file search
|
||||
* ✅ settings gui
|
||||
* ✅ good ui/ux
|
||||
* ⚠️ but no directory tree for navigation
|
||||
* ✅ user signup
|
||||
* ✅ command runner / remote shell
|
||||
* supposed to have write-only folders but couldn't get it to work
|
||||
|
||||
## [filegator](https://github.com/filegator/filegator)
|
||||
* php; cross-platform (windows, linux, mac)
|
||||
* ⚠️ http only; no webdav / ftp / zeroconf
|
||||
* ⚠️ does not support symlinks
|
||||
* ⚠️ expensive download-as-zip feature
|
||||
* ⚠️ doesn't support crazy filenames
|
||||
* ⚠️ limited file search
|
||||
* *it has upload segmenting and acceleration*
|
||||
* ⚠️ but uploads are still not integrity-checked
|
||||
|
||||
## [updog](https://github.com/sc0tfree/updog)
|
||||
* python; cross-platform
|
||||
* basic directory listing with upload feature
|
||||
* ⚠️ less portable
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ no vfs; single folder, single account
|
||||
|
||||
## [goshs](https://github.com/patrickhener/goshs)
|
||||
* go; cross-platform (windows, linux, mac)
|
||||
* ⚠️ no vfs; single folder, single account
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ✅ cool clipboard widget
|
||||
* copyparty: the markdown editor is an ok substitute
|
||||
* read-only and upload-only modes (same as copyparty's write-only)
|
||||
* https, webdav
|
||||
|
||||
## [gimme-that](https://github.com/nejdetckenobi/gimme-that)
|
||||
* python, but with c dependencies
|
||||
* ⚠️ no vfs; single folder, multiple accounts
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ⚠️ weird folder structure for uploads
|
||||
* ✅ clamav antivirus check on upload! neat
|
||||
* optional max-filesize, os-notification on uploads
|
||||
* copyparty: os-notification available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/notify.py)
|
||||
|
||||
## [ass](https://github.com/tycrek/ass)
|
||||
* nodejs; recommends docker
|
||||
* ⚠️ not portable
|
||||
* ⚠️ upload only; no browser
|
||||
* ⚠️ upload through sharex only; no web-ui
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* ✅ token auth
|
||||
* ✅ gps metadata stripping
|
||||
* copyparty: possible with [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py)
|
||||
* ✅ discord integration (custom embeds, upload webhook)
|
||||
* copyparty: [upload webhook plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/discord-announce.py)
|
||||
* ✅ reject uploads by mimetype
|
||||
* copyparty: can reject uploads [by extension](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-extension.py) or [mimetype](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/reject-mimetype.py) using plugins
|
||||
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that
|
||||
* ✅ custom 404 pages
|
||||
|
||||
## [linx](https://github.com/ZizzyDizzyMC/linx-server/)
|
||||
* originally [andreimarcu/linx-server](https://github.com/andreimarcu/linx-server) but development has ended
|
||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||
* some of its unique features have been added to copyparty as former linx users have migrated
|
||||
* file expiration timers, filename randomization
|
||||
* ✅ password-protected files
|
||||
* copyparty: password-protected folders + filekeys to skip the folder password seem to cover most usecases
|
||||
* ✅ file deletion keys
|
||||
* ✅ download files as torrents
|
||||
* ✅ remote uploads (send a link to the server and it downloads it)
|
||||
* copyparty: available as [a plugin](https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py)
|
||||
* ✅ can use S3 as storage backend; copyparty relies on rclone-mount for that
|
||||
|
||||
|
||||
# briefly considered
|
||||
* [pydio](https://github.com/pydio/cells): python/agpl3, looks great, fantastic ux -- but needs mariadb, systemwide install
|
||||
* [gossa](https://github.com/pldubouilh/gossa): go/mit, minimalistic, basic file upload, text editor, mkdir and rename (no delete/move)
|
||||
* [h5ai](https://larsjung.de/h5ai/): php/mit, slick ui, image viewer, directory tree, no upload feature
|
||||
@@ -3,9 +3,9 @@ FROM alpine:3.16
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=4.2.3 \
|
||||
ver_marked=4.2.5 \
|
||||
ver_mde=2.18.0 \
|
||||
ver_codemirror=5.65.10 \
|
||||
ver_codemirror=5.65.11 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ font-family: 'fa';
|
||||
font-style: normal;
|
||||
font-weight: 400;
|
||||
font-display: block;
|
||||
src: url("/.cpr/deps/mini-fa.woff") format("woff");
|
||||
src: url("mini-fa.woff") format("woff");
|
||||
}
|
||||
|
||||
.fa,
|
||||
|
||||
@@ -266,6 +266,14 @@ necho() {
|
||||
cp -p "$f2" "$f1"
|
||||
); done
|
||||
|
||||
# resolve symlinks on windows
|
||||
[ "$OSTYPE" = msys ] &&
|
||||
(cd ..; git ls-files -s | awk '/^120000/{print$4}') |
|
||||
while IFS= read -r x; do
|
||||
[ $(wc -l <"$x") -gt 1 ] && continue
|
||||
(cd "${x%/*}"; cp -p "../$(cat "${x##*/}")" ${x##*/})
|
||||
done
|
||||
|
||||
# insert asynchat
|
||||
mkdir copyparty/vend
|
||||
for n in asyncore.py asynchat.py; do
|
||||
|
||||
@@ -10,7 +10,10 @@ if possible, for performance and security reasons, please use this instead:
|
||||
https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py
|
||||
"""
|
||||
|
||||
print(v.replace("\n", "\n▒▌ ")[1:] + "\n")
|
||||
try:
|
||||
print(v.replace("\n", "\n▒▌ ")[1:] + "\n")
|
||||
except:
|
||||
print(v.replace("\n", "\n|| ")[1:] + "\n")
|
||||
|
||||
|
||||
import re
|
||||
|
||||
@@ -429,7 +429,7 @@ def run_i(ld):
|
||||
|
||||
def run_s(ld):
|
||||
# fmt: off
|
||||
c = "import sys,runpy;" + "".join(['sys.path.insert(0,r"' + x + '");' for x in ld]) + 'runpy.run_module("copyparty",run_name="__main__")'
|
||||
c = "import sys,runpy;" + "".join(['sys.path.insert(0,r"' + x.replace("\\", "/") + '");' for x in ld]) + 'runpy.run_module("copyparty",run_name="__main__")'
|
||||
c = [str(x) for x in [sys.executable, "-c", c] + list(sys.argv[1:])]
|
||||
# fmt: on
|
||||
msg("\n", c, "\n")
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
for f in README.md docs/devnotes.md; do
|
||||
for f in README.md docs/devnotes.md docs/versus.md; do
|
||||
|
||||
cat $f | awk '
|
||||
function pr() {
|
||||
@@ -20,6 +20,8 @@ cat $f | awk '
|
||||
/^#/{
|
||||
lv=length($1);
|
||||
sub(/[^ ]+ /,"");
|
||||
sub(/\[/,"");
|
||||
sub(/\]\([^)]+\)/,"");
|
||||
bab=$0;
|
||||
gsub(/ /,"-",bab);
|
||||
gsub(/\./,"",bab);
|
||||
@@ -31,9 +33,9 @@ cat $f | awk '
|
||||
{pr()}
|
||||
' > toc
|
||||
|
||||
grep -E '^#+ [^ ]+ toc$' -B1000 -A2 <$f >p1
|
||||
grep -E '^#+ *[^ ]+ toc$' -B1000 -A2 <$f >p1
|
||||
|
||||
h2="$(awk '/^#+ [^ ]+ toc$/{o=1;next} o&&/^#/{print;exit}' <$f)"
|
||||
h2="$(awk '/^#+ *[^ ]+ toc$/{o=1;next} o&&/^#/{print;exit}' <$f)"
|
||||
|
||||
grep -F "$h2" -B2 -A999999 <$f >p2
|
||||
|
||||
|
||||
1
setup.py
1
setup.py
@@ -107,6 +107,7 @@ args = {
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: Jython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
|
||||
@@ -54,6 +54,8 @@ the values in the `ex:` columns are linkified to `example.com/$value`
|
||||
|
||||
and the table can be sorted by clicking the headers
|
||||
|
||||
the sandbox also makes `location` unavailable but there is `loc` instead; this website's url is <big><big><b id="whereami">foo</b></big></big>
|
||||
|
||||
the difference is that with `copyparty_pre` you'll probably break various copyparty features but if you use `copyparty_post` then future copyparty versions will probably break you
|
||||
|
||||
|
||||
@@ -136,6 +138,10 @@ render(dom) {
|
||||
}
|
||||
},
|
||||
render2(dom) {
|
||||
// loc == window.location except available inside sandbox
|
||||
ebi('whereami').innerHTML = loc.href;
|
||||
|
||||
// this one also works because util.js gets pulled into the sandbox
|
||||
window.makeSortable(dom.getElementsByTagName('table')[0]);
|
||||
}
|
||||
```
|
||||
|
||||
@@ -98,7 +98,7 @@ class Cfg(Namespace):
|
||||
def __init__(self, a=None, v=None, c=None):
|
||||
ka = {}
|
||||
|
||||
ex = "daw dav_inf dav_mac e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js ihead magic nid nih no_acode no_athumb no_dav no_del no_dupe no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nw xdev xlink xvol"
|
||||
ex = "daw dav_inf dav_mac e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nw xdev xlink xvol"
|
||||
ka.update(**{k: False for k in ex.split()})
|
||||
|
||||
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
|
||||
@@ -110,14 +110,18 @@ class Cfg(Namespace):
|
||||
ex = "df loris re_maxage rproxy rsp_slp s_wr_slp theme themes turbo"
|
||||
ka.update(**{k: 0 for k in ex.split()})
|
||||
|
||||
ex = "doctitle favico html_head log_fk mth textfiles"
|
||||
ex = "doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles R RS SR"
|
||||
ka.update(**{k: "" for k in ex.split()})
|
||||
|
||||
ex = "xad xar xau xbd xbr xbu xm"
|
||||
ka.update(**{k: [] for k in ex.split()})
|
||||
|
||||
super(Cfg, self).__init__(
|
||||
a=a or [],
|
||||
v=v or [],
|
||||
c=c,
|
||||
E=E,
|
||||
dbd="wal",
|
||||
s_wr_sz=512 * 1024,
|
||||
unpost=600,
|
||||
u2sort="s",
|
||||
@@ -192,4 +196,5 @@ class VHttpConn(object):
|
||||
self.nbyte = 0
|
||||
self.ico = None
|
||||
self.thumbcli = None
|
||||
self.freshen_pwd = 0.0
|
||||
self.t0 = time.time()
|
||||
|
||||
Reference in New Issue
Block a user