Compare commits
127 Commits
e2dec2510f
da5ad2ab9f
eaa4b04a22
3051b13108
4c4e48bab7
01a3eb29cb
73f7249c5f
18c6559199
e66ece993f
0686860624
24ce46b380
a49bf81ff2
64501fd7f1
db3c0b0907
edda117a7a
cdface0dd5
be6afe2d3a
9163780000
d7aa7dfe64
f1decb531d
99399c698b
1f5f42f216
9082c4702f
6cedcfbf77
8a631f045e
a6a2ee5b6b
016708276c
4cfdc4c513
0f257c9308
c8104b6e78
1a1d731043
c5a000d2ae
94d1924fa9
6c1cf68bca
395af051bd
42fd66675e
21a3f3699b
d168b2acac
2ce8233921
697a4fa8a4
2f83c6c7d1
127f414e9c
33c4ccffab
bafe7f5a09
baf41112d1
a90dde94e1
7dfbfc7227
b10843d051
520ac8f4dc
537a6e50e9
2d0cbdf1a8
5afb562aa3
db069c3d4a
fae40c7e2f
0c43b592dc
2ab8924e2d
0e31cfa784
8f7ffcf350
9c8507a0fd
e9b2cab088
d3ccacccb1
df386c8fbc
4d15dd6e17
56a0499636
10fc4768e8
2b63d7d10d
1f177528c1
fc3bbb70a3
ce3cab0295
c784e5285e
2bf9055cae
8aba5aed4f
0ce7cf5e10
96edcbccd7
4603afb6de
56317b00af
cacec9c1f3
44ee07f0b2
6a8d5e1731
d9962f65b3
119e88d87b
71d9e010d9
5718caa957
efd8a32ed6
b22d700e16
ccdacea0c4
4bdcbc1cb5
833c6cf2ec
dd6dbdd90a
63013cc565
912402364a
159f51b12b
7678a91b0e
b13899c63d
3a0d882c5e
cb81f0ad6d
518bacf628
ca63b03e55
cecef88d6b
7ffd805a03
a7e2a0c981
2a570bb4ca
5ca8f0706d
a9b4436cdc
5f91999512
9f000beeaf
ff0a71f212
22dfc6ec24
48147c079e
d715479ef6
fc8298c468
e94ca5dc91
114b71b751
b2770a2087
cba1878bb2
a2e037d6af
65a2b6a223
9ed799e803
c1c0ecca13
ee62836383
705f598b1a
414de88925
53ffd245dd
cf1b756206
22b58e31ef
b7f9bf5a28
aba680b6c2
README.md (89 changed lines)
@@ -47,6 +47,8 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
 * [shares](#shares) - share a file or folder by creating a temporary link
 * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
+* [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
+* [recent uploads](#recent-uploads) - list all recent uploads
 * [media player](#media-player) - plays almost every audio format there is
 * [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
 * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
@@ -80,6 +82,7 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
 * [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
 * [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
+* [ip auth](#ip-auth) - autologin based on IP range (CIDR)
 * [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
 * [user-changeable passwords](#user-changeable-passwords) - if permitted, users can change their own passwords
 * [using the cloud as storage](#using-the-cloud-as-storage) - connecting to an aws s3 bucket and similar
@@ -218,7 +221,7 @@ also see [comparison to similar software](./docs/versus.md)
 * upload
   * ☑ basic: plain multipart, ie6 support
   * ☑ [up2k](#uploading): js, resumable, multithreaded
-    * **no filesize limit!** ...unless you use Cloudflare, then it's 383.9 GiB
+    * **no filesize limit!** even on Cloudflare
   * ☑ stash: simple PUT filedropper
   * ☑ filename randomizer
   * ☑ write-only folders
@@ -337,6 +340,9 @@ same order here too
 
 * [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
 
+* [Chrome issue 383568268](https://issues.chromium.org/issues/383568268) -- filereaders in webworkers can OOM / crash the browser-tab
+  * copyparty has a workaround which seems to work well enough
+
 * [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
 
 * Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
@@ -426,7 +432,7 @@ configuring accounts/volumes with arguments:
 
 permissions:
 * `r` (read): browse folder contents, download files, download as zip/tar, see filekeys/dirkeys
-* `w` (write): upload files, move files *into* this folder
+* `w` (write): upload files, move/copy files *into* this folder
 * `m` (move): move files/folders *from* this folder
 * `d` (delete): delete files/folders
 * `.` (dots): user can ask to show dotfiles in directory listings
@@ -506,7 +512,8 @@ the browser has the following hotkeys (always qwerty)
 * `ESC` close various things
 * `ctrl-K` delete selected files/folders
 * `ctrl-X` cut selected files/folders
-* `ctrl-V` paste
+* `ctrl-C` copy selected files/folders to clipboard
+* `ctrl-V` paste (move/copy)
 * `Y` download selected files
 * `F2` [rename](#batch-rename) selected file/folder
 * when a file/folder is selected (in not-grid-view):
@@ -575,6 +582,7 @@ click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path
 
 press `g` or `田` to toggle grid-view instead of the file listing and `t` toggles icons / thumbnails
 * can be made default globally with `--grid` or per-volume with volflag `grid`
+* enable by adding `?imgs` to a link, or disable with `?imgs=0`
 
 
 
@@ -653,7 +661,7 @@ up2k has several advantages:
 * uploads resume if you reboot your browser or pc, just upload the same files again
 * server detects any corruption; the client reuploads affected chunks
 * the client doesn't upload anything that already exists on the server
-* no filesize limit unless imposed by a proxy, for example Cloudflare, which blocks uploads over 383.9 GiB
+* no filesize limit, even when a proxy limits the request size (for example Cloudflare)
 * much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
 * the last-modified timestamp of the file is preserved
 
@@ -689,6 +697,8 @@ note that since up2k has to read each file twice, `[🎈] bup` can *theoretically*
 
 if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
 
+if the server is behind a proxy which imposes a request-size limit, you can configure up2k to sneak below the limit with server-option `--u2sz` (the default is 96 MiB to support Cloudflare)
+
 
 ### file-search
 
@@ -708,7 +718,7 @@ files go into `[ok]` if they exist (and you get a link to where it is), otherwise
 
 ### unpost
 
-undo/delete accidental uploads
+undo/delete accidental uploads using the `[🧯]` tab in the UI
 
 
 
@@ -752,10 +762,11 @@ file selection: click somewhere on the line (not the link itself), then:
 * shift-click another line for range-select
 
 * cut: select some files and `ctrl-x`
+* copy: select some files and `ctrl-c`
 * paste: `ctrl-v` in another folder
 * rename: `F2`
 
-you can move files across browser tabs (cut in one tab, paste in another)
+you can copy/move files across browser tabs (cut/copy in one tab, paste in another)
 
 
 ## shares
@@ -842,6 +853,41 @@ or a mix of both:
 the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
 
 
+## rss feeds
+
+monitor a folder with your RSS reader, optionally recursive
+
+must be enabled per-volume with volflag `rss` or globally with `--rss`
+
+the feed includes itunes metadata for use with podcast readers such as [AntennaPod](https://antennapod.org/)
+
+a feed example: https://cd.ocv.me/a/d2/d22/?rss&fext=mp3
+
+url parameters:
+
+* `pw=hunter2` for password auth
+* `recursive` to also include subfolders
+* `title=foo` changes the feed title (default: folder name)
+* `fext=mp3,opus` only include mp3 and opus files (default: all)
+* `nf=30` only show the first 30 results (default: 250)
+* `sort=m` sort by mtime (file last-modified), newest first (default)
+  * `u` = upload-time; NOTE: non-uploaded files have upload-time `0`
+  * `n` = filename
+  * `a` = filesize
+  * uppercase = reverse-sort; `M` = oldest file first
+
+
+## recent uploads
+
+list all recent uploads by clicking "show recent uploads" in the controlpanel
+
+will show uploader IP and upload-time if the visitor has the admin permission
+
+* global-option `--ups-when` makes upload-time visible to all users, and not just admins
+
+note that the [🧯 unpost](#unpost) feature is better suited for viewing *your own* recent uploads, as it includes the option to undo/delete them
+
+
 ## media player
 
 plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
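For reference, the feed parameters above are plain query-string fields; a minimal sketch of composing them with the stdlib (the base URL is the example feed from the hunk, everything else follows the documented parameter names):

```python
# compose an RSS feed URL from the parameters documented in the hunk above
from urllib.parse import urlencode

base = "https://cd.ocv.me/a/d2/d22/"  # the example feed from the README
params = {"fext": "mp3,opus", "nf": 30, "sort": "m", "pw": "hunter2"}

print(base + "?rss&" + urlencode(params))
# https://cd.ocv.me/a/d2/d22/?rss&fext=mp3%2Copus&nf=30&sort=m&pw=hunter2
```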
@@ -1066,11 +1112,12 @@ using the GUI (winXP or later):
 * on winXP only, click the `Sign up for online storage` hyperlink instead and put the URL there
 * providing your password as the username is recommended; the password field can be anything or empty
 
-known client bugs:
+the webdav client that's built into windows has the following list of bugs; you can avoid all of these by connecting with rclone instead:
 * win7+ doesn't actually send the password to the server when reauthenticating after a reboot unless you first try to login with an incorrect password and then switch to the correct password
   * or just type your password into the username field instead to get around it entirely
 * connecting to a folder which allows anonymous read will make writing impossible, as windows has decided it doesn't need to login
   * workaround: connect twice; first to a folder which requires auth, then to the folder you actually want, and leave both of those mounted
+  * or set the server-option `--dav-auth` to force password-auth for all webdav clients
 * win7+ may open a new tcp connection for every file and sometimes forgets to close them, eventually needing a reboot
   * maybe NIC-related (??), happens with win10-ltsc on e1000e but not virtio
 * windows cannot access folders which contain filenames with invalid unicode or forbidden characters (`<>:"/\|?*`), or names ending with `.`
@@ -1237,7 +1284,7 @@ note:
 
 ### exclude-patterns
 
-to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
+to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash '\.iso$'` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
 * initial indexing is way faster, especially when the volume is on a network disk
 * makes it impossible to [file-search](#file-search)
 * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
@@ -1248,6 +1295,8 @@ similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=`
 
 if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
 
+to exclude certain filepaths from search-results, use `--srch-excl` or volflag `srch_excl` instead of `--no-idx`, for example `--srch-excl 'password|logs/[0-9]'`
+
 ### filesystem guards
 
 avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, skipping any symlinks or bind-mounts to another HDD for example
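A quick sanity-check of the two example patterns above, using the stdlib regex engine they are handed to:

```python
# the patterns are the README examples: nohash skips content-hashing for
# matching paths, srch_excl hides matching paths from search results
import re

nohash = re.compile(r"\.iso$")
srch_excl = re.compile(r"password|logs/[0-9]")

print(bool(nohash.search("isos/win10.iso")))      # True  -> indexed without hashing
print(bool(nohash.search("isos/readme.txt")))     # False -> hashed as usual
print(bool(srch_excl.search("logs/2024-12-06")))  # True  -> hidden from search results
```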
@@ -1432,13 +1481,31 @@ redefine behavior with plugins ([examples](./bin/handlers/))
 replace 404 and 403 errors with something completely different (that's it for now)
 
 
+## ip auth
+
+autologin based on IP range (CIDR), using the global-option `--ipu`
+
+for example, if everyone with an IP that starts with `192.168.123` should automatically log in as the user `spartacus`, then you can either specify `--ipu=192.168.123.0/24=spartacus` as a commandline option, or put this in a config file:
+
+```yaml
+[global]
+  ipu: 192.168.123.0/24=spartacus
+```
+
+repeat the option to map additional subnets
+
+**be careful with this one!** if you have a reverseproxy, then you definitely want to make sure you have [real-ip](#real-ip) configured correctly, and it's probably a good idea to nullmap the reverseproxy's IP just in case; so if your reverseproxy is sending requests from `172.24.27.9` then that would be `--ipu=172.24.27.9/32=`
+
+
 ## identity providers
 
 replace copyparty passwords with oauth and such
 
 you can disable the built-in password-based login system, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions
 
-a popular choice is [Authelia](https://www.authelia.com/) (config-file based), another one is [authentik](https://goauthentik.io/) (GUI-based, more complex)
+* the regular config-defined users will be used as a fallback for requests which don't include a valid (trusted) IdP username header
+
+some popular identity providers are [Authelia](https://www.authelia.com/) (config-file based) and [authentik](https://goauthentik.io/) (GUI-based, more complex)
 
 there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik) which is hopefully a good starting point (alternatively see [./docs/idp.md](./docs/idp.md) if you're the DIY type)
 
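To illustrate what an `--ipu` mapping does conceptually (this is a standalone sketch, not copyparty's implementation), matching a client IP against CIDR ranges to pick a user is a few lines of stdlib:

```python
# conceptual sketch of --ipu: map client IPs to usernames by CIDR range
import ipaddress

ipu = {
    "192.168.123.0/24": "spartacus",  # the subnet from the README example
    "172.24.27.9/32": "",             # nullmap the reverseproxy, as advised above
}

def user_for(ip: str) -> str:
    addr = ipaddress.ip_address(ip)
    for net, user in ipu.items():
        if addr in ipaddress.ip_network(net):
            return user
    return ""  # no autologin

print(user_for("192.168.123.7"))  # spartacus
print(user_for("172.24.27.9"))    # (empty; the proxy itself never autologs in)
```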
@@ -1640,6 +1707,7 @@ scrape_configs:
 currently the following metrics are available,
 * `cpp_uptime_seconds` time since last copyparty restart
 * `cpp_boot_unixtime_seconds` same but as an absolute timestamp
+* `cpp_active_dl` number of active downloads
 * `cpp_http_conns` number of open http(s) connections
 * `cpp_http_reqs` number of http(s) requests handled
 * `cpp_sus_reqs` number of 403/422/malicious requests
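A minimal scrape of the new counter could look like the sketch below; note that the `/.cpr/metrics` path and the default port 3923 are assumptions based on copyparty's defaults, not shown in this hunk:

```python
# fetch the prometheus text exposition and print the counter added above;
# the endpoint path/port are assumptions, adjust to your deployment
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:3923/.cpr/metrics") as r:
    for ln in r.read().decode("utf-8").splitlines():
        if ln.startswith("cpp_active_dl"):
            print(ln)
```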
@@ -1889,6 +1957,9 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 | **ie4** and **netscape** 4.0 | can browse, upload with `?b=u`, auth with `&pw=wark` |
 | **ncsa mosaic** 2.7 | does not get a pass, [pic1](https://user-images.githubusercontent.com/241032/174189227-ae816026-cf6f-4be5-a26e-1b3b072c1b2f.png) - [pic2](https://user-images.githubusercontent.com/241032/174189225-5651c059-5152-46e9-ac26-7e98e497901b.png) |
 | **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
+| **nintendo 3ds** | can browse, upload, view thumbnails (thx bnjmn) |
 
+<p align="center"><img src="https://github.com/user-attachments/assets/88deab3d-6cad-4017-8841-2f041472b853" /></p>
+
 
 # client examples
@@ -2,7 +2,7 @@ standalone programs which are executed by copyparty when an event happens (upload...)
 
 these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info
 
-run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbr/xar/xbd/xad/xban)
+run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbc/xac/xbr/xar/xbd/xad/xban)
 
 > **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead
 
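Since the hooks above receive either nothing, a filepath, or a JSON message, a minimal after-upload (`xau`) hook under the filepath calling convention is just a script that reads `argv[1]`; a sketch (the log path is an arbitrary placeholder):

```python
#!/usr/bin/env python3
# minimal sketch of an xau (after-upload) event hook: copyparty invokes the
# program and, in the filepath convention described above, passes the
# affected file as the first argument
import sys

def main():
    fp = sys.argv[1] if len(sys.argv) > 1 else ""
    with open("/tmp/uploads.log", "a") as f:  # placeholder destination
        f.write(fp + "\n")

if __name__ == "__main__":
    main()
```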
@@ -393,7 +393,8 @@ class Gateway(object):
         if r.status != 200:
             self.closeconn()
             info("http error %s reading dir %r", r.status, web_path)
-            raise FuseOSError(errno.ENOENT)
+            err = errno.ENOENT if r.status == 404 else errno.EIO
+            raise FuseOSError(err)
 
         ctype = r.getheader("Content-Type", "")
         if ctype == "application/json":

@@ -1128,7 +1129,7 @@ def main():
 
     # dircache is always a boost,
     # only want to disable it for tests etc,
-    cdn = 9  # max num dirs; 0=disable
+    cdn = 24  # max num dirs; keep larger than max dir depth; 0=disable
     cds = 1  # numsec until an entry goes stale
 
     where = "local directory"
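The first FUSE hunk above stops reporting every HTTP failure as "no such file"; isolated, the new mapping is:

```python
# map the server's HTTP status onto a sensible errno: 404 really is a
# missing file, anything else is better surfaced as an I/O error
import errno

def fuse_errno(status: int) -> int:
    return errno.ENOENT if status == 404 else errno.EIO

print(fuse_errno(404))  # 2 (ENOENT, no such file or directory)
print(fuse_errno(500))  # 5 (EIO, input/output error)
```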
bin/u2c.py (231 changed lines)
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals
 
-S_VERSION = "2.1"
-S_BUILD_DT = "2024-09-23"
+S_VERSION = "2.7"
+S_BUILD_DT = "2024-12-06"
 
 """
 u2c.py: upload to copyparty

@@ -62,6 +62,9 @@ else:
 
     unicode = str
 
 
+WTF8 = "replace" if PY2 else "surrogateescape"
+
+
 VT100 = platform.system() != "Windows"
 

@@ -151,6 +154,7 @@ class HCli(object):
         self.tls = tls
         self.verify = ar.te or not ar.td
         self.conns = []
+        self.hconns = []
         if tls:
             import ssl
 

@@ -170,7 +174,7 @@ class HCli(object):
             "User-Agent": "u2c/%s" % (S_VERSION,),
         }
 
-    def _connect(self):
+    def _connect(self, timeout):
         args = {}
         if PY37:
             args["blocksize"] = 1048576

@@ -182,9 +186,11 @@ class HCli(object):
         if self.ctx:
             args = {"context": self.ctx}
 
-        return C(self.addr, self.port, timeout=999, **args)
+        return C(self.addr, self.port, timeout=timeout, **args)
 
     def req(self, meth, vpath, hdrs, body=None, ctype=None):
+        now = time.time()
+
         hdrs.update(self.base_hdrs)
         if self.ar.a:
             hdrs["PW"] = self.ar.a

@@ -195,7 +201,11 @@ class HCli(object):
             0 if not body else body.len if hasattr(body, "len") else len(body)
         )
 
-        c = self.conns.pop() if self.conns else self._connect()
+        # large timeout for handshakes (safededup)
+        conns = self.hconns if ctype == MJ else self.conns
+        while conns and self.ar.cxp < now - conns[0][0]:
+            conns.pop(0)[1].close()
+        c = conns.pop()[1] if conns else self._connect(999 if ctype == MJ else 128)
         try:
             c.request(meth, vpath, body, hdrs)
             if PY27:

@@ -204,8 +214,15 @@ class HCli(object):
             rsp = c.getresponse()
 
             data = rsp.read()
-            self.conns.append(c)
+            conns.append((time.time(), c))
             return rsp.status, data.decode("utf-8")
+        except http_client.BadStatusLine:
+            if self.ar.cxp > 4:
+                t = "\nWARNING: --cxp probably too high; reducing from %d to 4"
+                print(t % (self.ar.cxp,))
+                self.ar.cxp = 4
+            c.close()
+            raise
         except:
             c.close()
             raise

@@ -228,7 +245,7 @@ class File(object):
         self.lmod = lmod  # type: float
 
         self.abs = os.path.join(top, rel)  # type: bytes
-        self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace")  # type: str
+        self.name = self.rel.split(b"/")[-1].decode("utf-8", WTF8)  # type: str
 
         # set by get_hashlist
         self.cids = []  # type: list[tuple[str, int, int]]  # [ hash, ofs, sz ]

@@ -267,10 +284,41 @@ class FileSlice(object):
                 raise Exception(9)
             tlen += clen
 
-        self.len = tlen
+        self.len = self.tlen = tlen
         self.cdr = self.car + self.len
         self.ofs = 0  # type: int
-        self.f = open(file.abs, "rb", 512 * 1024)
+
+        self.f = None
+        self.seek = self._seek0
+        self.read = self._read0
+
+    def subchunk(self, maxsz, nth):
+        if self.tlen <= maxsz:
+            return -1
+
+        if not nth:
+            self.car0 = self.car
+            self.cdr0 = self.cdr
+
+        self.car = self.car0 + maxsz * nth
+        if self.car >= self.cdr0:
+            return -2
+
+        self.cdr = self.car + min(self.cdr0 - self.car, maxsz)
+        self.len = self.cdr - self.car
+        self.seek(0)
+        return nth
+
+    def unsub(self):
+        self.car = self.car0
+        self.cdr = self.cdr0
+        self.len = self.tlen
+
+    def _open(self):
+        self.seek = self._seek
+        self.read = self._read
+
+        self.f = open(self.file.abs, "rb", 512 * 1024)
         self.f.seek(self.car)
 
         # https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python

@@ -282,10 +330,15 @@ class FileSlice(object):
             except:
                 pass  # py27 probably
 
+    def close(self, *a, **ka):
+        return  # until _open
+
     def tell(self):
         return self.ofs
 
-    def seek(self, ofs, wh=0):
+    def _seek(self, ofs, wh=0):
+        assert self.f  # !rm
+
         if wh == 1:
             ofs = self.ofs + ofs
         elif wh == 2:

@@ -299,12 +352,22 @@ class FileSlice(object):
         self.ofs = ofs
         self.f.seek(self.car + ofs)
 
-    def read(self, sz):
+    def _read(self, sz):
+        assert self.f  # !rm
+
         sz = min(sz, self.len - self.ofs)
         ret = self.f.read(sz)
         self.ofs += len(ret)
         return ret
 
+    def _seek0(self, ofs, wh=0):
+        self._open()
+        return self.seek(ofs, wh)
+
+    def _read0(self, sz):
+        self._open()
+        return self.read(sz)
+
 
 class MTHash(object):
     def __init__(self, cores):

@@ -557,13 +620,17 @@ def walkdir(err, top, excl, seen):
     for ap, inf in sorted(statdir(err, top)):
         if excl.match(ap):
             continue
-        yield ap, inf
         if stat.S_ISDIR(inf.st_mode):
+            yield ap, inf
             try:
                 for x in walkdir(err, ap, excl, seen):
                     yield x
             except Exception as ex:
                 err.append((ap, str(ex)))
+        elif stat.S_ISREG(inf.st_mode):
+            yield ap, inf
+        else:
+            err.append((ap, "irregular filetype 0%o" % (inf.st_mode,)))
 
 
 def walkdirs(err, tops, excl):

@@ -609,11 +676,12 @@ def walkdirs(err, tops, excl):
 
 # mostly from copyparty/util.py
 def quotep(btxt):
+    # type: (bytes) -> bytes
     quot1 = quote(btxt, safe=b"/")
     if not PY2:
         quot1 = quot1.encode("ascii")
 
-    return quot1.replace(b" ", b"+")  # type: ignore
+    return quot1.replace(b" ", b"%20")  # type: ignore
 
 
 # from copyparty/util.py

@@ -641,7 +709,7 @@ def up2k_chunksize(filesize):
     while True:
         for mul in [1, 2]:
             nchunks = math.ceil(filesize * 1.0 / chunksize)
-            if nchunks <= 256 or (chunksize >= 32 * 1024 * 1024 and nchunks < 4096):
+            if nchunks <= 256 or (chunksize >= 32 * 1024 * 1024 and nchunks <= 4096):
                 return chunksize
 
             chunksize += stepsize

@@ -720,7 +788,7 @@ def handshake(ar, file, search):
         url = file.url
     else:
         if b"/" in file.rel:
-            url = quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
+            url = quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8")
         else:
             url = ""
         url = ar.vtop + url

@@ -728,6 +796,7 @@ def handshake(ar, file, search):
     while True:
         sc = 600
         txt = ""
+        t0 = time.time()
         try:
             zs = json.dumps(req, separators=(",\n", ": "))
             sc, txt = web.req("POST", url, {}, zs.encode("utf-8"), MJ)

@@ -752,7 +821,9 @@ def handshake(ar, file, search):
             print("\nERROR: login required, or wrong password:\n%s" % (txt,))
             raise BadAuth()
 
-        eprint("handshake failed, retrying: %s\n %s\n\n" % (file.name, em))
+        t = "handshake failed, retrying: %s\n t0=%.3f t1=%.3f td=%.3f\n %s\n\n"
+        now = time.time()
+        eprint(t % (file.name, t0, now, now - t0, em))
         time.sleep(ar.cd)
 
     try:

@@ -763,15 +834,15 @@ def handshake(ar, file, search):
     if search:
         return r["hits"], False
 
-    file.url = r["purl"]
+    file.url = quotep(r["purl"].encode("utf-8", WTF8)).decode("utf-8")
     file.name = r["name"]
     file.wark = r["wark"]
 
     return r["hash"], r["sprs"]
 
 
-def upload(fsl, stats):
-    # type: (FileSlice, str) -> None
+def upload(fsl, stats, maxsz):
+    # type: (FileSlice, str, int) -> None
     """upload a range of file data, defined by one or more `cid` (chunk-hash)"""
 
     ctxt = fsl.cids[0]

@@ -789,7 +860,17 @@ def upload(fsl, stats, maxsz):
     if stats:
         headers["X-Up2k-Stat"] = stats
 
+    nsub = 0
     try:
-        sc, txt = web.req("POST", fsl.file.url, headers, fsl, MO)
+        while nsub != -1:
+            nsub = fsl.subchunk(maxsz, nsub)
+            if nsub == -2:
+                return
+            if nsub >= 0:
+                headers["X-Up2k-Subc"] = str(maxsz * nsub)
+                headers.pop(CLEN, None)
+                nsub += 1
+
+            sc, txt = web.req("POST", fsl.file.url, headers, fsl, MO)

@@ -803,7 +884,10 @@ def upload(fsl, stats, maxsz):
             if sc >= 400:
                 raise Exception("http %s: %s" % (sc, txt))
     finally:
-        fsl.f.close()
+        if fsl.f:
+            fsl.f.close()
+        if nsub != -1:
+            fsl.unsub()
 
 
 class Ctl(object):

@@ -869,8 +953,8 @@ class Ctl(object):
         self.hash_b = 0
         self.up_f = 0
         self.up_c = 0
-        self.up_b = 0
-        self.up_br = 0
+        self.up_b = 0  # num bytes handled
+        self.up_br = 0  # num bytes actually transferred
         self.uploader_busy = 0
         self.serialized = False
 

@@ -935,7 +1019,7 @@ class Ctl(object):
                 print(" %d up %s" % (ncs - nc, cid))
                 stats = "%d/0/0/%d" % (nf, self.nfiles - nf)
                 fslice = FileSlice(file, [cid])
-                upload(fslice, stats)
+                upload(fslice, stats, self.ar.szm)
 
             print(" ok!")
             if file.recheck:

@@ -949,8 +1033,8 @@ class Ctl(object):
             handshake(self.ar, file, False)
 
     def _fancy(self):
-        if VT100 and not self.ar.ns:
-            atexit.register(self.cleanup_vt100)
+        atexit.register(self.cleanup_vt100)
+        if VT100 and not self.ar.ns:
             ss.scroll_region(3)
 
         Daemon(self.hasher)

@@ -958,6 +1042,7 @@ class Ctl(object):
         Daemon(self.handshaker)
         Daemon(self.uploader)
 
+        last_sp = -1
         while True:
             with self.exit_cond:
                 self.exit_cond.wait(0.07)

@@ -996,6 +1081,12 @@ class Ctl(object):
             else:
                 txt = " "
 
+            if not VT100:  # OSC9;4 (taskbar-progress)
+                sp = int(self.up_b * 100 / self.nbytes) or 1
+                if last_sp != sp:
+                    last_sp = sp
+                    txt += "\033]9;4;1;%d\033\\" % (sp,)
+
             if not self.up_br:
                 spd = self.hash_b / ((time.time() - self.t0) or 1)
                 eta = (self.nbytes - self.hash_b) / (spd or 1)

@@ -1006,18 +1097,25 @@ class Ctl(object):
 
             spd = humansize(spd)
             self.eta = str(datetime.timedelta(seconds=int(eta)))
+            if eta > 2591999:
+                self.eta = self.eta.split(",")[0]  # truncate HH:MM:SS
             sleft = humansize(self.nbytes - self.up_b)
             nleft = self.nfiles - self.up_f
             tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
 
             t = "%s eta @ %s/s, %s, %d# left\033[K" % (self.eta, spd, sleft, nleft)
+            if not self.hash_b:
+                t = " now hashing..."
             eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
 
+        if self.ar.wlist:
+            self.at_hash = time.time() - self.t0
+
         if self.hash_b and self.at_hash:
             spd = humansize(self.hash_b / self.at_hash)
             eprint("\nhasher: %.2f sec, %s/s\n" % (self.at_hash, spd))
-        if self.up_b and self.at_up:
-            spd = humansize(self.up_b / self.at_up)
+        if self.up_br and self.at_up:
+            spd = humansize(self.up_br / self.at_up)
             eprint("upload: %.2f sec, %s/s\n" % (self.at_up, spd))
 
         if not self.recheck:

@@ -1028,7 +1126,10 @@ class Ctl(object):
             handshake(self.ar, file, False)
 
     def cleanup_vt100(self):
-        ss.scroll_region(None)
+        if VT100:
+            ss.scroll_region(None)
+        else:
+            eprint("\033]9;4;0\033\\")
         eprint("\033[J\033]0;\033\\")
 
     def cb_hasher(self, file, ofs):

@@ -1043,7 +1144,9 @@ class Ctl(object):
             isdir = stat.S_ISDIR(inf.st_mode)
             if self.ar.z or self.ar.drd:
                 rd = rel if isdir else os.path.dirname(rel)
-                srd = rd.decode("utf-8", "replace").replace("\\", "/")
+                srd = rd.decode("utf-8", "replace").replace("\\", "/").rstrip("/")
+                if srd:
+                    srd += "/"
                 if prd != rd:
                     prd = rd
                     ls = {}

@@ -1051,7 +1154,7 @@ class Ctl(object):
                         print(" ls ~{0}".format(srd))
                     zt = (
                         self.ar.vtop,
-                        quotep(rd.replace(b"\\", b"/")).decode("utf-8", "replace"),
+                        quotep(rd.replace(b"\\", b"/")).decode("utf-8"),
                     )
                     sc, txt = web.req("GET", "%s%s?ls<&dots" % zt, {})
                     if sc >= 400:

@@ -1060,13 +1163,16 @@ class Ctl(object):
                     j = json.loads(txt)
                     for f in j["dirs"] + j["files"]:
                         rfn = f["href"].split("?")[0].rstrip("/")
-                        ls[unquote(rfn.encode("utf-8", "replace"))] = f
+                        ls[unquote(rfn.encode("utf-8", WTF8))] = f
                 except Exception as ex:
                     print(" mkdir ~{0} ({1})".format(srd, ex))
 
                 if self.ar.drd:
                     dp = os.path.join(top, rd)
-                    lnodes = set(os.listdir(dp))
+                    try:
+                        lnodes = set(os.listdir(dp))
+                    except:
+                        lnodes = list(ls)  # fs eio; don't delete
                     if ptn:
                         zs = dp.replace(sep, b"/").rstrip(b"/") + b"/"
                         zls = [zs + x for x in lnodes]

@@ -1074,12 +1180,12 @@ class Ctl(object):
                         lnodes = [x.split(b"/")[-1] for x in zls]
                     bnames = [x for x in ls if x not in lnodes and x != b".hist"]
                     vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
-                    names = [x.decode("utf-8", "replace") for x in bnames]
-                    locs = [vpath + srd + "/" + x for x in names]
+                    names = [x.decode("utf-8", WTF8) for x in bnames]
+                    locs = [vpath + srd + x for x in names]
                     while locs:
                         req = locs
                         while req:
-                            print("DELETING ~%s/#%s" % (srd, len(req)))
+                            print("DELETING ~%s#%s" % (srd, len(req)))
                             body = json.dumps(req).encode("utf-8")
                             sc, txt = web.req(
                                 "POST", self.ar.url + "?delete", {}, body, MJ

@@ -1136,10 +1242,16 @@ class Ctl(object):
                     self.up_b = self.hash_b
 
             if self.ar.wlist:
+                vp = file.rel.decode("utf-8")
+                if self.ar.chs:
+                    zsl = [
+                        "%s %d %d" % (zsii[0], n, zsii[1])
+                        for n, zsii in enumerate(file.cids)
+                    ]
+                    print("chs: %s\n%s" % (vp, "\n".join(zsl)))
                 zsl = [self.ar.wsalt, str(file.size)] + [x[0] for x in file.kchunks]
                 zb = hashlib.sha512("\n".join(zsl).encode("utf-8")).digest()[:33]
                 wark = ub64enc(zb).decode("utf-8")
-                vp = file.rel.decode("utf-8")
                 if self.ar.jw:
                     print("%s %s" % (wark, vp))
                 else:

@@ -1177,6 +1289,7 @@ class Ctl(object):
                 self.q_upload.put(None)
                 return
 
+            chunksz = up2k_chunksize(file.size)
             upath = file.abs.decode("utf-8", "replace")
             if not VT100:
                 upath = upath.lstrip("\\?")

@@ -1236,9 +1349,14 @@ class Ctl(object):
                     file.up_c -= len(hs)
                     for cid in hs:
                         sz = file.kchunks[cid][1]
+                        self.up_br -= sz
                         self.up_b -= sz
                         file.up_b -= sz
 
+                if hs and not file.up_b:
+                    # first hs of this file; is this an upload resume?
+                    file.up_b = chunksz * max(0, len(file.kchunks) - len(hs))
+
                 file.ucids = hs
 
                 if not hs:

@@ -1252,7 +1370,7 @@ class Ctl(object):
                         c1 = c2 = ""
 
                     spd_h = humansize(file.size / file.t_hash, True)
-                    if file.up_b:
+                    if file.up_c:
                         t_up = file.t1_up - file.t0_up
                         spd_u = humansize(file.size / t_up, True)
 

@@ -1262,14 +1380,13 @@ class Ctl(object):
                         t = " found %s %s(%.2fs,%s/s)%s"
                         print(t % (upath, c1, file.t_hash, spd_h, c2))
                     else:
-                        kw = "uploaded" if file.up_b else " found"
+                        kw = "uploaded" if file.up_c else " found"
                         print("{0} {1}".format(kw, upath))
 
                 self._check_if_done()
                 continue
 
-            chunksz = up2k_chunksize(file.size)
-            njoin = (self.ar.sz * 1024 * 1024) // chunksz
+            njoin = self.ar.sz // chunksz
             cs = hs[:]
             while cs:
                 fsl = FileSlice(file, cs[:1])

@@ -1321,7 +1438,7 @@ class Ctl(object):
                 )
 
                 try:
-                    upload(fsl, stats)
+                    upload(fsl, stats, self.ar.szm)
                 except Exception as ex:
                     t = "upload failed, retrying: %s #%s+%d (%s)\n"
                     eprint(t % (file.name, cids[0][:8], len(cids) - 1, ex))

@@ -1365,7 +1482,7 @@ def main():
     cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2
     hcores = min(cores, 3)  # 4% faster than 4+ on py3.9 @ r5-4500U
 
-    ver = "{0} v{1} https://youtu.be/BIcOO6TLKaY".format(S_BUILD_DT, S_VERSION)
+    ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION)
     if "--version" in sys.argv:
         print(ver)
         return

@@ -1403,14 +1520,17 @@ source file/folder selection uses rsync syntax, meaning that:
 
     ap = app.add_argument_group("file-ID calculator; enable with url '-' to list warks (file identifiers) instead of upload/search")
     ap.add_argument("--wsalt", type=unicode, metavar="S", default="hunter2", help="salt to use when creating warks; must match server config")
+    ap.add_argument("--chs", action="store_true", help="verbose (print the hash/offset of each chunk in each file)")
     ap.add_argument("--jw", action="store_true", help="just identifier+filepath, not mtime/size too")
 
     ap = app.add_argument_group("performance tweaks")
     ap.add_argument("-j", type=int, metavar="CONNS", default=2, help="parallel connections")
     ap.add_argument("-J", type=int, metavar="CORES", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
     ap.add_argument("--sz", type=int, metavar="MiB", default=64, help="try to make each POST this big")
+    ap.add_argument("--szm", type=int, metavar="MiB", default=96, help="max size of each POST (default is cloudflare max)")
     ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
     ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
+    ap.add_argument("--cxp", type=float, metavar="SEC", default=57, help="assume http connections expired after SEConds")
     ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
     ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
     ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

@@ -1430,12 +1550,47 @@ source file/folder selection uses rsync syntax, meaning that:
     except:
         pass
 
+    # msys2 doesn't uncygpath absolute paths with whitespace
+    if not VT100:
+        zsl = []
+        for fn in ar.files:
+            if re.search("^/[a-z]/", fn):
+                fn = r"%s:\%s" % (fn[1:2], fn[3:])
+            zsl.append(fn.replace("/", "\\"))
+        ar.files = zsl
+
+    fok = []
+    fng = []
+    for fn in ar.files:
+        if os.path.exists(fn):
+            fok.append(fn)
+        elif VT100:
+            fng.append(fn)
+        else:
+            # windows leaves glob-expansion to the invoked process... okayyy let's get to work
+            from glob import glob
+
+            fns = glob(fn)
+            if fns:
+                fok.extend(fns)
+            else:
+                fng.append(fn)
+
+    if fng:
+        t = "some files/folders were not found:\n %s"
+        raise Exception(t % ("\n ".join(fng),))
+
+    ar.files = fok
+
     if ar.drd:
         ar.dr = True
 
     if ar.dr:
         ar.ow = True
 
+    ar.sz *= 1024 * 1024
+    ar.szm *= 1024 * 1024
+
     ar.x = "|".join(ar.x or [])
 
     setattr(ar, "wlist", ar.url == "-")
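A few of the u2c.py changes above are worth unpacking.

The `up2k_chunksize` hunk relaxes `nchunks < 4096` to `nchunks <= 4096`, so a file that splits into exactly 4096 chunks keeps the 32 MiB chunksize instead of growing past it. A runnable reconstruction; the 1 MiB / 512 KiB starting values and the `stepsize *= mul` step are assumptions filled in around the lines visible in the diff:

```python
# reconstruction of up2k_chunksize with a switch between the old and new
# boundary condition, to show the behavior change at exactly 4096 chunks
import math

def up2k_chunksize(filesize, strict=False):
    chunksize = 1024 * 1024  # assumed starting point
    stepsize = 512 * 1024    # assumed starting point
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            big = chunksize >= 32 * 1024 * 1024
            ok = nchunks < 4096 if strict else nchunks <= 4096  # old vs new
            if nchunks <= 256 or (big and ok):
                return chunksize
            chunksize += stepsize
            stepsize *= mul

sz = 4096 * 32 * 1024 * 1024  # exactly 4096 chunks of 32 MiB
print(up2k_chunksize(sz, strict=True) // (1024 * 1024))   # old: 48
print(up2k_chunksize(sz, strict=False) // (1024 * 1024))  # new: 32
```

The new `FileSlice.subchunk` machinery re-sends a slice that exceeds `--szm` as consecutive byte-windows, each its own POST with an `X-Up2k-Subc` offset header. Reduced to plain arithmetic (a sketch, not the class itself):

```python
# split the byte-range [car, cdr) into windows of at most maxsz bytes,
# mirroring what subchunk() computes one nth at a time
def subchunks(car, cdr, maxsz):
    n = 0
    while car + maxsz * n < cdr:
        lo = car + maxsz * n
        yield lo, min(lo + maxsz, cdr)
        n += 1

MiB = 1024 * 1024  # a 200 MiB slice under the 96 MiB --szm default -> 3 POSTs
print([(lo // MiB, hi // MiB) for lo, hi in subchunks(0, 200 * MiB, 96 * MiB)])
# [(0, 96), (96, 192), (192, 200)]
```

And the new `WTF8` constant: `surrogateescape` round-trips bytes that are not valid utf-8, which the previous `"replace"` handler silently destroyed, so odd filenames now survive the decode/encode cycle:

```python
# surrogateescape keeps undecodable bytes recoverable; "replace" loses them
raw = b"caf\xe9.txt"  # a latin-1 byte, not valid utf-8

s = raw.decode("utf-8", "surrogateescape")
assert s.encode("utf-8", "surrogateescape") == raw  # lossless round-trip

print(raw.decode("utf-8", "replace"))  # 'caf\ufffd.txt' -- original byte is gone
```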
@@ -1,6 +1,6 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.15.5"
+pkgver="1.16.5"
 pkgrel=1
 pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
 arch=("any")

@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags
 )
 source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("c380ad1d20787d80077123ced583d45bc26467386bbceac35436662f435a6b8c")
+sha256sums=("2830086bd872aaa5174c2ca73ba395439e85c883d85438263bd89521c5f37d9c")
 
 build() {
   cd "${srcdir}/${pkgname}-${pkgver}"
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.15.5/copyparty-sfx.py",
-  "version": "1.15.5",
-  "hash": "sha256-2JcXSbtyEn+EtpyQTcE9U4XuckVKvAowVGqBZ110Jt4="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.16.5/copyparty-sfx.py",
+  "version": "1.16.5",
+  "hash": "sha256-rfZ76ujA6bLXYW52qP2pK8gdwDdED91mLOF2gzquG8E="
 }
@@ -80,6 +80,7 @@ web/deps/prismd.css
 web/deps/scp.woff2
 web/deps/sha512.ac.js
 web/deps/sha512.hw.js
+web/iiam.gif
 web/md.css
 web/md.html
 web/md.js
@@ -90,6 +91,9 @@ web/mde.html
 web/mde.js
 web/msg.css
 web/msg.html
+web/rups.css
+web/rups.html
+web/rups.js
 web/shares.css
 web/shares.html
 web/shares.js
@@ -50,6 +50,8 @@ from .util import (
     PARTFTPY_VER,
     PY_DESC,
     PYFTPD_VER,
+    RAM_AVAIL,
+    RAM_TOTAL,
     SQLITE_VER,
     UNPLICATIONS,
     Daemon,
@@ -684,6 +686,8 @@ def get_sects():
 \033[36mxbu\033[35m executes CMD before a file upload starts
 \033[36mxau\033[35m executes CMD after a file upload finishes
 \033[36mxiu\033[35m executes CMD after all uploads finish and volume is idle
+\033[36mxbc\033[35m executes CMD before a file copy
+\033[36mxac\033[35m executes CMD after a file copy
 \033[36mxbr\033[35m executes CMD before a file rename/move
 \033[36mxar\033[35m executes CMD after a file rename/move
 \033[36mxbd\033[35m executes CMD before a file delete
@@ -874,8 +878,9 @@ def get_sects():
 use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
 
 \033[36m--ah-alg scrypt\033[0m # which is the same as:
-\033[36m--ah-alg scrypt,13,2,8,4\033[0m
-use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
+\033[36m--ah-alg scrypt,13,2,8,4,32\033[0m
+use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads,
+and allow using up to 32 MiB RAM (ram=cost*blksz roughly)
 
 \033[36m--ah-alg sha2\033[0m # which is the same as:
 \033[36m--ah-alg sha2,424242\033[0m
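The scrypt parameters map straight onto `hashlib.scrypt`; a minimal sketch of how they could be applied (this is not copyparty's actual hashing code, and the iteration loop plus the 24-byte dklen are assumptions):

```python
import hashlib

def ah_scrypt(pwd: bytes, salt: bytes, cost=13, its=2, blksz=8, thr=4, ram=32):
    for _ in range(its):  # feed each digest back in for the extra iterations
        pwd = hashlib.scrypt(pwd, salt=salt, n=2**cost, r=blksz, p=thr,
                             maxmem=ram * 1024 * 1024, dklen=24)
    return pwd
```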
@@ -1017,7 +1022,7 @@ def add_upload(ap):
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
     ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
     ap2.add_argument("--u2j", metavar="JOBS", type=int, default=2, help="web-client: number of file chunks to upload in parallel; 1 or 2 is good for low-latency (same-country) connections, 4-8 for android clients, 16 for cross-atlantic (max=64)")
-    ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for this size. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
+    ap2.add_argument("--u2sz", metavar="N,N,N", type=u, default="1,64,96", help="web-client: default upload chunksize (MiB); sets \033[33mmin,default,max\033[0m in the settings gui. Each HTTP POST will aim for \033[33mdefault\033[0m, and never exceed \033[33mmax\033[0m. Cloudflare max is 96. Big values are good for cross-atlantic but may increase HDD fragmentation on some FS. Disable this optimization with [\033[32m1,1,1\033[0m]")
     ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
     ap2.add_argument("--write-uplog", action="store_true", help="write POST reports to textfiles in working-directory")
 
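The `min,default,max` triple behaves like a clamp; a hedged sketch (the function and its name are illustrative, not the web-client's actual code):

```python
def pick_chunksize(spec="1,64,96", wanted=0.0):
    lo, df, hi = (float(x) for x in spec.split(","))
    return max(lo, min(wanted or df, hi))

print(pick_chunksize(wanted=128))  # 96.0, capped at the cloudflare-safe max
print(pick_chunksize())            # 64.0, the default chunksize
```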
@@ -1037,7 +1042,7 @@ def add_network(ap):
     else:
         ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
     ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
-    ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
+    ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=128.0, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
     ap2.add_argument("--s-rd-sz", metavar="B", type=int, default=256*1024, help="socket read size in bytes (indirectly affects filesystem writes; recommendation: keep equal-to or lower-than \033[33m--iobuf\033[0m)")
     ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
     ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0.0, help="debug: socket write delay in seconds")
@@ -1078,7 +1083,7 @@ def add_cert(ap, cert_path):
 def add_auth(ap):
     ses_db = os.path.join(E.cfg, "sessions.db")
     ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
-    ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
+    ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks if the request-header \033[33mHN\033[0m contains a username to associate the request with (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
     ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
     ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
     ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
@@ -1087,6 +1092,7 @@ def add_auth(ap):
     ap2.add_argument("--ses-db", metavar="PATH", type=u, default=ses_db, help="where to store the sessions database (if you run multiple copyparty instances, make sure they use different DBs)")
     ap2.add_argument("--ses-len", metavar="CHARS", type=int, default=20, help="session key length; default is 120 bits ((20//4)*4*6)")
     ap2.add_argument("--no-ses", action="store_true", help="disable sessions; use plaintext passwords in cookies")
+    ap2.add_argument("--ipu", metavar="CIDR=USR", type=u, action="append", help="users with IP matching \033[33mCIDR\033[0m are auto-authenticated as username \033[33mUSR\033[0m; example: [\033[32m172.16.24.0/24=dave\033[0m]")
 
 
 def add_chpw(ap):
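A sketch of what the new `--ipu` CIDR-to-username mapping could look like using the stdlib `ipaddress` module (illustrative only, not the actual implementation):

```python
import ipaddress

def parse_ipu(rules):  # rules like ["172.16.24.0/24=dave"]
    table = []
    for rule in rules or []:
        cidr, _, uname = rule.partition("=")
        table.append((ipaddress.ip_network(cidr, strict=False), uname))
    return table

def ipu_lookup(table, client_ip):
    ip = ipaddress.ip_address(client_ip)
    return next((u for net, u in table if ip in net), None)

table = parse_ipu(["172.16.24.0/24=dave"])
print(ipu_lookup(table, "172.16.24.7"))  # dave
print(ipu_lookup(table, "10.0.0.1"))     # None; fall back to regular auth
```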
@@ -1118,6 +1124,8 @@ def add_zc_mdns(ap):
     ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
     ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
     ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
+    ap2.add_argument("--zm-no-pe", action="store_true", help="mute parser errors (invalid incoming MDNS packets)")
+    ap2.add_argument("--zm-nwa-1", action="store_true", help="disable workaround for avahi-bug #379 (corruption in Avahi's mDNS reflection feature)")
     ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
     ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
     ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
@@ -1200,6 +1208,8 @@ def add_hooks(ap):
     ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
     ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
     ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
+    ap2.add_argument("--xbc", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file copy")
+    ap2.add_argument("--xac", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file copy")
     ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
     ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
     ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
@@ -1232,6 +1242,7 @@ def add_optouts(ap):
     ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
     ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
     ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
+    ap2.add_argument("--no-cp", action="store_true", help="disable copy operations")
     ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show \033[33m--name\033[0m in <title>")
     ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
     ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
@@ -1239,7 +1250,6 @@ def add_optouts(ap):
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
     ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
     ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
-    ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
     ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
     ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader IPs into the database")
 
@@ -1306,7 +1316,8 @@ def add_logging(ap):
     ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
     ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
     ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
-    ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
+    ap2.add_argument("--ohead", metavar="HEADER", type=u, action='append', help="print response \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
+    ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|[?&]th=[wjp]|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
 
 
 def add_admin(ap):
@@ -1314,9 +1325,15 @@ def add_admin(ap):
     ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
     ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
     ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
+    ap2.add_argument("--no-ups-page", action="store_true", help="disable ?ru (list of recent uploads)")
+    ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
+    ap2.add_argument("--dl-list", metavar="LVL", type=int, default=2, help="who can see active downloads in the controlpanel? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone")
+    ap2.add_argument("--ups-when", action="store_true", help="let everyone see upload timestamps on the ?ru page, not just admins")
 
 
 def add_thumbnail(ap):
+    th_ram = (RAM_AVAIL or RAM_TOTAL or 9) * 0.6
+    th_ram = int(max(min(th_ram, 6), 1) * 10) / 10
     ap2 = ap.add_argument_group('thumbnail options')
     ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails (volflag=dthumb)")
     ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails (volflag=dvthumb)")
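Worked through, the new thumbnailer default scales with free memory but always lands in the 1.0 to 6.0 GiB range:

```python
ram_gib = 16                                    # assume RAM_AVAIL reports 16 GiB
th_ram = ram_gib * 0.6                          # 9.6
th_ram = int(max(min(th_ram, 6), 1) * 10) / 10  # 6.0 (a 1 GiB box would get 1.0)
```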
@@ -1324,7 +1341,7 @@ def add_thumbnail(ap):
     ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res (volflag=thsize)")
     ap2.add_argument("--th-mt", metavar="CORES", type=int, default=CORES, help="num cpu cores to use for generating thumbnails")
     ap2.add_argument("--th-convt", metavar="SEC", type=float, default=60.0, help="conversion timeout in seconds (volflag=convt)")
-    ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=6.0, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
+    ap2.add_argument("--th-ram-max", metavar="GB", type=float, default=th_ram, help="max memory usage (GiB) permitted by thumbnailer; not very accurate")
     ap2.add_argument("--th-crop", metavar="TXT", type=u, default="y", help="crop thumbnails to 4:3 or keep dynamic height; client can override in UI unless force. [\033[32my\033[0m]=crop, [\033[32mn\033[0m]=nocrop, [\033[32mfy\033[0m]=force-y, [\033[32mfn\033[0m]=force-n (volflag=crop)")
     ap2.add_argument("--th-x3", metavar="TXT", type=u, default="n", help="show thumbs at 3x resolution; client can override in UI unless force. [\033[32my\033[0m]=yes, [\033[32mn\033[0m]=no, [\033[32mfy\033[0m]=force-yes, [\033[32mfn\033[0m]=force-no (volflag=th3x)")
     ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="image decoders, in order of preference")
@@ -1339,12 +1356,12 @@ def add_thumbnail(ap):
     # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
     # https://github.com/libvips/libvips
     # ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
-    ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
+    ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="avif,avifs,blp,bmp,cbz,dcx,dds,dib,emf,eps,fits,flc,fli,fpx,gif,heic,heics,heif,heifs,icns,ico,im,j2p,j2k,jp2,jpeg,jpg,jpx,pbm,pcx,pgm,png,pnm,ppm,psd,qoi,sgi,spi,tga,tif,tiff,webp,wmf,xbm,xpm", help="image formats to decode using pillow")
     ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="avif,exr,fit,fits,fts,gif,hdr,heic,jp2,jpeg,jpg,jpx,jxl,nii,pfm,pgm,png,ppm,svg,tif,tiff,webp", help="image formats to decode using pyvips")
-    ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
+    ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,cbz,dds,dib,fit,fits,fts,gif,hdr,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,qoi,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
     ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="3gp,asf,av1,avc,avi,flv,h264,h265,hevc,m4v,mjpeg,mjpg,mkv,mov,mp4,mpeg,mpeg2,mpegts,mpg,mpg2,mts,nut,ogm,ogv,rm,ts,vob,webm,wmv", help="video formats to decode using ffmpeg")
     ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,ac3,aif,aiff,alac,alaw,amr,apac,ape,au,bonk,dfpwm,dts,flac,gsm,ilbc,it,itgz,itxz,itz,m4a,mdgz,mdxz,mdz,mo3,mod,mp2,mp3,mpc,mptm,mt2,mulaw,ogg,okt,opus,ra,s3m,s3gz,s3xz,s3z,tak,tta,ulaw,wav,wma,wv,xm,xmgz,xmxz,xmz,xpk", help="audio formats to decode using ffmpeg")
-    ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz", help="audio formats to decompress before passing to ffmpeg")
+    ap2.add_argument("--au-unpk", metavar="E=F.C", type=u, default="mdz=mod.zip, mdgz=mod.gz, mdxz=mod.xz, s3z=s3m.zip, s3gz=s3m.gz, s3xz=s3m.xz, xmz=xm.zip, xmgz=xm.gz, xmxz=xm.xz, itz=it.zip, itgz=it.gz, itxz=it.xz, cbz=jpg.cbz", help="audio/image formats to decompress before passing to ffmpeg")
 
 
 def add_transcoding(ap):
@@ -1356,6 +1373,14 @@ def add_transcoding(ap):
     ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
 
 
+def add_rss(ap):
+    ap2 = ap.add_argument_group('RSS options')
+    ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental)")
+    ap2.add_argument("--rss-nf", metavar="HITS", type=int, default=250, help="default number of files to return (url-param 'nf')")
+    ap2.add_argument("--rss-fext", metavar="E,E", type=u, default="", help="default list of file extensions to include (url-param 'fext'); blank=all")
+    ap2.add_argument("--rss-sort", metavar="ORD", type=u, default="m", help="default sort order (url-param 'sort'); [\033[32mm\033[0m]=last-modified [\033[32mu\033[0m]=upload-time [\033[32mn\033[0m]=filename [\033[32ms\033[0m]=filesize; Uppercase=oldest-first. Note that upload-time is 0 for non-uploaded files")
+
+
 def add_db_general(ap, hcores):
     noidx = APPLESAN_TXT if MACOS else ""
     ap2 = ap.add_argument_group('general db options')
@@ -1380,6 +1405,7 @@ def add_db_general(ap, hcores):
     ap2.add_argument("--db-act", metavar="SEC", type=float, default=10.0, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
     ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
+    ap2.add_argument("--srch-excl", metavar="PTN", type=u, default="", help="regex: exclude files from search results if the file-URL matches \033[33mPTN\033[0m (case-sensitive). Example: [\033[32mpassword|logs/[0-9]\033[0m] any URL containing 'password' or 'logs/DIGIT' (volflag=srch_excl)")
     ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
 
 
@@ -1436,6 +1462,8 @@ def add_ui(ap, retry):
     ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
     ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
     ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
+    ap2.add_argument("--nsort", action="store_true", help="default-enable natural sort of filenames with leading numbers (volflag=nsort)")
+    ap2.add_argument("--hsortn", metavar="N", type=int, default=2, help="number of sorting rules to include in media URLs by default (volflag=hsortn)")
     ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
     ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
     ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
@@ -1451,6 +1479,7 @@ def add_ui(ap, retry):
     ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with \033[33m-np\033[0m")
     ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible with \033[33m-nb\033[0m)")
     ap2.add_argument("--k304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable k304 on the controlpanel (workaround for buggy reverse-proxies); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
+    ap2.add_argument("--no304", metavar="NUM", type=int, default=0, help="configure the option to enable/disable no304 on the controlpanel (workaround for buggy caching in browsers); [\033[32m0\033[0m] = hidden and default-off, [\033[32m1\033[0m] = visible and default-off, [\033[32m2\033[0m] = visible and default-on")
     ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
     ap2.add_argument("--lg-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for prologue/epilogue docs (volflag=lg_sbf)")
     ap2.add_argument("--no-sb-md", action="store_true", help="don't sandbox README/PREADME.md documents (volflags: no_sb_md | sb_md)")
@@ -1477,6 +1506,8 @@ def add_debug(ap):
     ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to \033[33m--bf-nc\033[0m and \033[33m--bf-dir\033[0m")
     ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
     ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
+    ap2.add_argument("--bf-log", metavar="PATH", type=u, default="", help="bak-flips: log corruption info to a textfile at \033[33mPATH\033[0m")
+    ap2.add_argument("--no-cfg-cmt-warn", action="store_true", help=argparse.SUPPRESS)
 
 
 # fmt: on
@@ -1524,6 +1555,7 @@ def run_argparse(
     add_db_metadata(ap)
     add_thumbnail(ap)
     add_transcoding(ap)
+    add_rss(ap)
     add_ftp(ap)
     add_webdav(ap)
     add_tftp(ap)
@@ -1708,7 +1740,7 @@ def main(argv: Optional[list[str]] = None) -> None:
     except:
         lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
 
-    if al.ansi:
+    if not al.ansi:
         al.wintitle = ""
 
     # propagate implications
@@ -1746,6 +1778,9 @@ def main(argv: Optional[list[str]] = None) -> None:
     if al.ihead:
         al.ihead = [x.lower() for x in al.ihead]
 
+    if al.ohead:
+        al.ohead = [x.lower() for x in al.ohead]
+
     if HAVE_SSL:
         if al.ssl_ver:
             configure_ssl_ver(al)
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 15, 6)
-CODENAME = "fill the drives"
-BUILD_DT = (2024, 10, 12)
+VERSION = (1, 16, 6)
+CODENAME = "COPYparty"
+BUILD_DT = (2024, 12, 19)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -66,6 +66,7 @@ if PY2:
 LEELOO_DALLAS = "leeloo_dallas"
 
 SEE_LOG = "see log for details"
+SEESLOG = " (see serverlog for details)"
 SSEELOG = " ({})".format(SEE_LOG)
 BAD_CFG = "invalid config; {}".format(SEE_LOG)
 SBADCFG = " ({})".format(BAD_CFG)
@@ -164,6 +165,9 @@ class Lim(object):
         self.chk_rem(rem)
         if sz != -1:
             self.chk_sz(sz)
+        else:
+            sz = 0
+
         self.chk_vsz(broker, ptop, sz, volgetter)
         self.chk_df(abspath, sz) # side effects; keep last-ish
 
@@ -205,7 +209,15 @@ class Lim(object):
 
         if self.dft < time.time():
             self.dft = int(time.time()) + 300
-            self.dfv = get_df(abspath)[0] or 0
+
+            df, du, err = get_df(abspath, True)
+            if err:
+                t = "failed to read disk space usage for %r: %s"
+                self.log(t % (abspath, err), 3)
+                self.dfv = 0xAAAAAAAAA  # 42.6 GiB
+            else:
+                self.dfv = df or 0
+
             for j in list(self.reg.values()) if self.reg else []:
                 self.dfv -= int(j["size"] / (len(j["hash"]) or 999) * len(j["need"]))
 
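The fallback constant's comment checks out:

```python
print(0xAAAAAAAAA)            # 45812984490 bytes
print(0xAAAAAAAAA / 1024**3)  # 42.666..., i.e. the "42.6 GiB" in the comment
```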
@@ -355,18 +367,21 @@ class VFS(object):
         self.ahtml: dict[str, list[str]] = {}
         self.aadmin: dict[str, list[str]] = {}
         self.adot: dict[str, list[str]] = {}
-        self.all_vols: dict[str, VFS] = {}
+        self.js_ls = {}
+        self.js_htm = ""
 
         if realpath:
             rp = realpath + ("" if realpath.endswith(os.sep) else os.sep)
             vp = vpath + ("/" if vpath else "")
             self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
             self.all_vols = {vpath: self} # flattened recursive
+            self.all_nodes = {vpath: self} # also jumpvols
             self.all_aps = [(rp, self)]
             self.all_vps = [(vp, self)]
         else:
             self.histpath = ""
             self.all_vols = {}
+            self.all_nodes = {}
             self.all_aps = []
             self.all_vps = []
 
@@ -384,9 +399,11 @@ class VFS(object):
     def get_all_vols(
         self,
         vols: dict[str, "VFS"],
+        nodes: dict[str, "VFS"],
        aps: list[tuple[str, "VFS"]],
        vps: list[tuple[str, "VFS"]],
     ) -> None:
+        nodes[self.vpath] = self
         if self.realpath:
             vols[self.vpath] = self
             rp = self.realpath
@@ -396,7 +413,7 @@ class VFS(object):
             vps.append((vp, self))
 
         for v in self.nodes.values():
-            v.get_all_vols(vols, aps, vps)
+            v.get_all_vols(vols, nodes, aps, vps)
 
     def add(self, src: str, dst: str) -> "VFS":
         """get existing, or add new path to the vfs"""
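The vols/nodes split boils down to: every VFS node is registered in `nodes`, but only nodes backed by a real filesystem path land in `vols`. A toy model (not the real class) of the recursion:

```python
class Node:
    def __init__(self, vpath, realpath, children=None):
        self.vpath, self.realpath = vpath, realpath
        self.nodes = children or {}

    def get_all(self, vols, nodes):
        nodes[self.vpath] = self     # every node, jumpvols included
        if self.realpath:
            vols[self.vpath] = self  # real volumes only
        for child in self.nodes.values():
            child.get_all(vols, nodes)

deep = Node("jump/deep", "/mnt")
root = Node("", "/srv", {"jump": Node("jump", "", {"deep": deep})})
vols, nodes = {}, {}
root.get_all(vols, nodes)
print(sorted(vols))   # ['', 'jump/deep']
print(sorted(nodes))  # ['', 'jump', 'jump/deep']
```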
@@ -509,7 +526,7 @@ class VFS(object):
         """returns [vfsnode,fs_remainder] if user has the requested permissions"""
         if relchk(vpath):
             if self.log:
-                self.log("vfs", "invalid relpath [{}]".format(vpath))
+                self.log("vfs", "invalid relpath %r @%s" % (vpath, uname))
             raise Pebkac(422)
 
         cvpath = undot(vpath)
@@ -526,11 +543,11 @@ class VFS(object):
         if req and uname not in d and uname != LEELOO_DALLAS:
             if vpath != cvpath and vpath != "." and self.log:
                 ap = vn.canonical(rem)
-                t = "{} has no {} in [{}] => [{}] => [{}]"
-                self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
+                t = "%s has no %s in %r => %r => %r"
+                self.log("vfs", t % (uname, msg, vpath, cvpath, ap), 6)
 
-            t = 'you don\'t have %s-access in "/%s" or below "/%s"'
-            raise Pebkac(err, t % (msg, cvpath, vn.vpath))
+            t = "you don't have %s-access in %r or below %r"
+            raise Pebkac(err, t % (msg, "/" + cvpath, "/" + vn.vpath))
 
         return vn, rem
 
@@ -540,15 +557,14 @@ class VFS(object):
             return self._get_dbv(vrem)
 
         shv, srem = src
-        return shv, vjoin(srem, vrem)
+        return shv._get_dbv(vjoin(srem, vrem))
 
     def _get_dbv(self, vrem: str) -> tuple["VFS", str]:
         dbv = self.dbv
         if not dbv:
             return self, vrem
 
-        tv = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
-        vrem = "/".join([x for x in tv if x])
+        vrem = vjoin(self.vpath[len(dbv.vpath) :].lstrip("/"), vrem)
         return dbv, vrem
 
     def canonical(self, rem: str, resolve: bool = True) -> str:
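Both callsites now funnel through `vjoin`, which (judging by the two-line version it replaces) joins vpath fragments while skipping blanks; a plausible equivalent:

```python
# hedged reimplementation; behavior inferred from the replaced lines above
def vjoin(rd: str, fn: str) -> str:
    return rd + "/" + fn if rd and fn else rd or fn

assert vjoin("a/b", "c") == "a/b/c"
assert vjoin("", "c") == "c"
assert vjoin("a/b", "") == "a/b"
```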
@@ -580,10 +596,11 @@ class VFS(object):
         scandir: bool,
         permsets: list[list[bool]],
         lstat: bool = False,
+        throw: bool = False,
     ) -> tuple[str, list[tuple[str, os.stat_result]], dict[str, "VFS"]]:
         """replaces _ls for certain shares (single-file, or file selection)"""
         vn, rem = self.shr_src  # type: ignore
-        abspath, real, _ = vn.ls(rem, "\n", scandir, permsets, lstat)
+        abspath, real, _ = vn.ls(rem, "\n", scandir, permsets, lstat, throw)
         real = [x for x in real if os.path.basename(x[0]) in self.shr_files]
         return abspath, real, {}
 
@@ -594,11 +611,12 @@ class VFS(object):
         scandir: bool,
         permsets: list[list[bool]],
         lstat: bool = False,
+        throw: bool = False,
     ) -> tuple[str, list[tuple[str, os.stat_result]], dict[str, "VFS"]]:
         """return user-readable [fsdir,real,virt] items at vpath"""
         virt_vis = {}  # nodes readable by user
         abspath = self.canonical(rem)
-        real = list(statdir(self.log, scandir, lstat, abspath))
+        real = list(statdir(self.log, scandir, lstat, abspath, throw))
         real.sort()
         if not rem:
             # no vfs nodes in the list of real inodes
@@ -640,7 +658,7 @@ class VFS(object):
         seen: list[str],
         uname: str,
         permsets: list[list[bool]],
-        wantdots: bool,
+        wantdots: int,
         scandir: bool,
         lstat: bool,
         subvols: bool = True,
@@ -660,6 +678,10 @@ class VFS(object):
         """
         recursively yields from ./rem;
         rel is a unix-style user-defined vpath (not vfs-related)
+
+        NOTE: don't invoke this function from a dbv; subvols are only
+          descended into if rem is blank due to the _ls `if not rem:`
+          which intention is to prevent unintended access to subvols
         """
 
         fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, permsets, lstat=lstat)
@@ -671,8 +693,8 @@ class VFS(object):
             and fsroot in seen
         ):
             if self.log:
-                t = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}/{}"
-                self.log("vfs.walk", t.format(seen[-1], fsroot, self.vpath, rem), 3)
+                t = "bailing from symlink loop,\n prev: %r\n curr: %r\n from: %r / %r"
+                self.log("vfs.walk", t % (seen[-1], fsroot, self.vpath, rem), 3)
             return
 
         if "xdev" in self.flags or "xvol" in self.flags:
@@ -684,7 +706,7 @@ class VFS(object):
                 rm1.append(le)
             _ = [vfs_ls.remove(x) for x in rm1] # type: ignore
 
-        dots_ok = wantdots and uname in dbv.axs.udot
+        dots_ok = wantdots and (wantdots == 2 or uname in dbv.axs.udot)
         if not dots_ok:
             vfs_ls = [x for x in vfs_ls if "/." not in "/" + x[0]]
 
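`wantdots` is now a tri-state rather than a bool; reading the new check, the values appear to mean:

```python
# 0 = never show dotfiles, 1 = only if uname holds the dot-permission, 2 = always
def dots_visible(wantdots: int, uname: str, udot: set) -> bool:
    return bool(wantdots and (wantdots == 2 or uname in udot))

assert not dots_visible(0, "ed", {"ed"})
assert dots_visible(1, "ed", {"ed"}) and not dots_visible(1, "kip", {"ed"})
assert dots_visible(2, "kip", set())
```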
@@ -738,7 +760,7 @@ class VFS(object):
         # if single folder: the folder itself is the top-level item
         folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
 
-        g = self.walk(folder, vrem, [], uname, [[True, False]], True, scandir, False)
+        g = self.walk(folder, vrem, [], uname, [[True, False]], 1, scandir, False)
         for _, _, vpath, apath, files, rd, vd in g:
             if flt:
                 files = [x for x in files if x[0] in flt]
@@ -796,8 +818,8 @@ class VFS(object):
 
         if vdev != st.st_dev:
             if self.log:
-                t = "xdev: {}[{}] => {}[{}]"
-                self.log("vfs", t.format(vdev, self.realpath, st.st_dev, ap), 3)
+                t = "xdev: %s[%r] => %s[%r]"
+                self.log("vfs", t % (vdev, self.realpath, st.st_dev, ap), 3)
 
             return None
 
@@ -807,7 +829,7 @@ class VFS(object):
             return vn
 
         if self.log:
-            self.log("vfs", "xvol: [{}]".format(ap), 3)
+            self.log("vfs", "xvol: %r" % (ap,), 3)
 
         return None
 
@@ -892,7 +914,7 @@ class AuthSrv(object):
 
         self.idp_accs[uname] = gnames
 
-        t = "reinitializing due to new user from IdP: [%s:%s]"
+        t = "reinitializing due to new user from IdP: [%r:%r]"
         self.log(t % (uname, gnames), 3)
 
         if not broker:
@@ -900,7 +922,7 @@ class AuthSrv(object):
             self._reload()
             return True
 
-        broker.ask("_reload_blocking", False).get()
+        broker.ask("reload", False, True).get()
         return True
 
     def _map_volume_idp(
@@ -1370,7 +1392,7 @@ class AuthSrv(object):
             flags[name] = True
             return
 
-        zs = "mtp on403 on404 xbu xau xiu xbr xar xbd xad xm xban"
+        zs = "mtp on403 on404 xbu xau xiu xbc xac xbr xar xbd xad xm xban"
         if name not in zs.split():
             if value is True:
                 t = "└─add volflag [{}] = {} ({})"
@@ -1518,10 +1540,11 @@ class AuthSrv(object):
 
         assert vfs # type: ignore
         vfs.all_vols = {}
+        vfs.all_nodes = {}
         vfs.all_aps = []
         vfs.all_vps = []
-        vfs.get_all_vols(vfs.all_vols, vfs.all_aps, vfs.all_vps)
-        for vol in vfs.all_vols.values():
+        vfs.get_all_vols(vfs.all_vols, vfs.all_nodes, vfs.all_aps, vfs.all_vps)
+        for vol in vfs.all_nodes.values():
             vol.all_aps.sort(key=lambda x: len(x[0]), reverse=True)
             vol.all_vps.sort(key=lambda x: len(x[0]), reverse=True)
             vol.root = vfs
@@ -1545,7 +1568,7 @@ class AuthSrv(object):
                 continue
 
             if self.args.shr_v:
-                t = "loading %s share [%s] by [%s] => [%s]"
+                t = "loading %s share %r by %r => %r"
                 self.log(t % (s_pr, s_k, s_un, s_vp))
 
             if s_pw:
@@ -1572,7 +1595,7 @@ class AuthSrv(object):
 
             vfs.nodes[shr] = vfs.all_vols[shr] = shv
             for vol in shv.nodes.values():
-                vfs.all_vols[vol.vpath] = vol
+                vfs.all_vols[vol.vpath] = vfs.all_nodes[vol.vpath] = vol
                 vol.get_dbv = vol._get_share_src
                 vol.ls = vol._ls_nope
 
@@ -1715,7 +1738,19 @@ class AuthSrv(object):
|
|||||||
|
|
||||||
self.log("\n\n".join(ta) + "\n", c=3)
|
self.log("\n\n".join(ta) + "\n", c=3)
|
||||||
|
|
||||||
vfs.histtab = {zv.realpath: zv.histpath for zv in vfs.all_vols.values()}
|
rhisttab = {}
|
||||||
|
vfs.histtab = {}
|
||||||
|
for zv in vfs.all_vols.values():
|
||||||
|
histp = zv.histpath
|
||||||
|
is_shr = shr and zv.vpath.split("/")[0] == shr
|
||||||
|
if histp and not is_shr and histp in rhisttab:
|
||||||
|
zv2 = rhisttab[histp]
|
||||||
|
t = "invalid config; multiple volumes share the same histpath (database location):\n histpath: %s\n volume 1: /%s [%s]\n volume 2: %s [%s]"
|
||||||
|
t = t % (histp, zv2.vpath, zv2.realpath, zv.vpath, zv.realpath)
|
||||||
|
self.log(t, 1)
|
||||||
|
raise Exception(t)
|
||||||
|
rhisttab[histp] = zv
|
||||||
|
vfs.histtab[zv.realpath] = histp
|
||||||
|
|
||||||
for vol in vfs.all_vols.values():
|
for vol in vfs.all_vols.values():
|
||||||
lim = Lim(self.log_func)
|
lim = Lim(self.log_func)
|
||||||
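The hunk above replaces a one-line histtab comprehension with a duplicate check: a reverse map from histpath to the first volume that claimed it, so two volumes configured to share one database directory fail fast at startup. A minimal standalone sketch of the same idea (class and field names here are illustrative, not copyparty's):

```python
# sketch: detect two volumes configured to share one database directory.
# "Volume" and its attributes are illustrative stand-ins for the real classes.
class Volume:
    def __init__(self, vpath, realpath, histpath):
        self.vpath = vpath          # URL path of the volume
        self.realpath = realpath    # filesystem path of the volume
        self.histpath = histpath    # where its database/thumbnails live

def build_histtab(vols):
    rhist = {}    # histpath -> first volume seen using it
    histtab = {}  # realpath -> histpath
    for v in vols:
        if v.histpath and v.histpath in rhist:
            w = rhist[v.histpath]
            raise ValueError(
                "volumes /%s and /%s share histpath %s"
                % (w.vpath, v.vpath, v.histpath)
            )
        rhist[v.histpath] = v
        histtab[v.realpath] = v.histpath
    return histtab
```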
@@ -1730,7 +1765,7 @@ class AuthSrv(object):
                 use = True
                 try:
                     _ = float(zs)
-                    zs = "%sg" % (zs)
+                    zs = "%sg" % (zs,)
                 except:
                     pass
                 lim.dfl = unhumanize(zs)
@@ -1774,12 +1809,12 @@ class AuthSrv(object):
                 vol.lim = lim

         if self.args.no_robots:
-            for vol in vfs.all_vols.values():
+            for vol in vfs.all_nodes.values():
                 # volflag "robots" overrides global "norobots", allowing indexing by search engines for this vol
                 if not vol.flags.get("robots"):
                     vol.flags["norobots"] = True

-        for vol in vfs.all_vols.values():
+        for vol in vfs.all_nodes.values():
             if self.args.no_vthumb:
                 vol.flags["dvthumb"] = True
             if self.args.no_athumb:
@@ -1791,7 +1826,7 @@ class AuthSrv(object):
                 vol.flags["dithumb"] = True

         have_fk = False
-        for vol in vfs.all_vols.values():
+        for vol in vfs.all_nodes.values():
             fk = vol.flags.get("fk")
             fka = vol.flags.get("fka")
             if fka and not fk:
@@ -1823,7 +1858,7 @@ class AuthSrv(object):
             zs = os.path.join(E.cfg, "fk-salt.txt")
             self.log(t % (fk_len, 16, zs), 3)

-        for vol in vfs.all_vols.values():
+        for vol in vfs.all_nodes.values():
             if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
                 vol.flags["gz"] = False  # def.pk

@@ -1834,7 +1869,7 @@ class AuthSrv(object):

         all_mte = {}
         errors = False
-        for vol in vfs.all_vols.values():
+        for vol in vfs.all_nodes.values():
             if (self.args.e2ds and vol.axs.uwrite) or self.args.e2dsa:
                 vol.flags["e2ds"] = True

@@ -1845,6 +1880,7 @@ class AuthSrv(object):
                 ["no_hash", "nohash"],
                 ["no_idx", "noidx"],
                 ["og_ua", "og_ua"],
+                ["srch_excl", "srch_excl"],
             ]:
                 if vf in vol.flags:
                     ptn = re.compile(vol.flags.pop(vf))
@@ -1925,7 +1961,7 @@ class AuthSrv(object):
                 vol.flags[k] = odfusion(getattr(self.args, k), vol.flags[k])

             # append additive args from argv to volflags
-            hooks = "xbu xau xiu xbr xar xbd xad xm xban".split()
+            hooks = "xbu xau xiu xbc xac xbr xar xbd xad xm xban".split()
             for name in "mtp on404 on403".split() + hooks:
                 self._read_volflag(vol.flags, name, getattr(self.args, name), True)

@@ -2051,8 +2087,24 @@ class AuthSrv(object):
                     self.log(t.format(mtp), 1)
                     errors = True

-        have_daw = False
         for vol in vfs.all_vols.values():
+            re1: Optional[re.Pattern] = vol.flags.get("srch_excl")
+            excl = [re1.pattern] if re1 else []
+
+            vpaths = []
+            vtop = vol.vpath
+            for vp2 in vfs.all_vols.keys():
+                if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
+                    vpaths.append(re.escape(vp2[len(vtop) :].lstrip("/")))
+            if vpaths:
+                excl.append("^(%s)/" % ("|".join(vpaths),))
+
+            vol.flags["srch_re_dots"] = re.compile("|".join(excl or ["^$"]))
+            excl.extend([r"^\.", r"/\."])
+            vol.flags["srch_re_nodot"] = re.compile("|".join(excl))
+
+        have_daw = False
+        for vol in vfs.all_nodes.values():
             daw = vol.flags.get("daw") or self.args.daw
             if daw:
                 vol.flags["daw"] = True
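The new srch_excl block composes one regex per volume that filters search results: any user-supplied exclusion pattern, plus the mountpoints of nested subvolumes so their contents never leak into the parent's results. A sketch of that composition (helper name is hypothetical):

```python
# sketch: build a regex that hides subvolume mountpoints from a parent
# volume's search results, mirroring the hunk above; names are illustrative.
import re

def search_excl_pattern(vtop, all_vpaths, extra=None):
    excl = [extra] if extra else []
    sub = []
    for vp in all_vpaths:
        if vp != vtop and vp.startswith((vtop + "/").lstrip("/")):
            # path of the subvolume, relative to this volume
            sub.append(re.escape(vp[len(vtop):].lstrip("/")))
    if sub:
        excl.append("^(%s)/" % ("|".join(sub),))
    # "^$" matches nothing, so an empty exclusion list excludes nothing
    return re.compile("|".join(excl or ["^$"]))

ptn = search_excl_pattern("", ["", "music", "music/flac"])
assert ptn.search("music/flac/x.flac") and not ptn.search("docs/readme.md")
```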
@@ -2067,13 +2119,12 @@ class AuthSrv(object):
             self.log("--smb can only be used when --ah-alg is none", 1)
             errors = True

-        for vol in vfs.all_vols.values():
+        for vol in vfs.all_nodes.values():
             for k in list(vol.flags.keys()):
                 if re.match("^-[^-]+$", k):
                     vol.flags.pop(k[1:], None)
                     vol.flags.pop(k)

-        for vol in vfs.all_vols.values():
             if vol.flags.get("dots"):
                 for name in vol.axs.uread:
                     vol.axs.udot.add(name)
@@ -2215,6 +2266,11 @@ class AuthSrv(object):
                 for x, y in vfs.all_vols.items()
                 if x != shr and not x.startswith(shrs)
             }
+            vfs.all_nodes = {
+                x: y
+                for x, y in vfs.all_nodes.items()
+                if x != shr and not x.startswith(shrs)
+            }

             assert db and cur and cur2 and shv  # type: ignore
             for row in cur.execute("select * from sh"):
@@ -2267,6 +2323,71 @@ class AuthSrv(object):
             cur.close()
             db.close()

+        self.js_ls = {}
+        self.js_htm = {}
+        for vn in self.vfs.all_nodes.values():
+            vf = vn.flags
+            vn.js_ls = {
+                "idx": "e2d" in vf,
+                "itag": "e2t" in vf,
+                "dnsort": "nsort" in vf,
+                "dhsortn": vf["hsortn"],
+                "dsort": vf["sort"],
+                "dcrop": vf["crop"],
+                "dth3x": vf["th3x"],
+                "u2ts": vf["u2ts"],
+                "frand": bool(vf.get("rand")),
+                "lifetime": vf.get("lifetime") or 0,
+                "unlist": vf.get("unlist") or "",
+            }
+            js_htm = {
+                "s_name": self.args.bname,
+                "have_up2k_idx": "e2d" in vf,
+                "have_acode": not self.args.no_acode,
+                "have_shr": self.args.shr,
+                "have_zip": not self.args.no_zip,
+                "have_mv": not self.args.no_mv,
+                "have_del": not self.args.no_del,
+                "have_unpost": int(self.args.unpost),
+                "have_emp": self.args.emp,
+                "sb_md": "" if "no_sb_md" in vf else (vf.get("md_sbf") or "y"),
+                "txt_ext": self.args.textfiles.replace(",", " "),
+                "def_hcols": list(vf.get("mth") or []),
+                "unlist0": vf.get("unlist") or "",
+                "dgrid": "grid" in vf,
+                "dgsel": "gsel" in vf,
+                "dnsort": "nsort" in vf,
+                "dhsortn": vf["hsortn"],
+                "dsort": vf["sort"],
+                "dcrop": vf["crop"],
+                "dth3x": vf["th3x"],
+                "dvol": self.args.au_vol,
+                "idxh": int(self.args.ih),
+                "themes": self.args.themes,
+                "turbolvl": self.args.turbo,
+                "u2j": self.args.u2j,
+                "u2sz": self.args.u2sz,
+                "u2ts": vf["u2ts"],
+                "frand": bool(vf.get("rand")),
+                "lifetime": vn.js_ls["lifetime"],
+                "u2sort": self.args.u2sort,
+            }
+            vn.js_htm = json.dumps(js_htm)
+
+        vols = list(vfs.all_nodes.values())
+        if enshare:
+            assert shv  # type: ignore  # !rm
+            vols.append(shv)
+            vols.extend(list(shv.nodes.values()))
+
+        for vol in vols:
+            dbv = vol.get_dbv("")[0]
+            vol.js_ls = vol.js_ls or dbv.js_ls or {}
+            vol.js_htm = vol.js_htm or dbv.js_htm or "{}"
+
+            zs = str(vol.flags.get("tcolor") or self.args.tcolor)
+            vol.flags["tcolor"] = zs.lstrip("#")
+
     def load_sessions(self, quiet=False) -> None:
         # mutex me
         if self.args.no_ses:
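The js_ls/js_htm hunk computes each volume's browser-facing settings once at config-load time and stores the serialized JSON on the volume, so request handlers embed a prebuilt blob instead of serializing per request; share-volumes fall back to their backing volume's copy. A reduced sketch of that precompute-and-fallback shape (schema is illustrative):

```python
# sketch: serialize per-volume UI settings once at config-load time, so
# request handlers can reuse a prebuilt JSON blob instead of re-serializing
# on every page load; structure is illustrative, not copyparty's exact schema.
import json

class Vol:
    def __init__(self, flags):
        self.flags = flags
        self.js_htm = "{}"  # fallback for volumes with no own settings

def freeze_ui_config(vols, defaults):
    for v in vols:
        cfg = dict(defaults)
        cfg["grid"] = "grid" in v.flags
        cfg["lifetime"] = v.flags.get("lifetime") or 0
        v.js_htm = json.dumps(cfg)  # computed once, reused per request

freeze_ui_config([Vol({"grid": True})], {"themes": 8})
```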
@@ -2376,7 +2497,7 @@ class AuthSrv(object):
             self._reload()
             return True, "new password OK"

-        broker.ask("_reload_blocking", False, False).get()
+        broker.ask("reload", False, False).get()
         return True, "new password OK"

     def setup_chpw(self, acct: dict[str, str]) -> None:
@@ -2417,7 +2538,7 @@ class AuthSrv(object):
             return

         elif self.args.chpw_v == 2:
-            t = "chpw: %d changed" % (len(uok))
+            t = "chpw: %d changed" % (len(uok),)
             if urst:
                 t += ", \033[0munchanged:\033[35m %s" % (", ".join(list(urst)))

@@ -2575,7 +2696,7 @@ class AuthSrv(object):
             [],
             u,
             [[True, False]],
-            True,
+            1,
             not self.args.no_scandir,
             False,
             False,
@@ -2628,7 +2749,7 @@ class AuthSrv(object):
         ]

         csv = set("i p th_covers zm_on zm_off zs_on zs_off".split())
-        zs = "c ihead mtm mtp on403 on404 xad xar xau xiu xban xbd xbr xbu xm"
+        zs = "c ihead ohead mtm mtp on403 on404 xac xad xar xau xiu xban xbc xbd xbr xbu xm"
         lst = set(zs.split())
         askip = set("a v c vc cgen exp_lg exp_md theme".split())
         fskip = set("exp_lg exp_md mv_re_r mv_re_t rm_re_r rm_re_t".split())
@@ -2896,6 +3017,19 @@ def expand_config_file(

     ret.append("#\033[36m closed{}\033[0m".format(ipath))

+    zsl = []
+    for ln in ret:
+        zs = ln.split("  #")[0]
+        if " #" in zs and zs.split("#")[0].strip():
+            zsl.append(ln)
+    if zsl and "no-cfg-cmt-warn" not in "\n".join(ret):
+        t = "\033[33mWARNING: there is less than two spaces before the # in the following config lines, so instead of assuming that this is a comment, the whole line will become part of the config value:\n\n>>> %s\n\nif you are familiar with this and would like to mute this warning, specify the global-option no-cfg-cmt-warn\n\033[0m"
+        t = t % ("\n>>> ".join(zsl),)
+        if log:
+            log(t)
+        else:
+            print(t, file=sys.stderr)
+

 def upgrade_cfg_fmt(
     log: Optional["NamedLogger"], args: argparse.Namespace, orig: list[str], cfg_fp: str
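The expand_config_file addition warns about a config-file pitfall: a `#` preceded by two spaces starts a comment, while a single space makes the `#` part of the value. A hypothetical checker for the same rule, assuming the two-space convention:

```python
# sketch: flag config lines where "#" follows a single space; only a
# two-space "  #" counts as a comment. checker name is hypothetical.
def flag_suspect_comments(lines):
    hits = []
    for ln in lines:
        head = ln.split("  #")[0]      # strip a real (two-space) comment
        if " #" in head and head.split("#")[0].strip():
            hits.append(ln)            # single-space "#": probably a mistake
    return hits

assert flag_suspect_comments(["name: a #typo"]) == ["name: a #typo"]
assert flag_suspect_comments(["name: a  # real comment"]) == []
```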
@@ -43,6 +43,9 @@ class BrokerMp(object):
         self.procs = []
         self.mutex = threading.Lock()

+        self.retpend: dict[int, Any] = {}
+        self.retpend_mutex = threading.Lock()
+
         self.num_workers = self.args.j or CORES
         self.log("broker", "booting {} subprocesses".format(self.num_workers))
         for n in range(1, self.num_workers + 1):
@@ -54,6 +57,8 @@ class BrokerMp(object):
             self.procs.append(proc)
             proc.start()

+        Daemon(self.periodic, "mp-periodic")
+
     def shutdown(self) -> None:
         self.log("broker", "shutting down")
         for n, proc in enumerate(self.procs):
@@ -90,8 +95,10 @@ class BrokerMp(object):
                     self.log(*args)

                 elif dest == "retq":
-                    # response from previous ipc call
-                    raise Exception("invalid broker_mp usage")
+                    with self.retpend_mutex:
+                        retq = self.retpend.pop(retq_id)
+
+                    retq.put(args[0])

                 else:
                     # new ipc invoking managed service in hub
@@ -109,7 +116,6 @@ class BrokerMp(object):
                     proc.q_pend.put((retq_id, "retq", rv))

     def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
-
         # new non-ipc invoking managed service in hub
         obj = self.hub
         for node in dest.split("."):
@@ -121,17 +127,30 @@ class BrokerMp(object):
         retq.put(rv)
         return retq

+    def wask(self, dest: str, *args: Any) -> list[Union[ExceptionalQueue, NotExQueue]]:
+        # call from hub to workers
+        ret = []
+        for p in self.procs:
+            retq = ExceptionalQueue(1)
+            retq_id = id(retq)
+            with self.retpend_mutex:
+                self.retpend[retq_id] = retq
+
+            p.q_pend.put((retq_id, dest, list(args)))
+            ret.append(retq)
+        return ret
+
     def say(self, dest: str, *args: Any) -> None:
         """
         send message to non-hub component in other process,
         returns a Queue object which eventually contains the response if want_retval
         (not-impl here since nothing uses it yet)
         """
-        if dest == "listen":
+        if dest == "httpsrv.listen":
             for p in self.procs:
                 p.q_pend.put((0, dest, [args[0], len(self.procs)]))

-        elif dest == "set_netdevs":
+        elif dest == "httpsrv.set_netdevs":
             for p in self.procs:
                 p.q_pend.put((0, dest, list(args)))

@@ -140,3 +159,19 @@ class BrokerMp(object):

         else:
             raise Exception("what is " + str(dest))
+
+    def periodic(self) -> None:
+        while True:
+            time.sleep(1)
+
+            tdli = {}
+            tdls = {}
+            qs = self.wask("httpsrv.read_dls")
+            for q in qs:
+                qr = q.get()
+                dli, dls = qr
+                tdli.update(dli)
+                tdls.update(dls)
+            tdl = (tdli, tdls)
+            for p in self.procs:
+                p.q_pend.put((0, "httpsrv.write_dls", tdl))
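wask introduces a broadcast form of ask: the hub registers one reply queue per worker in retpend (keyed by the queue's id), sends the same request to every subprocess, and the new periodic thread joins all replies to merge per-worker download state. The same pattern reduced to plain queues and threads (all names illustrative):

```python
# sketch: broadcast-ask over worker inboxes with a pending-reply table,
# mirroring BrokerMp.wask above; uses threads instead of subprocesses.
import queue, threading

class MiniBroker:
    def __init__(self, n):
        self.inboxes = [queue.Queue() for _ in range(n)]
        self.retpend = {}
        self.lock = threading.Lock()
        for ib in self.inboxes:
            threading.Thread(target=self._worker, args=(ib,), daemon=True).start()

    def _worker(self, inbox):
        while True:
            retq_id, msg = inbox.get()
            self._reply(retq_id, {"worker": id(inbox), "msg": msg})

    def _reply(self, retq_id, rv):
        with self.lock:
            self.retpend.pop(retq_id).put(rv)

    def wask(self, msg):
        qs = []
        for ib in self.inboxes:
            rq = queue.Queue(1)
            with self.lock:
                self.retpend[id(rq)] = rq
            ib.put((id(rq), msg))
            qs.append(rq)
        return [q.get() for q in qs]  # one answer per worker

print(len(MiniBroker(3).wask("read_dls")))  # -> 3
```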
@@ -82,37 +82,38 @@ class MpWorker(BrokerCli):
         while True:
             retq_id, dest, args = self.q_pend.get()

-            # self.logw("work: [{}]".format(d[0]))
+            if dest == "retq":
+                # response from previous ipc call
+                with self.retpend_mutex:
+                    retq = self.retpend.pop(retq_id)
+
+                retq.put(args)
+                continue
+
             if dest == "shutdown":
                 self.httpsrv.shutdown()
                 self.logw("ok bye")
                 sys.exit(0)
                 return

-            elif dest == "reload":
+            if dest == "reload":
                 self.logw("mpw.asrv reloading")
                 self.asrv.reload()
                 self.logw("mpw.asrv reloaded")
+                continue

-            elif dest == "reload_sessions":
+            if dest == "reload_sessions":
                 with self.asrv.mutex:
                     self.asrv.load_sessions()
+                continue

-            elif dest == "listen":
-                self.httpsrv.listen(args[0], args[1])
-
-            elif dest == "set_netdevs":
-                self.httpsrv.set_netdevs(args[0])
-
-            elif dest == "retq":
-                # response from previous ipc call
-                with self.retpend_mutex:
-                    retq = self.retpend.pop(retq_id)
-
-                retq.put(args)
-
-            else:
-                raise Exception("what is " + str(dest))
+            obj = self
+            for node in dest.split("."):
+                obj = getattr(obj, node)
+
+            rv = obj(*args)  # type: ignore
+            if retq_id:
+                self.say("retq", rv, retq_id=retq_id)

     def ask(self, dest: str, *args: Any) -> Union[ExceptionalQueue, NotExQueue]:
         retq = ExceptionalQueue(1)
@@ -123,5 +124,5 @@ class MpWorker(BrokerCli):
         self.q_yield.put((retq_id, dest, list(args)))
         return retq

-    def say(self, dest: str, *args: Any) -> None:
-        self.q_yield.put((0, dest, list(args)))
+    def say(self, dest: str, *args: Any, retq_id=0) -> None:
+        self.q_yield.put((retq_id, dest, list(args)))
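The worker loop drops its elif chain for a dotted-name dispatch: a destination like httpsrv.listen is resolved by walking getattr from the worker object, so new hub-to-worker calls need no dispatcher changes. A sketch (stub classes are illustrative):

```python
# sketch: dotted-name dispatch as in the worker loop above; a message like
# "httpsrv.listen" walks attributes from a root object instead of an elif chain.
class HttpSrvStub:
    def listen(self, port, nprocs):
        return "listening on %d (1 of %d)" % (port, nprocs)

class WorkerStub:
    def __init__(self):
        self.httpsrv = HttpSrvStub()

    def dispatch(self, dest, *args):
        obj = self
        for node in dest.split("."):
            obj = getattr(obj, node)  # WorkerStub -> httpsrv -> listen
        return obj(*args)

print(WorkerStub().dispatch("httpsrv.listen", 3923, 4))
```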
@@ -53,11 +53,11 @@ class BrokerThr(BrokerCli):
         return NotExQueue(obj(*args))  # type: ignore

     def say(self, dest: str, *args: Any) -> None:
-        if dest == "listen":
+        if dest == "httpsrv.listen":
             self.httpsrv.listen(args[0], 1)
             return

-        if dest == "set_netdevs":
+        if dest == "httpsrv.set_netdevs":
             self.httpsrv.set_netdevs(args[0])
             return

@@ -42,10 +42,12 @@ def vf_bmap() -> dict[str, str]:
         "magic",
         "no_sb_md",
         "no_sb_lg",
+        "nsort",
         "og",
         "og_no_head",
         "og_s_title",
         "rand",
+        "rss",
         "xdev",
         "xlink",
         "xvol",
@@ -68,6 +70,7 @@ def vf_vmap() -> dict[str, str]:
     }
     for k in (
         "dbd",
+        "hsortn",
         "html_head",
         "lg_sbf",
         "md_sbf",
@@ -102,10 +105,12 @@ def vf_cmap() -> dict[str, str]:
         "mte",
         "mth",
         "mtp",
+        "xac",
         "xad",
         "xar",
         "xau",
         "xban",
+        "xbc",
         "xbd",
         "xbr",
         "xbu",
@@ -187,6 +192,7 @@ flagcats = {
         "xvol": "do not follow symlinks leaving the volume root",
         "dotsrch": "show dotfiles in search results",
         "nodotsrch": "hide dotfiles in search results (default)",
+        "srch_excl": "exclude search results with URL matching this regex",
     },
     'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
         "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
@@ -211,6 +217,8 @@ flagcats = {
         "xbu=CMD": "execute CMD before a file upload starts",
         "xau=CMD": "execute CMD after a file upload finishes",
         "xiu=CMD": "execute CMD after all uploads finish and volume is idle",
+        "xbc=CMD": "execute CMD before a file copy",
+        "xac=CMD": "execute CMD after a file copy",
         "xbr=CMD": "execute CMD before a file rename/move",
         "xar=CMD": "execute CMD after a file rename/move",
         "xbd=CMD": "execute CMD before a file delete",
@@ -42,14 +42,14 @@ class Fstab(object):
         self.cache = {}

         fs = "ext4"
-        msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
+        msg = "failed to determine filesystem at %r; assuming %s\n%s"

         if ANYWIN:
             fs = "vfat"
             try:
                 path = self._winpath(path)
             except:
-                self.log(msg.format(path, fs, min_ex()), 3)
+                self.log(msg % (path, fs, min_ex()), 3)
                 return fs

         path = undot(path)
@@ -61,11 +61,11 @@ class Fstab(object):
         try:
             fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
         except:
-            self.log(msg.format(path, fs, min_ex()), 3)
+            self.log(msg % (path, fs, min_ex()), 3)

         fs = fs.lower()
         self.cache[path] = fs
-        self.log("found {} at {}".format(fs, path))
+        self.log("found %s at %r" % (fs, path))
         return fs

     def _winpath(self, path: str) -> str:
@@ -76,6 +76,7 @@ class FtpAuth(DummyAuthorizer):
             else:
                 raise AuthenticationFailed("banned")

+        args = self.hub.args
         asrv = self.hub.asrv
         uname = "*"
         if username != "anonymous":
@@ -86,6 +87,9 @@ class FtpAuth(DummyAuthorizer):
                     uname = zs
                     break

+        if args.ipu and uname == "*":
+            uname = args.ipu_iu[args.ipu_nm.map(ip)]
+
         if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
             g = self.hub.gpwd
             if g.lim:
@@ -292,6 +296,7 @@ class FtpFs(AbstractedFS):
             self.uname,
             not self.args.no_scandir,
             [[True, False], [False, True]],
+            throw=True,
         )
         vfs_ls = [x[0] for x in vfs_ls1]
         vfs_ls.extend(vfs_virt.keys())
copyparty/httpcli.py: 1053 lines changed (diff suppressed because it is too large)
@@ -59,6 +59,8 @@ class HttpConn(object):
         self.asrv: AuthSrv = hsrv.asrv  # mypy404
         self.u2fh: Util.FHC = hsrv.u2fh  # mypy404
         self.pipes: Util.CachedDict = hsrv.pipes  # mypy404
+        self.ipu_iu: Optional[dict[str, str]] = hsrv.ipu_iu
+        self.ipu_nm: Optional[NetMap] = hsrv.ipu_nm
         self.ipa_nm: Optional[NetMap] = hsrv.ipa_nm
         self.xff_nm: Optional[NetMap] = hsrv.xff_nm
         self.xff_lan: NetMap = hsrv.xff_lan  # type: ignore
@@ -1,6 +1,7 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

+import hashlib
 import math
 import os
 import re
@@ -69,6 +70,7 @@ from .util import (
     build_netmap,
     has_resource,
     ipnorm,
+    load_ipu,
     load_resource,
     min_ex,
     shut_socket,
@@ -79,6 +81,7 @@ from .util import (
 )

 if TYPE_CHECKING:
+    from .authsrv import VFS
     from .broker_util import BrokerCli
     from .ssdp import SSDPr

@@ -128,6 +131,12 @@ class HttpSrv(object):
         self.bans: dict[str, int] = {}
         self.aclose: dict[str, int] = {}

+        dli: dict[str, tuple[float, int, "VFS", str, str]] = {}  # info
+        dls: dict[str, tuple[float, int]] = {}  # state
+        self.dli = self.tdli = dli
+        self.dls = self.tdls = dls
+        self.iiam = '<img src="%s.cpr/iiam.gif?cache=i" />' % (self.args.SRS,)
+
         self.bound: set[tuple[str, int]] = set()
         self.name = "hsrv" + nsuf
         self.mutex = threading.Lock()
@@ -143,6 +152,7 @@ class HttpSrv(object):
         self.t_periodic: Optional[threading.Thread] = None

         self.u2fh = FHC()
+        self.u2sc: dict[str, tuple[int, "hashlib._Hash"]] = {}
         self.pipes = CachedDict(0.2)
         self.metrics = Metrics(self)
         self.nreq = 0
@@ -162,19 +172,25 @@ class HttpSrv(object):
         env = jinja2.Environment()
         env.loader = jinja2.FunctionLoader(lambda f: load_jinja2_resource(self.E, f))
         jn = [
-            "splash",
-            "shares",
-            "svcs",
             "browser",
             "browser2",
-            "msg",
+            "cf",
             "md",
             "mde",
-            "cf",
+            "msg",
+            "rups",
+            "shares",
+            "splash",
+            "svcs",
         ]
         self.j2 = {x: env.get_template(x + ".html") for x in jn}
         self.prism = has_resource(self.E, "web/deps/prism.js.gz")

+        if self.args.ipu:
+            self.ipu_iu, self.ipu_nm = load_ipu(self.log, self.args.ipu)
+        else:
+            self.ipu_iu = self.ipu_nm = None
+
         self.ipa_nm = build_netmap(self.args.ipa)
         self.xff_nm = build_netmap(self.args.xff_src)
         self.xff_lan = build_netmap("lan")
@@ -197,6 +213,9 @@ class HttpSrv(object):
             self.start_threads(4)

         if nid:
+            self.tdli = {}
+            self.tdls = {}
+
             if self.args.stackmon:
                 start_stackmon(self.args.stackmon, nid)

@@ -571,3 +590,32 @@ class HttpSrv(object):
                 ident += "a"

         self.u2idx_free[ident] = u2idx
+
+    def read_dls(
+        self,
+    ) -> tuple[
+        dict[str, tuple[float, int, str, str, str]], dict[str, tuple[float, int]]
+    ]:
+        """
+        mp-broker asking for local dl-info + dl-state;
+        reduce overhead by sending just the vfs vpath
+        """
+        dli = {k: (a, b, c.vpath, d, e) for k, (a, b, c, d, e) in self.dli.items()}
+        return (dli, self.dls)
+
+    def write_dls(
+        self,
+        sdli: dict[str, tuple[float, int, str, str, str]],
+        dls: dict[str, tuple[float, int]],
+    ) -> None:
+        """
+        mp-broker pushing total dl-info + dl-state;
+        swap out the vfs vpath with the vfs node
+        """
+        dli: dict[str, tuple[float, int, "VFS", str, str]] = {}
+        for k, (a, b, c, d, e) in sdli.items():
+            vn = self.asrv.vfs.all_nodes[c]
+            dli[k] = (a, b, vn, d, e)
+
+        self.tdli = dli
+        self.tdls = dls
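read_dls/write_dls ship download state between the hub and the workers; since a VFS node does not cross process boundaries cheaply, the sender substitutes the node's vpath string and the receiver rehydrates it from its own all_nodes table. The round-trip in miniature (types are illustrative):

```python
# sketch: replace a heavy or process-local object (here a VFS node) with its
# string key before shipping, and rehydrate from a lookup table on arrival;
# mirrors read_dls/write_dls above.
from typing import NamedTuple

class Node(NamedTuple):
    vpath: str

ALL_NODES = {"": Node(""), "music": Node("music")}

def dehydrate(dls):
    # Node -> vpath (cheap to serialize)
    return {k: (t0, nbytes, node.vpath) for k, (t0, nbytes, node) in dls.items()}

def rehydrate(sdls):
    # vpath -> Node (local object in the receiving process)
    return {k: (t0, nbytes, ALL_NODES[vp]) for k, (t0, nbytes, vp) in sdls.items()}

wire = dehydrate({"dl1": (0.0, 512, ALL_NODES["music"])})
assert rehydrate(wire)["dl1"][2] is ALL_NODES["music"]
```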
@@ -25,6 +25,7 @@ from .stolen.dnslib import (
     DNSHeader,
     DNSQuestion,
     DNSRecord,
+    set_avahi_379,
 )
 from .util import CachedSet, Daemon, Netdev, list_ips, min_ex

@@ -72,6 +73,9 @@ class MDNS(MCast):
         self.ngen = ngen
         self.ttl = 300

+        if not self.args.zm_nwa_1:
+            set_avahi_379()
+
         zs = self.args.name + ".local."
         zs = zs.encode("ascii", "replace").decode("ascii", "replace")
         self.hn = "-".join(x for x in zs.split("?") if x) or (
@@ -336,6 +340,9 @@ class MDNS(MCast):
                 self.log("stopped", 2)
                 return

+            if self.args.zm_no_pe:
+                continue
+
             t = "{} {} \033[33m|{}| {}\n{}".format(
                 self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
             )
@@ -72,6 +72,9 @@ class Metrics(object):
         v = "{:.3f}".format(self.hsrv.t0)
         addug("cpp_boot_unixtime", "seconds", v, t)

+        t = "number of active downloads"
+        addg("cpp_active_dl", str(len(self.hsrv.tdls)), t)
+
         t = "number of open http(s) client connections"
         addg("cpp_http_conns", str(self.hsrv.ncli), t)

@@ -128,7 +131,7 @@ class Metrics(object):
         addbh("cpp_disk_size_bytes", "total HDD size of volume")
         addbh("cpp_disk_free_bytes", "free HDD space in volume")
         for vpath, vol in allvols:
-            free, total = get_df(vol.realpath)
+            free, total, _ = get_df(vol.realpath, False)
             if free is None or total is None:
                 continue

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
 import argparse
 import json
 import os
+import re
 import shutil
 import subprocess as sp
 import sys
@@ -62,6 +63,9 @@ def have_ff(scmd: str) -> bool:
 HAVE_FFMPEG = not os.environ.get("PRTY_NO_FFMPEG") and have_ff("ffmpeg")
 HAVE_FFPROBE = not os.environ.get("PRTY_NO_FFPROBE") and have_ff("ffprobe")

+CBZ_PICS = set("png jpg jpeg gif bmp tga tif tiff webp avif".split())
+CBZ_01 = re.compile(r"(^|[^0-9v])0+[01]\b")
+

 class MParser(object):
     def __init__(self, cmdline: str) -> None:
@@ -126,6 +130,7 @@ def au_unpk(
     log: "NamedLogger", fmt_map: dict[str, str], abspath: str, vn: Optional[VFS] = None
 ) -> str:
     ret = ""
+    maxsz = 1024 * 1024 * 64
     try:
         ext = abspath.split(".")[-1].lower()
         au, pk = fmt_map[ext].split(".")
@@ -148,24 +153,48 @@ def au_unpk(
             zf = zipfile.ZipFile(abspath, "r")
             zil = zf.infolist()
             zil = [x for x in zil if x.filename.lower().split(".")[-1] == au]
+            if not zil:
+                raise Exception("no audio inside zip")
             fi = zf.open(zil[0])

+        elif pk == "cbz":
+            import zipfile
+
+            zf = zipfile.ZipFile(abspath, "r")
+            znil = [(x.filename.lower(), x) for x in zf.infolist()]
+            nf = len(znil)
+            znil = [x for x in znil if x[0].split(".")[-1] in CBZ_PICS]
+            znil = [x for x in znil if "cover" in x[0]] or znil
+            znil = [x for x in znil if CBZ_01.search(x[0])] or znil
+            t = "cbz: %d files, %d hits" % (nf, len(znil))
+            if znil:
+                t += ", using " + znil[0][1].filename
+            log(t)
+            if not znil:
+                raise Exception("no images inside cbz")
+            fi = zf.open(znil[0][1])
+
         else:
             raise Exception("unknown compression %s" % (pk,))

+        fsz = 0
         with os.fdopen(fd, "wb") as fo:
             while True:
                 buf = fi.read(32768)
                 if not buf:
                     break

+                fsz += len(buf)
+                if fsz > maxsz:
+                    raise Exception("zipbomb defused")
+
                 fo.write(buf)

         return ret

     except Exception as ex:
         if ret:
-            t = "failed to decompress audio file [%s]: %r"
+            t = "failed to decompress audio file %r: %r"
             log(t % (abspath, ex))
             wunlink(log, ret, vn.flags if vn else VF_CAREFUL)

@@ -553,7 +582,7 @@ class MTag(object):
                 raise Exception()
         except Exception as ex:
             if self.args.mtag_v:
-                self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
+                self.log("mutagen-err [%s] @ %r" % (ex, abspath), "90")

             return self.get_ffprobe(abspath) if self.can_ffprobe else {}

@@ -670,8 +699,8 @@ class MTag(object):
                     ret[tag] = zj[tag]
             except:
                 if self.args.mtag_v:
-                    t = "mtag error: tagname {}, parser {}, file {} => {}"
-                    self.log(t.format(tagname, parser.bin, abspath, min_ex()))
+                    t = "mtag error: tagname %r, parser %r, file %r => %r"
+                    self.log(t % (tagname, parser.bin, abspath, min_ex()), 6)

         if ap != abspath:
             wunlink(self.log, ap, VF_CAREFUL)
@@ -24,17 +24,13 @@ class PWHash(object):
     def __init__(self, args: argparse.Namespace):
         self.args = args

-        try:
-            alg, ac = args.ah_alg.split(",")
-        except:
-            alg = args.ah_alg
-            ac = {}
+        zsl = args.ah_alg.split(",")
+        alg = zsl[0]

         if alg == "none":
             alg = ""

         self.alg = alg
-        self.ac = ac
+        self.ac = zsl[1:]
         if not alg:
             self.on = False
             self.hash = unicode
@@ -90,17 +86,23 @@ class PWHash(object):
         its = 2
         blksz = 8
         para = 4
+        ramcap = 0  # openssl 1.1 = 32 MiB
         try:
             cost = 2 << int(self.ac[0])
             its = int(self.ac[1])
             blksz = int(self.ac[2])
             para = int(self.ac[3])
+            ramcap = int(self.ac[4]) * 1024 * 1024
         except:
             pass

+        cfg = {"salt": self.salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
+        if ramcap:
+            cfg["maxmem"] = ramcap
+
         ret = plain.encode("utf-8")
         for _ in range(its):
-            ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)
+            ret = hashlib.scrypt(ret, **cfg)

         return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

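The scrypt change builds the keyword arguments once and adds an optional maxmem, since OpenSSL-backed hashlib.scrypt rejects cost parameters that exceed its default memory ceiling (about 32 MiB on OpenSSL 1.1). A runnable sketch with illustrative parameters:

```python
# sketch: scrypt with a configurable memory cap, threaded through as a
# kwarg only when set; parameter values here are illustrative.
import base64, hashlib, os

def scrypt_hash(plain, salt, cost=1024, blksz=8, para=1, its=2, ramcap=0):
    cfg = {"salt": salt, "n": cost, "r": blksz, "p": para, "dklen": 24}
    if ramcap:
        cfg["maxmem"] = ramcap  # bytes; scrypt needs roughly 128 * n * r * p
    ret = plain.encode("utf-8")
    for _ in range(its):  # iterate to raise the total work factor
        ret = hashlib.scrypt(ret, **cfg)
    return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

print(scrypt_hash("hunter2", os.urandom(16)))
```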
@@ -263,7 +263,7 @@ class SMB(object):
             time.time(),
             "",
         ):
-            yeet("blocked by xbu server config: " + vpath)
+            yeet("blocked by xbu server config: %r" % (vpath,))

         ret = bos.open(ap, flags, *a, mode=chmod, **ka)
         if wr:
@@ -8,7 +8,7 @@ from itertools import chain
 from .bimap import Bimap, BimapError
 from .bit import get_bits, set_bits
 from .buffer import BufferError
-from .label import DNSBuffer, DNSLabel
+from .label import DNSBuffer, DNSLabel, set_avahi_379
 from .ranges import IP4, IP6, H, I, check_bytes


@@ -426,7 +426,7 @@ class RR(object):
             if rdlength:
                 rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
             else:
-                rdata = ""
+                rdata = RD(b"a")
             return cls(rname, rtype, rclass, ttl, rdata)
         except (BufferError, BimapError) as e:
             raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
@@ -11,6 +11,23 @@ LDH = set(range(33, 127))
 ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")


+avahi_379 = 0
+
+
+def set_avahi_379():
+    global avahi_379
+    avahi_379 = 1
+
+
+def log_avahi_379(args):
+    global avahi_379
+    if avahi_379 == 2:
+        return
+    avahi_379 = 2
+    t = "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d];\n\033[35m NOTE: this is probably avahi-bug #379, packet corruption in Avahi's mDNS-reflection feature. Copyparty has a workaround and is OK, but other devices need either --zm4 or --zm6"
+    raise BufferError(t % args)
+
+
 class DNSLabelError(Exception):
     pass

@@ -96,8 +113,11 @@ class DNSBuffer(Buffer):
                 )
                 if pointer < self.offset:
                     self.offset = pointer
+                elif avahi_379:
+                    log_avahi_379((self.offset, pointer, len(self.data)))
+                    label.extend(b"a")
+                    break
                 else:
-
                     raise BufferError(
                         "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
                         % (self.offset, pointer, len(self.data))
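The label.py addition works around avahi bug #379: Avahi's mDNS reflector can corrupt packets so that a DNS compression pointer points forward instead of backward. Once set_avahi_379() arms the workaround, such a pointer yields a dummy label instead of aborting the whole packet. A simplified parser showing both behaviours (this is not dnslib's actual code):

```python
# sketch: DNS name compression pointers must point backwards; a forward
# pointer would loop or read garbage, so the parser either fails or, in
# lenient mode, substitutes a dummy label and stops. illustrative only.
def read_label(data, offset, lenient=False):
    parts = []
    while True:
        b = data[offset]
        if b & 0xC0 == 0xC0:  # compression pointer: jump once, then stop
            ptr = ((b & 0x3F) << 8) | data[offset + 1]
            if ptr >= offset:  # forward pointer: corrupt packet
                if lenient:
                    parts.append(b"a")  # dummy label, keep the packet alive
                else:
                    raise ValueError("invalid pointer in DNSLabel")
            else:
                parts.append(read_label(data, ptr, lenient))
            break
        if b == 0:
            break
        parts.append(bytes(data[offset + 1 : offset + 1 + b]))
        offset += 1 + b
    return b".".join(parts)

# 0xC0 0x00 at offset 3 points back to "a" encoded at offset 0
print(read_label(b"\x01a\x00\xc0\x00", 3))  # -> b'a'
```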
@@ -110,7 +110,7 @@ def errdesc(
     report = ["copyparty failed to add the following files to the archive:", ""]

     for fn, err in errors:
-        report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
+        report.extend([" file: %r" % (fn,), "error: %s" % (err,), ""])

     btxt = "\r\n".join(report).encode("utf-8", "replace")
     btxt = vol_san(list(vfs.all_vols.values()), btxt)
@@ -60,6 +60,7 @@ from .util import (
     alltrace,
     ansi_re,
     build_netmap,
+    load_ipu,
     min_ex,
     mp,
     odfusion,
@@ -111,7 +112,7 @@ class SvcHub(object):
         self.stopping = False
         self.stopped = False
         self.reload_req = False
-        self.reloading = 0
+        self.reload_mutex = threading.Lock()
         self.stop_cond = threading.Condition()
         self.nsigs = 3
         self.retcode = 0
@@ -210,6 +211,15 @@ class SvcHub(object):
             t = "WARNING: --s-rd-sz (%d) is larger than --iobuf (%d); this may lead to reduced performance"
             self.log("root", t % (args.s_rd_sz, args.iobuf), 3)

+        zs = ""
+        if args.th_ram_max < 0.22:
+            zs = "generate thumbnails"
+        elif args.th_ram_max < 1:
+            zs = "generate audio waveforms or spectrograms"
+        if zs:
+            t = "WARNING: --th-ram-max is very small (%.2f GiB); will not be able to %s"
+            self.log("root", t % (args.th_ram_max, zs), 3)
+
         if args.chpw and args.idp_h_usr:
             t = "ERROR: user-changeable passwords is incompatible with IdP/identity-providers; you must disable either --chpw or --idp-h-usr"
             self.log("root", t, 1)
@@ -221,9 +231,15 @@ class SvcHub(object):
             noch.update([x for x in zsl if x])
             args.chpw_no = noch

+        if args.ipu:
+            iu, nm = load_ipu(self.log, args.ipu, True)
+            setattr(args, "ipu_iu", iu)
+            setattr(args, "ipu_nm", nm)
+
         if not self.args.no_ses:
             self.setup_session_db()

+        args.shr1 = ""
         if args.shr:
             self.setup_share_db()

@@ -372,6 +388,14 @@ class SvcHub(object):

         self.broker = Broker(self)

+        # create netmaps early to avoid firewall gaps,
+        # but the mutex blocks multiprocessing startup
+        for zs in "ipu_iu ftp_ipa_nm tftp_ipa_nm".split():
+            try:
+                getattr(args, zs).mutex = threading.Lock()
+            except:
+                pass
+
     def setup_session_db(self) -> None:
         if not HAVE_SQLITE3:
             self.args.no_ses = True
@@ -446,6 +470,7 @@ class SvcHub(object):
             raise Exception(t)

         al.shr = "/%s/" % (al.shr,)
+        al.shr1 = al.shr[1:]

         create = True
         modified = False
@@ -755,8 +780,8 @@ class SvcHub(object):
         al.idp_h_grp = al.idp_h_grp.lower()
         al.idp_h_key = al.idp_h_key.lower()

-        al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa)
-        al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa)
+        al.ftp_ipa_nm = build_netmap(al.ftp_ipa or al.ipa, True)
+        al.tftp_ipa_nm = build_netmap(al.tftp_ipa or al.ipa, True)

         mte = ODict.fromkeys(DEF_MTE.split(","), True)
         al.mte = odfusion(mte, al.mte)
@@ -768,7 +793,7 @@ class SvcHub(object):
         al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
         al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))

-        for k in ["no_hash", "no_idx", "og_ua"]:
+        for k in ["no_hash", "no_idx", "og_ua", "srch_excl"]:
             ptn = getattr(self.args, k)
             if ptn:
                 setattr(self.args, k, re.compile(ptn))
@@ -803,6 +828,24 @@ class SvcHub(object):
         if len(al.tcolor) == 3:  # fc5 => ffcc55
             al.tcolor = "".join([x * 2 for x in al.tcolor])

+        zs = al.u2sz
+        zsl = zs.split(",")
+        if len(zsl) not in (1, 3):
+            t = "invalid --u2sz; must be either one number, or a comma-separated list of three numbers (min,default,max)"
+            raise Exception(t)
+        if len(zsl) < 3:
+            zsl = ["1", zs, zs]
+        zi2 = 1
+        for zs in zsl:
+            zi = int(zs)
+            # arbitrary constraint (anything above 2 GiB is probably unintended)
+            if zi < 1 or zi > 2047:
+                raise Exception("invalid --u2sz; minimum is 1, max is 2047")
+            if zi < zi2:
+                raise Exception("invalid --u2sz; values must be equal or ascending")
+            zi2 = zi
+        al.u2sz = ",".join(zsl)
+
         return True

     def _ipa2re(self, txt) -> Optional[re.Pattern]:
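--u2sz now accepts either one number or a min,default,max triple; the validation normalizes the single-value form and enforces ascending values within 1..2047 MiB. Equivalent standalone logic (helper name is hypothetical):

```python
# sketch: normalize a "min,default,max" option that also accepts a single
# value, mirroring the --u2sz validation above.
def parse_u2sz(spec):
    parts = spec.split(",")
    if len(parts) not in (1, 3):
        raise ValueError("need one number or min,default,max")
    if len(parts) < 3:
        parts = ["1", spec, spec]  # single value: min=1, default=max=value
    prev = 1
    for p in parts:
        v = int(p)
        if not 1 <= v <= 2047:  # MiB; anything above 2 GiB is probably a typo
            raise ValueError("out of range")
        if v < prev:
            raise ValueError("values must be equal or ascending")
        prev = v
    return ",".join(parts)

assert parse_u2sz("96") == "1,96,96"
assert parse_u2sz("1,64,96") == "1,64,96"
```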
@@ -970,41 +1013,18 @@ class SvcHub(object):
         except:
             self.log("root", "ssdp startup failed;\n" + min_ex(), 3)

-    def reload(self) -> str:
+    def reload(self, rescan_all_vols: bool, up2k: bool) -> str:
-        with self.up2k.mutex:
+        t = "config has been reloaded"
-            if self.reloading:
+        with self.reload_mutex:
-                return "cannot reload; already in progress"
-            self.reloading = 1
-
-        Daemon(self._reload, "reloading")
-        return "reload initiated"
-
-    def _reload(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
-        with self.up2k.mutex:
-            if self.reloading != 1:
-                return
-            self.reloading = 2
             self.log("root", "reloading config")
             self.asrv.reload(9 if up2k else 4)
             if up2k:
                 self.up2k.reload(rescan_all_vols)
+                t += "; volumes are now reinitializing"
             else:
                 self.log("root", "reload done")
             self.broker.reload()
-            self.reloading = 0
+        return t

-    def _reload_blocking(self, rescan_all_vols: bool = True, up2k: bool = True) -> None:
-        while True:
-            with self.up2k.mutex:
-                if self.reloading < 2:
-                    self.reloading = 1
-                    break
-            time.sleep(0.05)
-
-        # try to handle multiple pending IdP reloads at once:
-        time.sleep(0.2)
-
-        self._reload(rescan_all_vols=rescan_all_vols, up2k=up2k)

     def _reload_sessions(self) -> None:
         with self.asrv.mutex:
@@ -1018,7 +1038,7 @@ class SvcHub(object):

         if self.reload_req:
             self.reload_req = False
-            self.reload()
+            self.reload(True, True)

         self.shutdown()

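The reload refactor deletes the reloading counter, the daemon thread, and the polling _reload_blocking in favour of one synchronous method guarded by a dedicated lock; callers now block until the reload finishes and get a status string back. A skeleton of that shape (method bodies are stand-ins):

```python
# sketch: replacing a hand-rolled "reloading" state machine with a plain
# lock; concurrent reload requests serialize naturally instead of being
# refused or polled for. illustrative skeleton, not the real SvcHub.
import threading

class Hub:
    def __init__(self):
        self.reload_mutex = threading.Lock()

    def reload(self, rescan_all_vols, up2k):
        msg = "config has been reloaded"
        with self.reload_mutex:  # queues concurrent reloads instead of refusing
            self._reload_config()
            if up2k:
                self._reload_volumes(rescan_all_vols)
                msg += "; volumes are now reinitializing"
        return msg

    def _reload_config(self):
        pass  # stand-in for the real work

    def _reload_volumes(self, rescan):
        pass

print(Hub().reload(True, True))
```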
@@ -100,7 +100,7 @@ def gen_hdr(

     # spec says to put zeros when !crc if bit3 (streaming)
     # however infozip does actual sz and it even works on winxp
-    # (same reasning for z64 extradata later)
+    # (same reasoning for z64 extradata later)
     vsz = 0xFFFFFFFF if z64 else sz
     ret += spack(b"<LL", vsz, vsz)

@@ -371,7 +371,7 @@ class TcpSrv(object):
             if self.args.q:
                 print(msg)

-            self.hub.broker.say("listen", srv)
+            self.hub.broker.say("httpsrv.listen", srv)

         self.srv = srvs
         self.bound = bound
@@ -379,7 +379,7 @@ class TcpSrv(object):
         self._distribute_netdevs()

     def _distribute_netdevs(self):
-        self.hub.broker.say("set_netdevs", self.netdevs)
+        self.hub.broker.say("httpsrv.set_netdevs", self.netdevs)
         self.hub.start_zeroconf()
         gencert(self.log, self.args, self.netdevs)
         self.hub.restart_ftpd()
@@ -402,17 +402,17 @@ class TcpSrv(object):
             if not netdevs:
                 continue

-            added = "nothing"
-            removed = "nothing"
+            add = []
+            rem = []
             for k, v in netdevs.items():
                 if k not in self.netdevs:
-                    added = "{} = {}".format(k, v)
+                    add.append("\n added %s = %s" % (k, v))
             for k, v in self.netdevs.items():
                 if k not in netdevs:
-                    removed = "{} = {}".format(k, v)
+                    rem.append("\nremoved %s = %s" % (k, v))

-            t = "network change detected:\n added {}\033[0;33m\nremoved {}"
-            self.log("tcpsrv", t.format(added, removed), 3)
+            t = "network change detected:\033[32m%s\033[33m%s"
+            self.log("tcpsrv", t % ("".join(add), "".join(rem)), 3)
             self.netdevs = netdevs
             self._distribute_netdevs()

@@ -269,6 +269,7 @@ class Tftpd(object):
             "*",
             not self.args.no_scandir,
             [[True, False]],
+            throw=True,
         )
         dnames = set([x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)])
         dirs1 = [(v.st_mtime, v.st_size, k + "/") for k, v in vfs_ls if k in dnames]
@@ -356,7 +357,7 @@ class Tftpd(object):
             time.time(),
             "",
         ):
-            yeet("blocked by xbu server config: " + vpath)
+            yeet("blocked by xbu server config: %r" % (vpath,))

         if not self.args.tftp_nols and bos.path.isdir(ap):
             return self._ls(vpath, "", 0, True)
@@ -109,13 +109,13 @@ class ThumbCli(object):
             fmt = sfmt

         elif fmt[:1] == "p" and not is_au and not is_vid:
-            t = "cannot thumbnail [%s]: png only allowed for waveforms"
-            self.log(t % (rem), 6)
+            t = "cannot thumbnail %r: png only allowed for waveforms"
+            self.log(t % (rem,), 6)
             return None

         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None

         tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
@@ -20,7 +20,6 @@ from .util import (
     FFMPEG_URL,
     Cooldown,
     Daemon,
-    Pebkac,
     afsenc,
     fsenc,
     min_ex,

@@ -164,6 +163,7 @@ class ThumbSrv(object):
         self.ram: dict[str, float] = {}
         self.memcond = threading.Condition(self.mutex)
         self.stopping = False
+        self.rm_nullthumbs = True  # forget failed conversions on startup
         self.nthr = max(1, self.args.th_mt)

         self.q: Queue[Optional[tuple[str, str, str, VFS]]] = Queue(self.nthr * 4)

@@ -239,7 +239,7 @@ class ThumbSrv(object):
     def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None

         tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)

@@ -249,7 +249,7 @@ class ThumbSrv(object):
         with self.mutex:
             try:
                 self.busy[tpath].append(cond)
-                self.log("joined waiting room for %s" % (tpath,))
+                self.log("joined waiting room for %r" % (tpath,))
             except:
                 thdir = os.path.dirname(tpath)
                 bos.makedirs(os.path.join(thdir, "w"))

@@ -266,11 +266,11 @@ class ThumbSrv(object):
                 allvols = list(self.asrv.vfs.all_vols.values())
                 vn = next((x for x in allvols if x.realpath == ptop), None)
                 if not vn:
-                    self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
+                    self.log("ptop %r not in %s" % (ptop, allvols), 3)
                     vn = self.asrv.vfs.all_aps[0][1]

                 self.q.put((abspath, tpath, fmt, vn))
-                self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
+                self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)

         while not self.stopping:
             with self.mutex:

@@ -375,8 +375,8 @@ class ThumbSrv(object):
                         fun(ap_unpk, ttpath, fmt, vn)
                         break
                     except Exception as ex:
-                        msg = "{} could not create thumbnail of {}\n{}"
-                        msg = msg.format(fun.__name__, abspath, min_ex())
+                        msg = "%s could not create thumbnail of %r\n%s"
+                        msg = msg % (fun.__name__, abspath, min_ex())
                         c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
                         self.log(msg, c)
                         if getattr(ex, "returncode", 0) != 321:

@@ -862,7 +862,6 @@ class ThumbSrv(object):
     def cleaner(self) -> None:
         interval = self.args.th_clean
         while True:
-            time.sleep(interval)
             ndirs = 0
             for vol, histpath in self.asrv.vfs.histtab.items():
                 if histpath.startswith(vol):

@@ -876,6 +875,8 @@ class ThumbSrv(object):
                     self.log("\033[Jcln err in %s: %r" % (histpath, ex), 3)

             self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
+            self.rm_nullthumbs = False
+            time.sleep(interval)

     def clean(self, histpath: str) -> int:
         ret = 0

@@ -896,7 +897,9 @@ class ThumbSrv(object):
         prev_b64 = None
         prev_fp = ""
         try:
-            t1 = statdir(self.log_func, not self.args.no_scandir, False, thumbpath)
+            t1 = statdir(
+                self.log_func, not self.args.no_scandir, False, thumbpath, False
+            )
             ents = sorted(list(t1))
         except:
             return 0

@@ -937,6 +940,10 @@ class ThumbSrv(object):

                     continue

+            if self.rm_nullthumbs and not inf.st_size:
+                bos.unlink(fp)
+                continue
+
             if b64 == prev_b64:
                 self.log("rm replaced [{}]".format(fp))
                 bos.unlink(prev_fp)
@@ -70,6 +70,9 @@ class U2idx(object):
         self.log_func("u2idx", msg, c)

     def shutdown(self) -> None:
+        if not HAVE_SQLITE3:
+            return
+
         for cur in self.cur.values():
             db = cur.connection
             try:

@@ -80,6 +83,12 @@ class U2idx(object):
                 cur.close()
                 db.close()

+        for cur in (self.mem_cur, self.sh_cur):
+            if cur:
+                db = cur.connection
+                cur.close()
+                db.close()
+
     def fsearch(
         self, uname: str, vols: list[VFS], body: dict[str, Any]
     ) -> list[dict[str, Any]]:

@@ -95,7 +104,7 @@ class U2idx(object):
         uv: list[Union[str, int]] = [wark[:16], wark]

         try:
-            return self.run_query(uname, vols, uq, uv, False, 99999)[0]
+            return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
         except:
             raise Pebkac(500, min_ex())

@@ -127,7 +136,7 @@ class U2idx(object):
         ptop = vn.realpath
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None

         db_path = os.path.join(histpath, "up2k.db")

@@ -142,7 +151,7 @@ class U2idx(object):
             db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
             cur = db.cursor()
             cur.execute('pragma table_info("up")').fetchone()
-            self.log("ro: {}".format(db_path))
+            self.log("ro: %r" % (db_path,))
         except:
             self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
             # may not fail until the pragma so unset it

@@ -152,7 +161,7 @@ class U2idx(object):
             # on windows, this steals the write-lock from up2k.deferred_init --
             # seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
             cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
-            self.log("opened {}".format(db_path))
+            self.log("opened %r" % (db_path,))

         self.cur[ptop] = cur
         return cur

@@ -301,7 +310,7 @@ class U2idx(object):
             q += " lower({}) {} ? ) ".format(field, oper)

         try:
-            return self.run_query(uname, vols, q, va, have_mt, lim)
+            return self.run_query(uname, vols, q, va, have_mt, True, lim)
         except Exception as ex:
             raise Pebkac(500, repr(ex))

@@ -312,9 +321,11 @@ class U2idx(object):
         uq: str,
         uv: list[Union[str, int]],
         have_mt: bool,
+        sort: bool,
         lim: int,
     ) -> tuple[list[dict[str, Any]], list[str], bool]:
-        if self.args.srch_dbg:
+        dbg = self.args.srch_dbg
+        if dbg:
             t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
             zs = "\n ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
             self.log(t % (len(vols), zs), 5)

@@ -357,14 +368,14 @@ class U2idx(object):
             if not cur:
                 continue

-            excl = []
-            for vp2 in self.asrv.vfs.all_vols.keys():
-                if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
-                    excl.append(vp2[len(vtop) :].lstrip("/"))
+            dots = flags.get("dotsrch") and uname in vol.axs.udot
+            zs = "srch_re_dots" if dots else "srch_re_nodot"
+            rex: re.Pattern = flags.get(zs)  # type: ignore

-            if self.args.srch_dbg:
-                t = "searching in volume /%s (%s), excludelist %s"
-                self.log(t % (vtop, ptop, excl), 5)
+            if dbg:
+                t = "searching in volume /%s (%s), excluding %s"
+                self.log(t % (vtop, ptop, rex.pattern), 5)
+                rex_cfg: Optional[re.Pattern] = flags.get("srch_excl")

             self.active_cur = cur

@@ -377,7 +388,6 @@ class U2idx(object):

             sret = []
             fk = flags.get("fk")
-            dots = flags.get("dotsrch") and uname in vol.axs.udot
             fk_alg = 2 if "fka" in flags else 1
             c = cur.execute(uq, tuple(vuv))
             for hit in c:

@@ -386,20 +396,23 @@ class U2idx(object):
                 if rd.startswith("//") or fn.startswith("//"):
                     rd, fn = s3dec(rd, fn)

-                if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
-                    if self.args.srch_dbg:
-                        zs = vjoin(vjoin(vtop, rd), fn)
-                        t = "database inconsistency in volume '/%s'; ignoring: %s"
-                        self.log(t % (vtop, zs), 1)
-                    continue
-
-                rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
-                if not dots and "/." in ("/" + rp):
-                    continue
-
-                if rp in seen_rps:
-                    continue
+                vp = vjoin(vjoin(vtop, rd), fn)
+
+                if vp in seen_rps:
+                    continue
+
+                if rex.search(vp):
+                    if dbg:
+                        if rex_cfg and rex_cfg.search(vp):  # type: ignore
+                            self.log("filtered by srch_excl: %s" % (vp,), 6)
+                        elif not dots and "/." in ("/" + vp):
+                            pass
+                        else:
+                            t = "database inconsistency in volume '/%s'; ignoring: %s"
+                            self.log(t % (vtop, vp), 1)
+                    continue
+
+                rp = quotep(vp)
                 if not fk:
                     suf = ""
                 else:

@@ -421,7 +434,7 @@ class U2idx(object):
                 if lim < 0:
                     break

-                if self.args.srch_dbg:
+                if dbg:
                     t = "in volume '/%s': hit: %s"
                     self.log(t % (vtop, rp), 5)

@@ -451,13 +464,14 @@ class U2idx(object):
             ret.extend(sret)
             # print("[{}] {}".format(ptop, sret))

-            if self.args.srch_dbg:
+            if dbg:
                 t = "in volume '/%s': got %d hits, %d total so far"
                 self.log(t % (vtop, len(sret), len(ret)), 5)

         done_flag.append(True)
         self.active_id = ""

+        if sort:
             ret.sort(key=itemgetter("rp"))

         return ret, list(taglist.keys()), lim < 0 and not clamped
(file diff suppressed because it is too large)
@@ -213,6 +213,9 @@ except:
 ansi_re = re.compile("\033\\[[^mK]*[mK]")


+BOS_SEP = ("%s" % (os.sep,)).encode("ascii")
+
+
 surrogateescape.register_surrogateescape()
 if WINDOWS and PY2:
     FS_ENCODING = "utf-8"

@@ -433,6 +436,27 @@ UNHUMANIZE_UNITS = {
 VF_CAREFUL = {"mv_re_t": 5, "rm_re_t": 5, "mv_re_r": 0.1, "rm_re_r": 0.1}


+def read_ram() -> tuple[float, float]:
+    a = b = 0
+    try:
+        with open("/proc/meminfo", "rb", 0x10000) as f:
+            zsl = f.read(0x10000).decode("ascii", "replace").split("\n")
+
+        p = re.compile("^MemTotal:.* kB")
+        zs = next((x for x in zsl if p.match(x)))
+        a = int((int(zs.split()[1]) / 0x100000) * 100) / 100
+
+        p = re.compile("^MemAvailable:.* kB")
+        zs = next((x for x in zsl if p.match(x)))
+        b = int((int(zs.split()[1]) / 0x100000) * 100) / 100
+    except:
+        pass
+    return a, b
+
+
+RAM_TOTAL, RAM_AVAIL = read_ram()
+
+
 pybin = sys.executable or ""
 if EXE:
     pybin = ""
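A note on the `read_ram` helper added above: it parses `/proc/meminfo` and reports (total, available) memory in GiB, truncated to two decimals, falling back to `(0, 0)` on platforms without procfs. Below is a standalone sketch of the same conversion; the `meminfo` sample is made up for illustration.

```python
import re

# hypothetical /proc/meminfo excerpt; real files contain many more rows
meminfo = "MemTotal:       16303428 kB\nMemAvailable:   11237804 kB\n"

def parse_gib(key: str) -> float:
    # same arithmetic as read_ram() above: kB -> GiB, truncated to 2 decimals
    m = re.search(r"^%s:\s+(\d+) kB" % (key,), meminfo, re.M)
    return int((int(m.group(1)) / 0x100000) * 100) / 100 if m else 0

print(parse_gib("MemTotal"), parse_gib("MemAvailable"))  # 15.54 10.71
```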
@@ -665,11 +689,22 @@ class HLog(logging.Handler):


 class NetMap(object):
-    def __init__(self, ips: list[str], cidrs: list[str], keep_lo=False) -> None:
+    def __init__(
+        self,
+        ips: list[str],
+        cidrs: list[str],
+        keep_lo=False,
+        strict_cidr=False,
+        defer_mutex=False,
+    ) -> None:
         """
         ips: list of plain ipv4/ipv6 IPs, not cidr
         cidrs: list of cidr-notation IPs (ip/prefix)
         """

+        # fails multiprocessing; defer assignment
+        self.mutex: Optional[threading.Lock] = None if defer_mutex else threading.Lock()
+
         if "::" in ips:
             ips = [x for x in ips if x != "::"] + list(
                 [x.split("/")[0] for x in cidrs if ":" in x]

@@ -696,7 +731,7 @@ class NetMap(object):
             bip = socket.inet_pton(fam, ip.split("/")[0])
             self.bip.append(bip)
             self.b2sip[bip] = ip.split("/")[0]
-            self.b2net[bip] = (IPv6Network if v6 else IPv4Network)(ip, False)
+            self.b2net[bip] = (IPv6Network if v6 else IPv4Network)(ip, strict_cidr)

         self.bip.sort(reverse=True)

@@ -707,8 +742,13 @@ class NetMap(object):
         try:
             return self.cache[ip]
         except:
-            pass
+            # intentionally crash the calling thread if unset:
+            assert self.mutex  # type: ignore # !rm

+        with self.mutex:
+            return self._map(ip)
+
+    def _map(self, ip: str) -> str:
         v6 = ":" in ip
         ci = IPv6Address(ip) if v6 else IPv4Address(ip)
         bip = next((x for x in self.bip if ci in self.b2net[x]), None)
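The `defer_mutex` flag above exists because a `threading.Lock` cannot be pickled, so a NetMap that must first cross a multiprocessing boundary is created without one; the `assert self.mutex` then crashes loudly if the owner forgot to assign it afterwards. A minimal sketch of that pattern (the class here is illustrative, not the real NetMap):

```python
import threading
from typing import Optional

class DeferredLockBox(object):
    def __init__(self, defer_mutex: bool = False) -> None:
        # locks don't pickle; skip creating one until inside the worker
        self.mutex: Optional[threading.Lock] = (
            None if defer_mutex else threading.Lock()
        )

    def finish_init(self) -> None:
        # called once in the process that will actually use the object
        self.mutex = threading.Lock()

    def lookup(self) -> str:
        assert self.mutex  # intentionally crash if finish_init was skipped
        with self.mutex:
            return "ok"
```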
@@ -984,7 +1024,7 @@ class ProgressPrinter(threading.Thread):
             now = time.time()
             if msg and now - tp > 10:
                 tp = now
                 self.log("progress: %r" % (msg,), 6)

             if no_stdout:
                 continue
@@ -1011,6 +1051,7 @@ class MTHash(object):
         self.sz = 0
         self.csz = 0
         self.stop = False
+        self.readsz = 1024 * 1024 * (2 if (RAM_AVAIL or 2) < 1 else 12)
         self.omutex = threading.Lock()
         self.imutex = threading.Lock()
         self.work_q: Queue[int] = Queue()
@@ -1086,7 +1127,7 @@ class MTHash(object):
         while chunk_rem > 0:
             with self.imutex:
                 f.seek(ofs)
-                buf = f.read(min(chunk_rem, 1024 * 1024 * 12))
+                buf = f.read(min(chunk_rem, self.readsz))

                 if not buf:
                     raise Exception("EOF at " + str(ofs))
@@ -1585,7 +1626,7 @@ class MultipartParser(object):
         (only the fallback non-js uploader relies on these filenames)
         """
         for ln in read_header(self.sr, 2, 2592000):
-            self.log(ln)
+            self.log(repr(ln))

             m = self.re_ctype.match(ln)
             if m:
@@ -1876,11 +1917,11 @@ def gen_filekey_dbg(
         if p2 != fspath:
             raise Exception()
     except:
-        t = "maybe wrong abspath for filekey;\norig: {}\nreal: {}"
-        log(t.format(fspath, p2), 1)
+        t = "maybe wrong abspath for filekey;\norig: %r\nreal: %r"
+        log(t % (fspath, p2), 1)

-    t = "fk({}) salt({}) size({}) inode({}) fspath({}) at({})"
-    log(t.format(ret[:8], salt, fsize, inode, fspath, ctx), 5)
+    t = "fk(%s) salt(%s) size(%d) inode(%d) fspath(%r) at(%s)"
+    log(t % (ret[:8], salt, fsize, inode, fspath, ctx), 5)

     return ret
@@ -2185,6 +2226,23 @@ def unquotep(txt: str) -> str:
     return w8dec(unq2)


+def vroots(vp1: str, vp2: str) -> tuple[str, str]:
+    """
+    input("q/w/e/r","a/s/d/e/r") output("/q/w/","/a/s/d/")
+    """
+    while vp1 and vp2:
+        zt1 = vp1.rsplit("/", 1) if "/" in vp1 else ("", vp1)
+        zt2 = vp2.rsplit("/", 1) if "/" in vp2 else ("", vp2)
+        if zt1[1] != zt2[1]:
+            break
+        vp1 = zt1[0]
+        vp2 = zt2[0]
+    return (
+        "/%s/" % (vp1,) if vp1 else "/",
+        "/%s/" % (vp2,) if vp2 else "/",
+    )
+
+
 def vsplit(vpath: str) -> tuple[str, str]:
     if "/" not in vpath:
         return "", vpath
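A quick sanity-check of the `vroots` helper added above, matching its docstring (assumes the function is in scope):

```python
# the shared suffix "e/r" is stripped from both vpaths:
assert vroots("q/w/e/r", "a/s/d/e/r") == ("/q/w/", "/a/s/d/")
# identical vpaths collapse all the way down to the roots:
assert vroots("x/y", "x/y") == ("/", "/")
```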
@@ -2219,7 +2277,7 @@ def log_reloc(
     rem: str,
 ) -> None:
     nap, nvp, nfn, (nvn, nrem) = pm
-    t = "reloc %s:\nold ap [%s]\nnew ap [%s\033[36m/%s\033[0m]\nold vp [%s]\nnew vp [%s\033[36m/%s\033[0m]\nold fn [%s]\nnew fn [%s]\nold vfs [%s]\nnew vfs [%s]\nold rem [%s]\nnew rem [%s]"
+    t = "reloc %s:\nold ap %r\nnew ap %r\033[36m/%r\033[0m\nold vp %r\nnew vp %r\033[36m/%r\033[0m\nold fn %r\nnew fn %r\nold vfs %r\nnew vfs %r\nold rem %r\nnew rem %r"
     log(t % (re, ap, nap, nfn, vp, nvp, nfn, fn, nfn, vn.vpath, nvn.vpath, rem, nrem))
@@ -2390,7 +2448,7 @@ def lsof(log: "NamedLogger", abspath: str) -> None:
     try:
         rc, so, se = runcmd([b"lsof", b"-R", fsenc(abspath)], timeout=45)
         zs = (so.strip() + "\n" + se.strip()).strip()
-        log("lsof {} = {}\n{}".format(abspath, rc, zs), 3)
+        log("lsof %r = %s\n%s" % (abspath, rc, zs), 3)
     except:
         log("lsof failed; " + min_ex(), 3)
@@ -2426,17 +2484,17 @@ def _fs_mvrm(
     for attempt in range(90210):
         try:
             if ino and os.stat(bsrc).st_ino != ino:
-                t = "src inode changed; aborting %s %s"
+                t = "src inode changed; aborting %s %r"
                 log(t % (act, src), 1)
                 return False
             if (dst and not atomic) and os.path.exists(bdst):
-                t = "something appeared at dst; aborting rename [%s] ==> [%s]"
+                t = "something appeared at dst; aborting rename %r ==> %r"
                 log(t % (src, dst), 1)
                 return False
             osfun(*args)
             if attempt:
                 now = time.time()
-                t = "%sd in %.2f sec, attempt %d: %s"
+                t = "%sd in %.2f sec, attempt %d: %r"
                 log(t % (act, now - t0, attempt + 1, src))
             return True
         except OSError as ex:
@@ -2448,7 +2506,7 @@ def _fs_mvrm(
             if not attempt:
                 if not PY2:
                     ino = os.stat(bsrc).st_ino
-                t = "%s failed (err.%d); retrying for %d sec: [%s]"
+                t = "%s failed (err.%d); retrying for %d sec: %r"
                 log(t % (act, ex.errno, maxtime + 0.99, src))

             time.sleep(chill)
@@ -2486,23 +2544,28 @@ def wunlink(log: "NamedLogger", abspath: str, flags: dict[str, Any]) -> bool:
     return _fs_mvrm(log, abspath, "", False, flags)


-def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
+def get_df(abspath: str, prune: bool) -> tuple[Optional[int], Optional[int], str]:
     try:
-        # some fuses misbehave
-        assert ctypes  # type: ignore # !rm
+        ap = fsenc(abspath)
+        while prune and not os.path.isdir(ap) and BOS_SEP in ap:
+            # strip leafs until it hits an existing folder
+            ap = ap.rsplit(BOS_SEP, 1)[0]
+
         if ANYWIN:
+            assert ctypes  # type: ignore # !rm
+            abspath = fsdec(ap)
             bfree = ctypes.c_ulonglong(0)
             ctypes.windll.kernel32.GetDiskFreeSpaceExW(  # type: ignore
                 ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
             )
-            return (bfree.value, None)
+            return (bfree.value, None, "")
         else:
-            sv = os.statvfs(fsenc(abspath))
+            sv = os.statvfs(ap)
             free = sv.f_frsize * sv.f_bfree
             total = sv.f_frsize * sv.f_blocks
-            return (free, total)
-    except:
-        return (None, None)
+            return (free, total, "")
+    except Exception as ex:
+        return (None, None, repr(ex))


 if not ANYWIN and not MACOS:
@@ -2640,18 +2703,35 @@ def list_ips() -> list[str]:
     return list(ret)


-def build_netmap(csv: str):
+def build_netmap(csv: str, defer_mutex: bool = False):
     csv = csv.lower().strip()

     if csv in ("any", "all", "no", ",", ""):
         return None

-    if csv in ("lan", "local", "private", "prvt"):
-        csv = "10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16, fd00::/8"  # lan
-        csv += ", 169.254.0.0/16, fe80::/10"  # link-local
-        csv += ", 127.0.0.0/8, ::1/128"  # loopback
-
     srcs = [x.strip() for x in csv.split(",") if x.strip()]
+
+    expanded_shorthands = False
+    for shorthand in ("lan", "local", "private", "prvt"):
+        if shorthand in srcs:
+            if not expanded_shorthands:
+                srcs += [
+                    # lan:
+                    "10.0.0.0/8",
+                    "172.16.0.0/12",
+                    "192.168.0.0/16",
+                    "fd00::/8",
+                    # link-local:
+                    "169.254.0.0/16",
+                    "fe80::/10",
+                    # loopback:
+                    "127.0.0.0/8",
+                    "::1/128",
+                ]
+                expanded_shorthands = True
+
+            srcs.remove(shorthand)
+
     if not HAVE_IPV6:
         srcs = [x for x in srcs if ":" not in x]
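The rewrite above changes the `lan`/`local`/`private`/`prvt` shorthands from a whole-string match into per-item expansion, so a mixed list such as `lan, 1.2.3.4` now works too. A simplified standalone sketch of the expansion logic:

```python
LAN_CIDRS = [
    "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16", "fd00::/8",  # lan
    "169.254.0.0/16", "fe80::/10",  # link-local
    "127.0.0.0/8", "::1/128",  # loopback
]

def expand_shorthands(csv: str) -> list:
    # simplified re-implementation of the loop in the hunk above
    srcs = [x.strip() for x in csv.lower().split(",") if x.strip()]
    done = False
    for sh in ("lan", "local", "private", "prvt"):
        if sh in srcs:
            if not done:
                srcs += LAN_CIDRS
                done = True
            srcs.remove(sh)
    return srcs

print(expand_shorthands("lan, 1.2.3.4"))  # ['1.2.3.4'] followed by LAN_CIDRS
```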
@@ -2675,7 +2755,34 @@
         cidrs.append(zs)

     ips = [x.split("/")[0] for x in cidrs]
-    return NetMap(ips, cidrs, True)
+    return NetMap(ips, cidrs, True, False, defer_mutex)
+
+
+def load_ipu(
+    log: "RootLogger", ipus: list[str], defer_mutex: bool = False
+) -> tuple[dict[str, str], NetMap]:
+    ip_u = {"": "*"}
+    cidr_u = {}
+    for ipu in ipus:
+        try:
+            cidr, uname = ipu.split("=")
+            cip, csz = cidr.split("/")
+        except:
+            t = "\n  invalid value %r for argument --ipu; must be CIDR=UNAME (192.168.0.0/16=amelia)"
+            raise Exception(t % (ipu,))
+        uname2 = cidr_u.get(cidr)
+        if uname2 is not None:
+            t = "\n  invalid value %r for argument --ipu; cidr %s already mapped to %r"
+            raise Exception(t % (ipu, cidr, uname2))
+        cidr_u[cidr] = uname
+        ip_u[cip] = uname
+    try:
+        nm = NetMap(["::"], list(cidr_u.keys()), True, True, defer_mutex)
+    except Exception as ex:
+        t = "failed to translate --ipu into netmap, probably due to invalid config: %r"
+        log("root", t % (ex,), 1)
+        raise
+    return ip_u, nm


 def yieldfile(fn: str, bufsz: int) -> Generator[bytes, None, None]:
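How the new `load_ipu` would be fed: each `--ipu` rule is `CIDR=UNAME`, and the result is a username lookup table plus a strict NetMap over those CIDRs. The values below are hypothetical; `log` stands for any RootLogger-style callable.

```python
# hypothetical --ipu rules
ipus = ["192.168.0.0/16=amelia", "10.1.2.0/24=buildbot"]
ip_u, nm = load_ipu(log, ipus)

# ip_u maps each bare network address to its username; "" -> "*" is the fallback:
# {"": "*", "192.168.0.0": "amelia", "10.1.2.0": "buildbot"}
```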
@@ -2689,12 +2796,34 @@ def yieldfile(fn: str, bufsz: int) -> Generator[bytes, None, None]:
         yield buf


+def justcopy(
+    fin: Generator[bytes, None, None],
+    fout: Union[typing.BinaryIO, typing.IO[Any]],
+    hashobj: Optional["hashlib._Hash"],
+    max_sz: int,
+    slp: float,
+) -> tuple[int, str, str]:
+    tlen = 0
+    for buf in fin:
+        tlen += len(buf)
+        if max_sz and tlen > max_sz:
+            continue
+
+        fout.write(buf)
+        if slp:
+            time.sleep(slp)
+
+    return tlen, "checksum-disabled", "checksum-disabled"
+
+
 def hashcopy(
     fin: Generator[bytes, None, None],
     fout: Union[typing.BinaryIO, typing.IO[Any]],
-    slp: float = 0,
-    max_sz: int = 0,
+    hashobj: Optional["hashlib._Hash"],
+    max_sz: int,
+    slp: float,
 ) -> tuple[int, str, str]:
-    hashobj = hashlib.sha512()
+    if not hashobj:
+        hashobj = hashlib.sha512()
     tlen = 0
     for buf in fin:
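Worth noting about the two hunks above: `justcopy` shares `hashcopy`'s new positional signature `(fin, fout, hashobj, max_sz, slp)` and returns placeholder digests, so a caller can pick the copier with a plain conditional. A hedged sketch of that calling pattern; `nohash`, `reader` and `fobj` are assumed names, not from this diff:

```python
# choose between the checksumming and the plain copier at the callsite
copier = justcopy if nohash else hashcopy
tlen, sha_b64, sha_hex = copier(reader, fobj, None, max_sz, slp)
# with justcopy, sha_b64 and sha_hex are both "checksum-disabled"
```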
@@ -2721,7 +2850,10 @@ def sendfile_py(
     bufsz: int,
     slp: float,
     use_poll: bool,
+    dls: dict[str, tuple[float, int]],
+    dl_id: str,
 ) -> int:
+    sent = 0
     remains = upper - lower
     f.seek(lower)
     while remains > 0:
@@ -2738,6 +2870,10 @@
         except:
             return remains

+        if dl_id:
+            sent += len(buf)
+            dls[dl_id] = (time.time(), sent)
+
     return 0
@@ -2750,6 +2886,8 @@ def sendfile_kern(
     bufsz: int,
     slp: float,
     use_poll: bool,
+    dls: dict[str, tuple[float, int]],
+    dl_id: str,
 ) -> int:
     out_fd = s.fileno()
     in_fd = f.fileno()
@@ -2762,7 +2900,7 @@
     while ofs < upper:
         stuck = stuck or time.time()
         try:
-            req = min(2 ** 30, upper - ofs)
+            req = min(0x2000000, upper - ofs)  # 32 MiB
             if use_poll:
                 poll.poll(10000)
             else:
@@ -2786,13 +2924,16 @@
             return upper - ofs

         ofs += n
+        if dl_id:
+            dls[dl_id] = (time.time(), ofs - lower)
+
         # print("sendfile: ok, sent {} now, {} total, {} remains".format(n, ofs - lower, upper - ofs))

     return 0


 def statdir(
-    logger: Optional["RootLogger"], scandir: bool, lstat: bool, top: str
+    logger: Optional["RootLogger"], scandir: bool, lstat: bool, top: str, throw: bool
 ) -> Generator[tuple[str, os.stat_result], None, None]:
     if lstat and ANYWIN:
         lstat = False
@@ -2828,6 +2969,12 @@
                 logger(src, "[s] {} @ {}".format(repr(ex), fsdec(abspath)), 6)

     except Exception as ex:
+        if throw:
+            zi = getattr(ex, "errno", 0)
+            if zi == errno.ENOENT:
+                raise Pebkac(404, str(ex))
+            raise
+
         t = "{} @ {}".format(repr(ex), top)
         if logger:
             logger(src, t, 1)
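The `throw=True` branch added to `statdir` above escalates listing failures instead of only logging them: a missing directory becomes a `Pebkac(404, ...)` (copyparty's HTTP-error exception) and anything else re-raises. An equivalent standalone sketch of that control flow, with `Pebkac` assumed in scope:

```python
import errno
import os

def list_or_404(top: str):
    # mirrors the throw=True path in statdir() above
    try:
        return list(os.scandir(top))
    except Exception as ex:
        if getattr(ex, "errno", 0) == errno.ENOENT:
            raise Pebkac(404, str(ex))  # assumed import from copyparty.util
        raise
```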
@@ -2836,7 +2983,7 @@


 def dir_is_empty(logger: "RootLogger", scandir: bool, top: str):
-    for _ in statdir(logger, scandir, False, top):
+    for _ in statdir(logger, scandir, False, top, False):
         return False
     return True
@@ -2849,7 +2996,7 @@ def rmdirs(
         top = os.path.dirname(top)
         depth -= 1

-    stats = statdir(logger, scandir, lstat, top)
+    stats = statdir(logger, scandir, lstat, top, False)
     dirs = [x[0] for x in stats if stat.S_ISDIR(x[1].st_mode)]
     dirs = [os.path.join(top, x) for x in dirs]
     ok = []
@@ -3379,7 +3526,6 @@ def runhook(
     txt: str,
 ) -> dict[str, Any]:
     assert broker or up2k  # !rm
-    asrv = (broker or up2k).asrv
     args = (broker or up2k).args
     vp = vp.replace("\\", "/")
     ret = {"rc": 0}
@@ -3389,7 +3535,7 @@ def runhook(
             log, src, cmd, ap, vp, host, uname, perms, mt, sz, ip, at, txt
         )
         if log and args.hook_v:
-            log("hook(%s) [%s] => \033[32m%s" % (src, cmd, hr), 6)
+            log("hook(%s) %r => \033[32m%s" % (src, cmd, hr), 6)
         if not hr:
             return {}
         for k, v in hr.items():
@@ -32,7 +32,7 @@ window.baguetteBox = (function () {
         scrollCSS = ['', ''],
         scrollTimer = 0,
         re_i = /^[^?]+\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp)(\?|$)/i,
-        re_v = /^[^?]+\.(webm|mkv|mp4)(\?|$)/i,
+        re_v = /^[^?]+\.(webm|mkv|mp4|m4v|mov)(\?|$)/i,
         anims = ['slideIn', 'fadeIn', 'none'],
         data = {}, // all galleries
         imagesElements = [],
@@ -188,7 +188,6 @@ html.y {
     --srv-1: #555;
     --srv-2: #c83;
     --srv-3: #c0a;
-    --srv-3b: rgba(255,68,204,0.6);

     --tree-bg: #fff;

@@ -286,6 +285,7 @@ html.bz {
     --f-h-b1: #34384e;
     --mp-sh: #11121d;
     /*--mp-b-bg: #2c3044;*/
+    --f-play-bg: var(--btn-1-bg);
 }
 html.by {
     --bg: #f2f2f2;

@@ -389,8 +389,6 @@ html.cy {
 }
 html.dz {
     --fg: #4d4;
-    --fg-max: #fff;
-    --fg2-max: #fff;
     --fg-weak: #2a2;

     --bg-u6: #020;

@@ -400,11 +398,9 @@ html.dz {
     --bg-u2: #020;
     --bg-u1: #020;
     --bg: #010;
-    --bgg: var(--bg);
     --bg-d1: #000;
     --bg-d2: #020;
     --bg-d3: #000;
-    --bg-max: #000;

     --tab-alt: #6f6;
     --row-alt: #030;
@@ -417,45 +413,21 @@ html.dz {
     --a-dark: #afa;
     --a-gray: #2a2;

-    --btn-fg: var(--a);
     --btn-bg: rgba(64,128,64,0.15);
-    --btn-h-fg: var(--a-hil);
     --btn-h-bg: #050;
     --btn-1-fg: #000;
     --btn-1-bg: #4f4;
-    --btn-1h-fg: var(--btn-1-fg);
     --btn-1h-bg: #3f3;
     --btn-bs: 0 0 0 .1em #080 inset;
     --btn-1-bs: a;

-    --chk-fg: var(--tab-alt);
-    --txt-sh: var(--bg-d2);
-    --txt-bg: var(--btn-bg);
-
-    --op-aa-fg: var(--a);
-    --op-aa-bg: var(--bg-d2);
-    --op-a-sh: rgba(0,0,0,0.5);
-
     --u2-btn-b1: var(--fg-weak);
     --u2-sbtn-b1: var(--fg-weak);
-    --u2-txt-bg: var(--bg-u5);
-    --u2-tab-bg: linear-gradient(to bottom, var(--bg), var(--bg-u1));
     --u2-tab-b1: var(--fg-weak);
     --u2-tab-1-fg: #fff;
     --u2-tab-1-bg: linear-gradient(to bottom, #151, var(--bg) 80%);
-    --u2-tab-1-b1: #7c5;
-    --u2-tab-1-b2: #583;
-    --u2-tab-1-sh: #280;
-    --u2-b-fg: #fff;
     --u2-b1-bg: #3a3;
     --u2-b2-bg: #3a3;
-    --u2-inf-bg: #07a;
-    --u2-inf-b1: #0be;
-    --u2-ok-bg: #380;
-    --u2-ok-b1: #8e4;
-    --u2-err-bg: #900;
-    --u2-err-b1: #d06;
-    --ud-b1: #888;

     --sort-1: #fff;
     --sort-2: #3f3;
@@ -467,47 +439,12 @@ html.dz {

     --tree-bg: #010;

-    --g-play-bg: #750;
-    --g-play-b1: #c90;
-    --g-play-b2: #da4;
-    --g-play-sh: #b83;
-
-    --g-sel-fg: #fff;
-    --g-sel-bg: #925;
     --g-sel-b1: #c37;
     --g-sel-sh: #b36;
-    --g-fsel-bg: #d39;
     --g-fsel-b1: #d48;
-    --g-fsel-ts: #804;
-    --g-fg: var(--a-hil);
-    --g-bg: var(--bg-u2);
-    --g-b1: var(--bg-u4);
-    --g-b2: var(--bg-u5);
-    --g-g1: var(--bg-u2);
-    --g-g2: var(--bg-u5);
-    --g-f-bg: var(--bg-u4);
-    --g-f-b1: var(--bg-u5);
-    --g-f-fg: var(--a-hil);
-    --g-sh: rgba(0,0,0,0.3);

-    --f-sh1: 0.33;
-    --f-sh2: 0.02;
-    --f-sh3: 0.2;
     --f-h-b1: #3b3;

-    --f-play-bg: #fc5;
-    --f-play-fg: #000;
-    --f-sel-sh: #fc0;
-    --f-gray: #999;
-
-    --fm-off: #f6c;
-    --mp-sh: var(--bg-d3);
-
-    --err-fg: #fff;
-    --err-bg: #a20;
-    --err-b1: #f00;
-    --err-ts: #500;

     text-shadow: none;
     font-family: 'scp', monospace, monospace;
     font-family: var(--font-mono), 'scp', monospace, monospace;
@@ -1710,6 +1647,18 @@ html.dz .btn {
     background: var(--btn-1-bg);
     text-shadow: none;
 }
+#tree ul a.ld::before {
+    font-weight: bold;
+    font-family: sans-serif;
+    display: inline-block;
+    text-align: center;
+    width: 1em;
+    margin: 0 .3em 0 -1.3em;
+    color: var(--fg-max);
+    opacity: 0;
+    content: '◠';
+    animation: .5s linear infinite forwards spin, ease .25s 1 forwards fadein;
+}
 #tree ul a.par {
     color: var(--fg-max);
 }
@@ -1931,11 +1880,10 @@ html.y #tree.nowrap .ntree a+a:hover {
 #rn_f.m td+td {
     width: 50%;
 }
-#rn_f .err td {
-    background: var(--err-bg);
-    color: var(--fg-max);
-}
-#rn_f .err input[readonly] {
+#rn_f .err td,
+#rn_f .err input[readonly],
+#rui .ng input[readonly] {
+    color: var(--err-fg);
     background: var(--err-bg);
 }
 #rui input[readonly] {
@@ -2837,6 +2785,7 @@ html.b #u2conf a.b:hover {
     padding-left: .2em;
 }
 .fsearch_explain {
+    color: var(--a-dark);
     padding-left: .7em;
     font-size: 1.1em;
     line-height: 0;
@@ -132,16 +132,15 @@

     <script>
         var SR = {{ r|tojson }},
+            CGV1 = {{ cgv1 }},
             CGV = {{ cgv|tojson }},
             TS = "{{ ts }}",
             dtheme = "{{ dtheme }}",
             srvinf = "{{ srv_info }}",
-            s_name = "{{ s_name }}",
             lang = "{{ lang }}",
             dfavico = "{{ favico }}",
-            have_tags_idx = {{ have_tags_idx|tojson }},
+            have_tags_idx = {{ have_tags_idx }},
             sb_lg = "{{ sb_lg }}",
-            txt_ext = "{{ txt_ext }}",
             logues = {{ logues|tojson if sb_lg else "[]" }},
             ls0 = {{ ls0|tojson }};
(file diff suppressed because it is too large)

BIN copyparty/web/iiam.gif (new binary file, 230 B; not shown)
@@ -17,8 +17,8 @@ var chromedbg = function () { console.log(arguments); }
 var dbg = function () { };

 // replace dbg with the real deal here or in the console:
-// dbg = chromedbg
-// dbg = console.log
+// dbg = chromedbg;
+// dbg = console.log;


 // dodge browser issues
copyparty/web/rups.css (new file, 114 lines)
@@ -0,0 +1,114 @@
+html {
+    color: #333;
+    background: #f7f7f7;
+    font-family: sans-serif;
+    font-family: var(--font-main), sans-serif;
+    touch-action: manipulation;
+}
+#wrap {
+    margin: 2em auto;
+    padding: 0 1em 3em 1em;
+    line-height: 2.3em;
+}
+form {
+    display: inline;
+    padding-left: 1em;
+}
+input[type=submit],
+a {
+    color: #047;
+    background: #fff;
+    text-decoration: none;
+    border: none;
+    border-bottom: 1px solid #8ab;
+    border-radius: .2em;
+    padding: .2em .6em;
+    margin: 0 .3em;
+}
+#wrap td a {
+    margin: 0;
+    line-height: 1em;
+    display: inline-block;
+    white-space: initial;
+    font-family: var(--font-main), sans-serif;
+}
+#repl {
+    border: none;
+    background: none;
+    color: inherit;
+    padding: 0;
+    position: fixed;
+    bottom: .25em;
+    left: .2em;
+}
+#wrap table {
+    border-collapse: collapse;
+    position: relative;
+    margin-top: 2em;
+}
+#wrap th {
+    top: -1px;
+    position: sticky;
+    background: #f7f7f7;
+}
+#wrap td {
+    font-family: var(--font-mono), monospace, monospace;
+    white-space: pre; /*date*/
+    overflow: hidden; /*ipv6*/
+}
+#wrap th:first-child,
+#wrap td:first-child {
+    text-align: right;
+}
+#wrap td,
+#wrap th {
+    text-align: left;
+    padding: .3em .6em;
+    max-width: 30vw;
+}
+#wrap tr:hover td {
+    background: #ddd;
+    box-shadow: 0 -1px 0 rgba(128, 128, 128, 0.5) inset;
+}
+#wrap th:first-child,
+#wrap td:first-child {
+    border-radius: .5em 0 0 .5em;
+}
+#wrap th:last-child,
+#wrap td:last-child {
+    border-radius: 0 .5em .5em 0;
+}
+
+
+
+html.z {
+    background: #222;
+    color: #ccc;
+}
+html.bz {
+    background: #11121d;
+    color: #bbd;
+}
+html.z input[type=submit],
+html.z a {
+    color: #fff;
+    background: #057;
+    border-color: #37a;
+}
+html.z input[type=text] {
+    color: #ddd;
+    background: #223;
+    border: none;
+    border-bottom: 1px solid #fc5;
+    border-radius: .2em;
+    padding: .2em .3em;
+}
+html.z #wrap th {
+    background: #222;
+}
+html.bz #wrap th {
+    background: #223;
+}
+html.z #wrap tr:hover td {
+    background: #000;
+}
copyparty/web/rups.html (new file, 67 lines)
@@ -0,0 +1,67 @@
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+    <meta charset="utf-8">
+    <title>{{ s_doctitle }}</title>
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=0.8">
+    <meta name="theme-color" content="#{{ tcolor }}">
+    <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/rups.css?_={{ ts }}">
+    <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
+    {{ html_head }}
+</head>
+
+<body>
+    <div id="wrap">
+        <a id="a" href="{{ r }}/?ru" class="af">refresh</a>
+        <a id="a" href="{{ r }}/?h" class="af">control-panel</a>
+        <form method="get" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ r }}">
+            <input type="hidden" name="ru" value="a" />
+            Filter: <input type="text" name="filter" size="20" placeholder="documents/passwords" value="{{ filt }}" />
+            <input type="submit" />
+        </form>
+        <span id="hits"></span>
+        <table id="tab"><thead><tr>
+            <th>size</th>
+            <th>who</th>
+            <th>when</th>
+            <th>age</th>
+            <th>dir</th>
+            <th>file</th>
+        </tr></thead><tbody>
+        {% for vp, evp, sz, ip, at in rows %}
+        <tr>
+            <td>{{ sz }}</td>
+            <td>{{ ip }}</td>
+            <td>{{ at }}</td>
+            <td>{{ (now-at) }}</td>
+            <td></td>
+            <td><a href="{{ r }}{{ evp }}">{{ vp|e }}</a></td>
+        </tr>
+        {% endfor %}
+        </tbody></table>
+        {% if not rows %}
+        (the database is not aware of any uploads)
+        {% endif %}
+    </div>
+    <a href="#" id="repl">π</a>
+    <script>
+
+var SR = {{ r|tojson }},
+    NOW = {{ now }},
+    lang="{{ lang }}",
+    dfavico="{{ favico }}";
+
+var STG = window.localStorage;
+document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";
+
+    </script>
+    <script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
+    <script src="{{ r }}/.cpr/rups.js?_={{ ts }}"></script>
+    {%- if js %}
+    <script src="{{ js }}_={{ ts }}"></script>
+    {%- endif %}
+</body>
+</html>
copyparty/web/rups.js (new file, 34 lines)
@@ -0,0 +1,34 @@
+(function() {
+    var tab = ebi('tab').tBodies[0],
+        tr = Array.prototype.slice.call(tab.rows, 0),
+        rows = [];
+
+    for (var a = 0; a < tr.length; a++) {
+        var td = tr[a].cells,
+            an = td[5].children[0];
+
+        rows.push([
+            td[0].textContent,
+            td[2].textContent,
+            td[3].textContent,
+            an.textContent,
+            an.getAttribute('href'),
+        ]);
+    }
+
+    for (var a = 0; a < rows.length; a++) {
+        var t = rows[a],
+            sz = t[0],
+            at = parseInt(t[1]),
+            nam = vsplit(t[3]),
+            dh = vsplit(t[4])[0];
+
+        tr[a].cells[0].innerHTML = sz.replace(/\B(?=(\d{3})+(?!\d))/g, " ");
+        tr[a].cells[2].innerHTML = at ? unix2iso(at) : '(?)';
+        tr[a].cells[3].innerHTML = at ? shumantime(t[2]) : '(?)';
+        tr[a].cells[4].innerHTML = '<a href="' + dh + '">' + nam[0] + '</a>';
+        tr[a].cells[5].children[0].innerHTML = nam[1].split('?')[0];
+    }
+
+    ebi('hits').innerHTML = '-- showing ' + rows.length + ' files';
+})();
@@ -44,9 +44,10 @@ a {
     bottom: .25em;
     left: .2em;
 }
-table {
+#wrap table {
     border-collapse: collapse;
     position: relative;
+    margin-top: 2em;
 }
 th {
     top: -1px;

@@ -62,6 +63,14 @@ th {
 #wrap td+td+td+td+td+td+td+td {
     font-family: var(--font-mono), monospace, monospace;
 }
+#wrap th:first-child,
+#wrap td:first-child {
+    border-radius: .5em 0 0 .5em;
+}
+#wrap th:last-child,
+#wrap td:last-child {
+    border-radius: 0 .5em .5em 0;
+}

@@ -81,3 +90,6 @@ html.bz {
     color: #bbd;
     background: #11121d;
 }
+html.bz th {
+    background: #223;
+}
@@ -58,6 +58,8 @@
         {% if not rows %}
         (you don't have any active shares btw)
         {% endif %}
+    </div>
+    <a href="#" id="repl">π</a>
     <script>

         var SR = {{ r|tojson }},
@@ -45,7 +45,7 @@ function qr(e) {

 function showqr(href) {
     var vhref = href.replace('?qr&', '?').replace('?qr', '');
-    modal.alert(esc(vhref) + '<img class="b64" src="' + href + '" />');
+    modal.alert(esc(vhref) + '<img class="b64" width="100" height="100" src="' + href + '" />');
 }

 (function() {
@@ -90,6 +90,13 @@ table {
     text-align: left;
     white-space: nowrap;
 }
+.vols td:empty,
+.vols th:empty {
+    padding: 0;
+}
+.vols img {
+    margin: -4px 0;
+}
 .num {
     border-right: 1px solid #bbb;
 }

@@ -222,3 +229,6 @@ html.bz {
     color: #bbd;
     background: #11121d;
 }
+html.bz .vols img {
+    filter: sepia(0.8) hue-rotate(180deg);
+}
@@ -44,6 +44,18 @@
     </table>
     {%- endif %}

+    {%- if dls %}
+    <h1 id="ae">active downloads:</h1>
+    <table class="vols">
+        <thead><tr><th>%</th><th>sent</th><th>speed</th><th>eta</th><th>idle</th><th></th><th>dir</th><th>file</th></tr></thead>
+        <tbody>
+        {% for u in dls %}
+        <tr><td>{{ u[0] }}</td><td>{{ u[1] }}</td><td>{{ u[2] }}</td><td>{{ u[3] }}</td><td>{{ u[4] }}</td><td>{{ u[5] }}</td><td><a href="{{ u[6] }}">{{ u[7]|e }}</a></td><td>{{ u[8] }}</td></tr>
+        {% endfor %}
+        </tbody>
+    </table>
+    {%- endif %}
+
     {%- if avol %}
     <h1>admin panel:</h1>
     <table><tr><td> <!-- hehehe -->

@@ -129,13 +141,23 @@

         {% if k304 or k304vis %}
         {% if k304 %}
-        <li><a id="h" href="{{ r }}/?k304=n">disable k304</a> (currently enabled)
+        <li><a id="h" href="{{ r }}/?cc&setck=k304=n">disable k304</a> (currently enabled)
         {%- else %}
-        <li><a id="i" href="{{ r }}/?k304=y" class="r">enable k304</a> (currently disabled)
+        <li><a id="i" href="{{ r }}/?cc&setck=k304=y" class="r">enable k304</a> (currently disabled)
         {% endif %}
-        <blockquote id="j">enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
+        <blockquote id="j">enabling k304 will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general</blockquote></li>
         {% endif %}

+        {% if no304 or no304vis %}
+        {% if no304 %}
+        <li><a id="ab" href="{{ r }}/?cc&setck=no304=n">disable no304</a> (currently enabled)
+        {%- else %}
+        <li><a id="ac" href="{{ r }}/?cc&setck=no304=y" class="r">enable no304</a> (currently disabled)
+        {% endif %}
+        <blockquote id="ad">enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!</blockquote></li>
+        {% endif %}
+
+        <li><a id="af" href="{{ r }}/?ru">show recent uploads</a></li>
         <li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
     </ul>
@@ -34,6 +34,11 @@ var Ls = {
        "ta2": "gjenta for å bekrefte nytt passord:",
        "ta3": "fant en skrivefeil; vennligst prøv igjen",
        "aa1": "innkommende:",
+       "ab1": "skru av no304",
+       "ac1": "skru på no304",
+       "ad1": "no304 stopper all bruk av cache. Hvis ikke k304 var nok, prøv denne. Vil mangedoble dataforbruk!",
+       "ae1": "utgående:",
+       "af1": "vis nylig opplastede filer",
    },
    "eng": {
        "d2": "shows the state of all active threads",

@@ -80,6 +85,11 @@ var Ls = {
        "ta2": "重复以确认新密码:",
        "ta3": "发现拼写错误;请重试",
        "aa1": "正在接收的文件:", //m
+       "ab1": "关闭 k304",
+       "ac1": "开启 k304",
+       "ad1": "启用 no304 将禁用所有缓存;如果 k304 不够,可以尝试此选项。这将消耗大量的网络流量!", //m
+       "ae1": "正在下载:", //m
+       "af1": "显示最近上传的文件", //m
    }
 };
@@ -53,7 +53,6 @@
{% if s %}
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
{% endif %}
-<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
</ul>

@@ -137,7 +136,6 @@
{% if args.ftps %}
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
{% endif %}
-<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
</ul>
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
@@ -73,9 +73,9 @@ html {
position: absolute;
height: 1px;
top: 1px;
-right: 1%;
-width: 99%;
-animation: toastt var(--tmtime) steps(var(--tmstep)) forwards;
+right: 1px;
+left: 1px;
+animation: toastt var(--tmtime) 0.07s steps(var(--tmstep)) forwards;
transform-origin: right;
}
@keyframes toastt {

@@ -322,6 +322,8 @@ html.y #tth {
margin: .1em auto;
width: 60%;
height: 60%;
+background: #999;
+background: rgba(128,128,128,0.2);
}
#modalb {
position: sticky;
@@ -17,10 +17,14 @@ function goto_up2k() {
var up2k = null,
up2k_hooks = [],
hws = [],
+hws_ok = 0,
+hws_ng = false,
sha_js = WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
m = 'will use ' + sha_js + ' instead of native sha512 due to';

try {
+if (sread('nosubtle') || window.nosubtle)
+throw 'chickenbit';
var cf = crypto.subtle || crypto.webkitSubtle;
cf.digest('SHA-512', new Uint8Array(1)).then(
function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },

@@ -242,7 +246,7 @@ function U2pvis(act, btns, uc, st) {
p = bd * 100.0 / sz,
nb = bd - bd0,
spd = nb / (td / 1000),
-eta = (sz - bd) / spd;
+eta = spd ? (sz - bd) / spd : 3599;

return [p, s2ms(eta), spd / (1024 * 1024)];
};

@@ -691,8 +695,9 @@ function Donut(uc, st) {
}

if (++r.tc >= 10) {
+var s = r.eta === null ? 'paused' : r.eta > 60 ? shumantime(r.eta) : (r.eta + 's');
wintitle("{0}%, {1}, #{2}, ".format(
-f2f(v * 100 / t, 1), shumantime(r.eta), st.files.length - st.nfile.upload), true);
+f2f(v * 100 / t, 1), s, st.files.length - st.nfile.upload), true);
r.tc = 0;
}

@@ -853,8 +858,13 @@ function up2k_init(subtle) {
setmsg(suggest_up2k, 'msg');

+var u2szs = u2sz.split(','),
+u2sz_min = parseInt(u2szs[0]),
+u2sz_tgt = parseInt(u2szs[1]),
+u2sz_max = parseInt(u2szs[2]);

var parallel_uploads = ebi('nthread').value = icfg_get('nthread', u2j),
-stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz.split(',')[1]),
+stitch_tgt = ebi('u2szg').value = icfg_get('u2sz', u2sz_tgt),
uc = {},
fdom_ctr = 0,
biggest_file = 0;

@@ -1350,9 +1360,21 @@ function up2k_init(subtle) {
draw_each = good_files.length < 50;

if (WebAssembly && !hws.length) {
-for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
+var nw = Math.min(navigator.hardwareConcurrency || 4, 16);
+
+if (CHROME) {
+// chrome-bug 383568268 // #124
+nw = Math.max(1, (nw > 4 ? 4 : (nw - 1)));
+nw = (subtle && !MOBILE && nw > 2) ? 2 : nw;
+}
+
+for (var a = 0; a < nw; a++)
hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));
+
+if (!subtle)
+for (var a = 0; a < hws.length; a++)
+hws[a].postMessage('nosubtle');
+
console.log(hws.length + " hashers");
}

@@ -1543,9 +1565,11 @@ function up2k_init(subtle) {
if (nhash) {
st.time.hashing += td;
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
-if (uc.fsearch)
+if (uc.fsearch) {
+st.time.busy += td;
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
}
+}

var b_up = st.bytes.inflight + st.bytes.uploaded,
b_fin = st.bytes.inflight + st.bytes.finished;

@@ -1863,10 +1887,12 @@ function up2k_init(subtle) {
function chill(t) {
var now = Date.now();
-if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
+if ((t.coolmul || 0) < 5 || now - t.cooldown < t.coolmul * 700)
t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);

-t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
+var cd = now + 1000 * (t.coolmul + Math.random() * 4 + 2);
+t.cooldown = Math.floor(Math.max(cd, t.cooldown || 1));
+return t;
}

/////
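the rewritten `chill` above replaces the flat retry delay with exponential backoff plus jitter: `coolmul` doubles on each quick consecutive failure (capped at 32), and the next attempt waits roughly `coolmul + 2` seconds plus up to 4 seconds of random jitter. a rough sketch of the schedule this produces (illustrative only, not part of the codebase):

```js
// approximate retry delays from chill(); the 0-4s jitter is omitted
var coolmul = 0.5;
for (var n = 1; n <= 6; n++) {
    coolmul = Math.min(coolmul * 2, 32); // 1, 2, 4, 8, 16, 32
    console.log('retry ' + n + ': ~' + (coolmul + 2) + 's');
}
// 3s, 4s, 6s, 10s, 18s, 34s -- then pinned at ~34s (plus jitter)
```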
@@ -1951,7 +1977,7 @@ function up2k_init(subtle) {
pvis.setab(t.n, nchunks);
pvis.move(t.n, 'bz');

-if (hws.length && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
+if (hws.length && !hws_ng && uc.hashw && (nchunks > 1 || document.visibilityState == 'hidden'))
// resolving subtle.digest w/o worker takes 1sec on blur if the actx hack breaks
return wexec_hash(t, chunksize, nchunks);

@@ -2060,16 +2086,27 @@ function up2k_init(subtle) {
free = [],
busy = {},
nbusy = 0,
+init = 0,
hashtab = {},
mem = (MOBILE ? 128 : 256) * 1024 * 1024;

+if (!hws_ok)
+init = setTimeout(function() {
+hws_ng = true;
+toast.warn(30, 'webworkers failed to start\n\nwill be a bit slower due to\nhashing on main-thread');
+apop(st.busy.hash, t);
+st.todo.hash.unshift(t);
+exec_hash();
+}, 5000);

for (var a = 0; a < hws.length; a++) {
var w = hws[a];
-free.push(w);
w.onmessage = onmsg;
+if (init)
+w.postMessage('ping');
+if (mem > 0)
+free.push(w);
mem -= chunksize;
-if (mem <= 0)
-break;
}

@@ -2099,6 +2136,12 @@ function up2k_init(subtle) {
d = d.data;
var k = d[0];

+if (k == "pong")
+if (++hws_ok == hws.length) {
+clearTimeout(init);
+go_next();
+}

if (k == "panic")
return vis_exh(d[1], 'up2k.js', '', '', d[1]);

@@ -2161,6 +2204,7 @@ function up2k_init(subtle) {
tasker();
}
}
+if (!init)
go_next();
}

@@ -2259,8 +2303,7 @@ function up2k_init(subtle) {
console.log('handshake onerror, retrying', t.name, t);
apop(st.busy.handshake, t);
-st.todo.handshake.unshift(t);
-t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
+st.todo.handshake.unshift(chill(t));
t.keepalive = keepalive;
};
var orz = function (e) {

@@ -2273,8 +2316,7 @@ function up2k_init(subtle) {
}
catch (ex) {
apop(st.busy.handshake, t);
-st.todo.handshake.unshift(t);
-t.cooldown = Date.now() + 5000 + Math.floor(Math.random() * 3000);
+st.todo.handshake.unshift(chill(t));
var txt = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
return toast.err(0, txt + '\n\n' + L.badreply + ':\n\n' + unpre(xhr.responseText));
}

@@ -2375,6 +2417,9 @@ function up2k_init(subtle) {
msg = 'done';

if (t.postlist.length) {
+if (t.rechecks && QS('#opa_del.act'))
+toast.inf(30, L.u_started, L.u_unpt);

var arr = st.todo.upload,
sort = arr.length && arr[arr.length - 1].nfile > t.n;

@@ -2453,6 +2498,7 @@ function up2k_init(subtle) {
else {
pvis.seth(t.n, 1, "ERROR");
pvis.seth(t.n, 2, L.u_ehstmp, t);
+apop(st.busy.handshake, t);

var err = "",
cls = "ERROR",

@@ -2466,7 +2512,6 @@ function up2k_init(subtle) {
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
console.log("rate-limit: " + penalty);
t.cooldown = Date.now() + parseFloat(penalty) * 1000;
-apop(st.busy.handshake, t);
st.todo.handshake.unshift(t);
return;
}

@@ -2485,12 +2530,15 @@ function up2k_init(subtle) {
if (!t.rechecks && (err_pend || err_srcb)) {
t.rechecks = 0;
t.want_recheck = true;
+if (st.busy.upload.length || st.busy.handshake.length || st.bytes.uploaded) {
err = L.u_dupdefer;
cls = 'defer';
}
}
-if (rsp.indexOf('server HDD is full') + 1)
-return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
+if (err_pend) {
+err += ' <a href="#" onclick="toast.inf(60, L.ue_ab);" class="fsearch_explain">(' + L.u_expl + ')</a>';
+}
+}

if (err != "") {
if (!t.t_uploading)

@@ -2500,10 +2548,15 @@ function up2k_init(subtle) {
pvis.seth(t.n, 2, err);
pvis.move(t.n, 'ng');

-apop(st.busy.handshake, t);
tasker();
return;
}

+st.todo.handshake.unshift(chill(t));

+if (rsp.indexOf('server HDD is full') + 1)
+return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));

err = t.t_uploading ? L.u_ehsfin : t.srch ? L.u_ehssrch : L.u_ehsinit;
xhrchk(xhr, err + "\n\nfile: " + t.name + "\n\nerror ", "404, target folder not found", "warn", t);
}

@@ -2574,8 +2627,7 @@ function up2k_init(subtle) {
nparts = upt.nparts,
pcar = nparts[0],
pcdr = nparts[nparts.length - 1],
-snpart = pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr),
-tries = 0;
+maxsz = (u2sz_max > 1 ? u2sz_max : 2040) * 1024 * 1024;

if (t.done)
return console.log('done; skip chunk', t.name, t);

@@ -2595,6 +2647,30 @@ function up2k_init(subtle) {
if (cdr >= t.size)
cdr = t.size;

+if (cdr - car <= maxsz)
+return upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car, []);

+var car0 = car, subs = [];
+while (car < cdr) {
+subs.push([car, Math.min(cdr, car + maxsz)]);
+car += maxsz;
+}
+upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);
+}

+function upload_sub(t, upt, pcar, pcdr, car, cdr, chunksize, car0, subs) {
+var nparts = upt.nparts,
+is_sub = subs.length;

+if (is_sub) {
+var x = subs.shift();
+car = x[0];
+cdr = x[1];
+}

+var snpart = is_sub ? ('' + pcar + '(' + (car-car0) +'+'+ (cdr-car)) :
+pcar == pcdr ? pcar : ('' + pcar + '~' + pcdr);

var orz = function (xhr) {
st.bytes.inflight -= xhr.bsent;
var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);

@@ -2608,6 +2684,10 @@ function up2k_init(subtle) {
return;
}
if (xhr.status == 200) {
+car = car0;
+if (subs.length)
+return upload_sub(t, upt, pcar, pcdr, 0, 0, chunksize, car0, subs);

var bdone = cdr - car;
for (var a = pcar; a <= pcdr; a++) {
pvis.prog(t, a, Math.min(bdone, chunksize));

@@ -2616,6 +2696,7 @@ function up2k_init(subtle) {
st.bytes.finished += cdr - car;
st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car;
+t.cooldown = t.coolmul = 0;
st.etac.u++;
st.etac.t++;
}

@@ -2674,7 +2755,7 @@ function up2k_init(subtle) {
toast.warn(9.98, L.u_cuerr.format(snpart, Math.ceil(t.size / chunksize), t.name), t);

t.nojoin = t.nojoin || t.postlist.length; // maybe rproxy postsize limit
-console.log('chunkpit onerror,', ++tries, t.name, t);
+console.log('chunkpit onerror,', t.name, t);
orz2(xhr);
};

@@ -2692,9 +2773,13 @@ function up2k_init(subtle) {
xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", ctxt);
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
+if (is_sub)
+xhr.setRequestHeader("X-Up2k-Subc", car - car0);

xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
-st.eta.t.split(' ').pop()));
+st.eta.t.indexOf('/s, ')+1 ? st.eta.t.split(' ').pop() : 'x'));

xhr.setRequestHeader('Content-Type', 'application/octet-stream');
if (xhr.overrideMimeType)
xhr.overrideMimeType('Content-Type', 'application/octet-stream');

@@ -2812,13 +2897,13 @@ function up2k_init(subtle) {
}

var read_u2sz = function () {
-var el = ebi('u2szg'), n = parseInt(el.value), dv = u2sz.split(',');
+var el = ebi('u2szg'), n = parseInt(el.value);
stitch_tgt = n = (
-isNaN(n) ? dv[1] :
-n < dv[0] ? dv[0] :
-n > dv[2] ? dv[2] : n
+isNaN(n) ? u2sz_tgt :
+n < u2sz_min ? u2sz_min :
+n > u2sz_max ? u2sz_max : n
);
-if (n == dv[1]) sdrop('u2sz'); else swrite('u2sz', n);
+if (n == u2sz_tgt) sdrop('u2sz'); else swrite('u2sz', n);
if (el.value != n) el.value = n;
};
ebi('u2szg').addEventListener('blur', read_u2sz);
@@ -5,10 +5,17 @@ if (!window.console || !console.log)
"log": function (msg) { }
};

+if (!Object.assign)
+Object.assign = function (a, b) {
+for (var k in b)
+a[k] = b[k];
+};

+if (window.CGV1)
+Object.assign(window, window.CGV1);

if (window.CGV)
-for (var k in CGV)
-window[k] = CGV[k];
+Object.assign(window, window.CGV);

var wah = '',

@@ -571,7 +578,9 @@ function yscroll() {

function showsort(tab) {
var v, vn, v1, v2, th = tab.tHead,
-sopts = jread('fsort', jcp(dsort));
+sopts = jread('fsort');

+sopts = sopts && sopts.length ? sopts : dsort;

th && (th = th.rows[0]) && (th = th.cells);

@@ -608,10 +617,13 @@ function sortTable(table, col, cb) {
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;

-var stype = th[col].getAttribute('sort');
+var kname = th[col].getAttribute('name'),
+stype = th[col].getAttribute('sort');
try {
-var nrules = [], rules = jread("fsort", []);
-rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
+var nrules = [],
+rules = kname == 'href' ? [] : jread("fsort", []);

+rules.unshift([kname, reverse, stype || '']);
for (var a = 0; a < rules.length; a++) {
var add = true;
for (var b = 0; b < a; b++)

@@ -874,6 +886,11 @@ if (window.Number && Number.isFinite)

function f2f(val, nd) {
// 10.toFixed(1) returns 10.00 for certain values of 10
+if (!isNum(val)) {
+val = parseFloat(val);
+if (!isNum(val))
+val = 999;
+}
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
}

@@ -970,11 +987,33 @@ function apop(arr, v) {
}

-function jcp(obj) {
+function jcp1(obj) {
return JSON.parse(JSON.stringify(obj));
}

+function jcp2(src) {
+if (Array.isArray(src)) {
+var ret = [];
+for (var a = 0; a < src.length; ++a) {
+var sub = src[a];
+ret.push((sub === null) ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub);
+}
+} else {
+var ret = {};
+for (var key in src) {
+var sub = src[key];
+ret[key] = sub === null ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub;
+}
+}
+return ret;
+};

+// jcp1 50% faster on android-chrome, jcp2 7x everywhere else
+var jcp = MOBILE && CHROME ? jcp1 : jcp2;

function sdrop(key) {
try {
STG.removeItem(key);
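after this hunk, `jcp` keeps the JSON-round-trip copy on android-chrome (where the native parser wins) and uses the recursive `jcp2` everywhere else; both return a deep copy, so callers can mutate the result freely. a minimal usage sketch (the rule being pushed is illustrative):

```js
// deep-copy the default sort rules before mutating them,
// so the shared dsort default stays untouched
var rules = jcp(dsort);
rules.unshift(['href', 1, '']);
```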
@@ -1527,21 +1566,26 @@ var toast = (function () {
if (sec)
te = setTimeout(r.hide, sec * 1000);

+if (same && delta < 1000) {
var tb = ebi('toastt');
-if (same && delta < 1000 && tb) {
+if (tb) {
tb.style.animation = 'none';
tb.offsetHeight;
tb.style.animation = null;
+}
return;
}

if (txt.indexOf('<body>') + 1)
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';

-setcvar('--tmtime', sec + 's');
-setcvar('--tmstep', sec * 15);
-obj.innerHTML = '<div id="toastt"></div><a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
+var html = '';
+if (sec) {
+setcvar('--tmtime', (sec - 0.15) + 's');
+setcvar('--tmstep', Math.floor(sec * 20));
+html += '<div id="toastt"></div>';
+}
+obj.innerHTML = html + '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
obj.className = cl;
sec += obj.offsetWidth;
obj.className += ' vis';
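the toast countdown bar is driven entirely by two CSS custom properties that this code sets just before injecting `#toastt`; the stylesheet hunk earlier consumes them. a condensed restatement of the pairing (assuming `setcvar` writes the property on the document root):

```js
// duration slightly shorter than the toast lifetime, ~20 animation steps
// per second; consumed by `animation: toastt var(--tmtime) 0.07s
// steps(var(--tmstep)) forwards` in the css hunk above
function setcvar_sketch(sec) {
    var st = document.documentElement.style;
    st.setProperty('--tmtime', (sec - 0.15) + 's');
    st.setProperty('--tmstep', Math.floor(sec * 20) + '');
}
```

skipping the `#toastt` element entirely when `sec` is falsy is what hides the countdown bar for toasts with infinite duration.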
@@ -20,6 +20,7 @@ catch (ex) {
function load_fb() {
subtle = null;
importScripts('deps/sha512.hw.js');
+console.log('using fallback hasher');
}

@@ -29,6 +30,12 @@ var reader = null,

onmessage = (d) => {
+if (d.data == 'nosubtle')
+return load_fb();

+if (d.data == 'ping')
+return postMessage(['pong']);

if (busy)
return postMessage(["panic", 'worker got another task while busy']);
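together with the up2k hunks above, this gives the hasher workers a tiny health-check protocol: the page pings each worker, a healthy worker answers `pong`, and a watchdog falls back to main-thread hashing if nothing answers in time. a condensed sketch of the page-side half (simplified from the diff; error handling omitted):

```js
// spawn a worker, then verify it actually runs before trusting it
var w = new Worker(SR + '/.cpr/w.hash.js?_=' + TS),
    watchdog = setTimeout(function () {
        hws_ng = true; // mark workers broken; hash on main thread instead
    }, 5000);

w.onmessage = function (d) {
    if (d.data[0] == 'pong')
        clearTimeout(watchdog); // worker is alive; keep using it
};
w.postMessage('ping');
```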
@@ -1,3 +1,307 @@
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1211-2236 `v1.16.5` 4chrome
+
+## 🧪 new features
+
+* #124 add workaround for a chrome bug (crash during upload) 24ce46b3
+  * chrome and chromium-based browsers could OOM
+  * https://issues.chromium.org/issues/383568268
+* #122 "hybrid IdP", regular users can still auth while [IdP](https://github.com/9001/copyparty#identity-providers) is enabled 64501fd7
+  * previously, enabling IdP would entirely disable password-based login
+  * now, password-auth is attempted for requests without a valid IdP header
+
+## 🩹 bugfixes
+
+* the terminal window title would only change if `--no-ansi` was specified, which is exactly the opposite of what it should be (and now is) doing db3c0b09
+
+## 🔧 other changes
+
+* mDNS: better log messages when several IPs are added/removed a49bf81f
+* webdeps: update dompurify 06868606
+
+----
+
+this release includes a build of [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.16.5/copyparty-winpe64.exe) since the last one was [almost a year ago](https://github.com/9001/copyparty/releases/tag/v1.10.1)
+
+* winpe64.exe is only for *very* specific usecases, you almost definitely *do not* want to download it, please just grab the regular [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) instead (works on all 64bit machines running win8 or newer)
+* the only difference between winpe64.exe and [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is that winpe64.exe works in the win7x64 PE (rescue-env), which makes it *almost* entirely useless, and every bit as dangerous to use as copyparty32.exe
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1207-0024 `v1.16.4` ux is hard
+
+## 🧪 new features
+
+* improve the upload ui so it explains how to abort an unfinished upload when someone uploads to the wrong folder by accident be6afe2d
+  * also reduces serverload slightly when cloning an incoming file to multiple destinations
+* u2c (commandline uploader): windows improvements 91637800
+  * now supports globbing (filename wildcards) on windows
+  * progressbar in the windows taskbar (requires conemu or the "new windows terminal")
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1204-0003 `v1.16.3` 120%
+
+## 🧪 new features
+
+* #120 add option `--srch-excl` and volflag `srch_excl` for excluding certain paths from search results 697a4fa8
+* mDNS: add workaround for https://github.com/avahi/avahi/issues/379 6c1cf68b 94d1924f
+  * Avahi mDNS Reflection, sometimes used in intricate LAN setups, doesn't understand NSEC records and corrupts them
+  * the workaround makes copyparty able to read the corrupted packets, but clients without a similar workaround will require either `--zm4` or `--zm6` so copyparty doesn't include the usual NSEC records
+  * this is mentioned in a very loud warning in the logs when necessary
+* mDNS: option to silently ignore buggy devices instead of spamming the log with parser errors 395af051
+* webdav: support listing unmapped root with infinite recursion (Depth:0) 21a3f369
+* embed current sort config into media URLs (gallery/music) 0f257c93 4cfdc4c5 01670827
+  * ensures that anyone clicking your link will see the files in the same order as you
+  * can be configured serverside (`--hsortn`, volflag `hsortn`) and clientside (`#sort` in settings)
+* URL and UI options to disable checksum calculation of PUT, bup, basic uploads c5a000d2
+  * also allows [choosing either md5, sha1, sha256, or blake2](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#write) instead of the default sha512
+  * can give uploads a nice speed boost when copyparty is running on a potato
+
+## 🩹 bugfixes
+
+* webdav: more correct login challenge 2ce82339
+  * the previous behavior could make some clients reluctant to send the password
+* #120 forget metadata of all files (including uploads) when shadowed d168b2ac
+  * thanks to @Gremious for all the debugging to narrow this down!
+* #120 drop volume caches if relevant config is changed (mainly indexing filters) 2f83c6c7
+* #121 couldn't access arbitrary toplevel files from accounts with `h` permission 1f5f42f2
+
+## 🔧 other changes
+
+* exclude thumbnails from accesslog by default 9082c470
+* filesearch: show a final summary of time-elapsed and average hashing speed 8a631f04
+* improve phrasing of debug messages during indexing at startup 127f414e
+* `--license` no longer depends on opensource.org at build time 33c4ccff
+* update deps 6cedcfbf
+  * copyparty.exe: python 3.12.7 => 3.12.8
+  * webdeps: hashwasm, dompurify
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1123-2336 `v1.16.2` webdav upload fix
+
+## 🧪 new features
+
+* add `--nsort` and volflag `nsort` to default-enable natural sort of filenames with leading digits 8f7ffcf3
+* video-player: support `.mov` files which contain browser-native codecs 2d0cbdf1
+
+## 🩹 bugfixes
+
+* #119 v1.16.0 broke webdav uploads from rclone and possibly other clients 7dfbfc72
+  * a collection of webdav unittests will be added soon to prevent similar issues in the future
+* #118 ip-ranges can be mixed with `lan` when specifying the list of trusted proxies for `x-forwarded-for` with `--xff-src`
+  * found and fixed by @codemicro (thx!) 0e31cfa7
+* ux:
+  * in the grid-view, markdown files would open in the generic text viewer 520ac8f4
+  * qr-codes (create-share, view-share) didn't render on chrome db069c3d
+  * qr-codes could cause layout-shifting 5afb562a
+  * fix layout-shifting for ongoing downloads in controlpanel 9c8507a0
+  * cosmetic eta jank b10843d0
+
+## 🔧 other changes
+
+* up to 7% faster folder listings due to refactoring for more ux knobs 0c43b592
+* fix resource leaks (only affected tests/debug) 2ab8924e
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1115-2218 `v1.16.1` cbz thumbnails
+
+## 🧪 new features
+
+* thumbnails of .cbz manga archives 4d15dd6e
+
+## 🩹 bugfixes
+
+* when running with `-j0`, download-ETA could break in complex volume layouts 10fc4768
+* linking to the image gallery didn't quite work if multiselect was enabled 56a04996
+* password-hashing parameters (cpu/ram cost) could not be customized 1f177528
+  * the defaults must be perfect considering nobody ever tried changing them ¯\\_(ツ)_/¯
+
+## 🔧 other changes
+
+* add intentional crash on startup if two volumes are configured to use the same histpath 2b63d7d1
+  * prevents funky deadlocks and an eventual database loss in case of a no-thoughts-head-empty moment, purely hypothetical of course 🗿
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1110-1932 `v1.16.0` COPYparty
+
+## 🧪 new features
+
+* #46 #115 copy/paste files and folders cacec9c1
+  * cut/paste still exists, but now you can copy too
+  * with a UI to rename files in case of filename collisions 56317b00
+  * files are created according to the dedup settings in the target volume (either full copies or symlinks/hardlinks)
+* show currently active downloads in the controlpanel 8aba5aed
+  * can be made admin-only with `--dl-list=1` or disabled with `--dl-list=0`
+  * hides filenames of hidden files, and files from volumes where the viewer doesn't have access
+* #114 async reinit on new [IdP users](https://github.com/9001/copyparty#identity-providers) 44ee07f0
+  * new IdP users can now always auth, even while a filesystem reindex is running
+* ux:
+  * remember batch-rename settings from last time 6a8d5e17
+  * URL parameters to force grid/thumbs on/off 5718caa9
+
+## 🩹 bugfixes
+
+* folders that fail to list due to a corrupt HDD/filesystem will now return a 404 instead of an empty listing 119e88d8
+  * also fixes similar issues in u2c and partyfuse
+* u2c (commandline uploader): detect and adapt to proxies with short connection keepalives c784e528
+* ui/ux:
+  * show the "switch-to-https" button in 404-messages too efd8a32e
+  * the folder-loading indicator could steal keyboard focus d9962f65
+  * hotkey-help was very trigger-happy 71d9e010
+
+## 🔧 other changes
+
+* choose more conservative defaults when server has less than 1 GiB RAM 2bf9055c
+  * runs okay down to 128 MiB, but thumbnails die below 256 MiB
+* update the [comparison to similar software](https://github.com/9001/copyparty/blob/hovudstraum/docs/versus.md) after years of optimizations on both sides 0ce7cf5e
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1027-0751 `v1.15.10` temporary upload links
+
+## 🧪 new features
+
+* [shares](https://github.com/9001/copyparty#shares) can now be uploaded into, and unpost works too 4bdcbc1c
+  * useful to create temporary URLs for other people to upload to
+  * shares can be write-only, so visitors can't browse or see any files
+* #110 HTTP 304 (caching):
+  * support `If-Range` for HTTP 206 159f51b1
+  * add server-side and client-side options to force-disable cache dd6dbdd9
+    * `--no304=1` shows a button in the controlpanel to disable caching
+    * `--no304=2` makes that button auto-enabled
+    * even when `--no304` is not specified, accessing the URL `/?setck=no304=y` force-disables cache
+    * when cache is force-disabled, browsers will waste a lot of network traffic / data usage
+    * might help to avoid bugs in browsers or proxies, for example if media files suddenly stop loading
+    * but such bugs should be exceedingly rare, so do not enable this unless actually necessary
+
+## 🩹 bugfixes
+
+* #110 HTTP 304 (caching):
+  * remove `Content-Length` and `Content-Type` response headers from 304 replies 91240236
+    * browsers don't need these, and some middlewares might get confused if they're present
+* #113 fix crash on startup if `-j0` was combined with `--ipa` or `--ipu` 3a0d882c
+* #111 fix javascript crash if `--u2sz` was set to an invalid value b13899c6
+
+## 🔧 other changes
+
+* #110 HTTP 304 (caching):
+  * never automatically enable k304 because the `Vary` header killed support for caching in msie anyways 63013cc5
+  * change time comparison for `If-Modified-Since` to require an exact timestamp match, instead of the intended "modified since". This technically violates the http-spec, but should be safer for backdating file mtimes 159f51b1
+* new option `--ohead` to log response headers 7678a91b
+* added [nintendo 3ds](https://github.com/user-attachments/assets/88deab3d-6cad-4017-8841-2f041472b853) to the [list of supported browsers](https://github.com/9001/copyparty#browser-support) cb81f0ad
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1018-2342 `v1.15.9` rss server
+
+## 🧪 new features
+
+* #109 [rss feed generator](https://github.com/9001/copyparty#rss-feeds) 7ffd805a
+  * monitor folders recursively with RSS readers
+
+## 🩹 bugfixes
+
+* #107 `--df` diskspace limits were incompatible with webdav 2a570bb4
+* #108 up2k javascript crash (only affected the Chinese translation) a7e2a0c9
+
+## 🔧 other changes
+
+* up2k: detect buggy webworkers 5ca8f070
+* up2k: improve upload retry/timeout logic a9b4436c
+  * js: make handshake retries more aggressive
+  * u2c: reduce chunks timeout + ^
+  * main: reduce tcp timeout to 128sec (js is 42s)
+  * httpcli: less confusing log messages
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1016-2153 `v1.15.8` the sky is the limit
+
+## 🧪 new features
+
+* subchunks; avoid the Cloudflare filesize limit entirely fc8298c4 48147c07
+  * the previous max filesize was `383.9 GiB`, now only the sky is the limit
+  * if you're using another proxy with a more restrictive limit than Cloudflare's 100 MiB, for example 64 MiB, then `--u2sz 1,64,64` (see the sketch right after this list)
+* m4v videos can be played in the gallery ff0a71f2
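how a subchunk split falls out of `--u2sz`: the three numbers are min, target, and hard max chunksize in MiB; any stitched chunk bigger than the max is sliced into consecutive sub-ranges which are POSTed one at a time, each tagged with its `X-Up2k-Subc` byte-offset. a rough sketch of the slicing, mirroring the loop in the up2k.js hunk earlier (the 200 MiB figure is just an example):

```js
// --u2sz 1,64,64  =>  min 1 MiB, target 64 MiB, hard max 64 MiB
var maxsz = 64 * 1024 * 1024,
    car = 0, cdr = 200 * 1024 * 1024, // a 200 MiB chunk-range
    subs = [];

while (car < cdr) {
    subs.push([car, Math.min(cdr, car + maxsz)]); // [start, end) byte-range
    car += maxsz;
}
// subs is now four ranges (64+64+64+8 MiB), each small enough for the proxy
```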
+
+## 🩹 bugfixes
+
+* up2k: uploading duplicate files could initially fail (but would succeed after a few automatic retries) due to a toctou 114b71b7
+* [u2c](https://github.com/9001/copyparty/blob/hovudstraum/bin/README.md#u2cpy) / commandline uploader:
+  * directory scanner got stuck if it found a FIFO cba1878b
+  * excessive number of FDs when uploading large files 65a2b6a2
+  * chunksize calculation; only affected files exactly 128 GiB large a2e037d6
+  * support filenames with newlines and invalid utf-8 b2770a20
+    * invalid utf-8 is replaced by `?` when they hit the server
+
+## 🔧 other changes
+
+* don't show the toast countdown bar if duration is infinite 22dfc6ec
+* chickenbit to disable the browser's built-in sha512 implementation and force the bundled wasm instead d715479e
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1013-2244 `v1.15.7` the 'a' in "ip address" stands for authentication
+
+## 🧪 new features
+
+* [cidr-based autologin](https://github.com/9001/copyparty#ip-auth) b7f9bf5a
+  * map a cidr ip-range to a username; anyone connecting from that ip-range will autologin as that user
+  * thx to @byteturtle for the idea!
+* [u2c](https://github.com/9001/copyparty/blob/hovudstraum/bin/README.md#u2cpy) / commandline uploader:
+  * option `--chs` to list individual chunk hashes cf1b7562
+  * fix progress indicator when resuming an upload 53ffd245
+* up2k: verbose logging of detected/corrected bitflips ee628363
+  * *foreshadowing intensifies* (story still developing)
+
+## 🩹 bugfixes
+
+* up2k with database disabled / running without `-e2d` 705f598b
+  * respect `noforget` when loading snaps
+  * ...but actually forget deleted files otherwise
+  * snap-loader adds empty need/hash entries as necessary
+
+## 🔧 other changes
+
+* authed users can now unpost recent uploads of unauthed users from the same IP 22b58e31
+  * would have become problematic now that cidr-based autologin is a thing
+
+
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2024-1011-2256 `v1.15.6` preadme
+
+## 🧪 new features
+
+* #105 files named `preadme.md` appear at the top of directory listings 1d68acf8
+* entirely disable dedup with `--no-clone` / volflag `noclone` 3d7facd7 6b7ebdb7
+  * even if a file exists for sure on the server HDD, let the client continue uploading instead of reusing the existing data
+  * enabling this almost never makes sense, unless you're using something like S3 Glacier storage where reading is really expensive but writing is cheap
+
+## 🩹 bugfixes
+
+* up2k jank after detecting a bitflip or network glitch 4a4ec88d
+  * instead of resuming the interrupted upload like it should, the upload client could get stuck or start over
+* #104 support viewing dotfile documents when dotfiles are hidden 9ccd8bb3
+* fix a buttload of typos 6adc778d 1e7697b5
+
+
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1005-1803 `v1.15.5` pyz all the cores
@@ -140,8 +140,12 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?tar&j` | pregenerate jpg thumbnails |
| GET | `?tar&p` | pregenerate audio waveforms |
| GET | `?shares` | list your shared files/folders |
+| GET | `?dls` | show active downloads (do this as admin) |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
+| GET | `?ru` | show all recent uploads |
+| GET | `?ru&filter=f` | ...where URL contains `f` |
+| GET | `?ru&j` | ...as json |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?v` | render markdown file at URL |
| GET | `?v` | open image/video/audio in mediaplayer |

@@ -163,15 +167,20 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| method | params | result |
|--|--|--|
+| POST | `?copy=/foo/bar` | copy the file/folder at URL to /foo/bar |
| POST | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar |

| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
+| PUT | `?ck` | (binary data) | upload without checksum gen (faster) |
+| PUT | `?ck=md5` | (binary data) | return md5 instead of sha512 |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
+| mPOST | `?ck` | `f=FILE` | ...and disable checksum gen (faster) |
+| mPOST | `?ck=md5` | `f=FILE` | ...and return md5 instead of sha512 |
| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |

@@ -190,6 +199,12 @@ upload modifiers:
| `Accept: url` | `want=url` | return just the file URL |
| `Rand: 4` | `rand=4` | generate random filename with 4 characters |
| `Life: 30` | `life=30` | delete file after 30 seconds |
+| `CK: no` | `ck` | disable serverside checksum (maybe faster) |
+| `CK: md5` | `ck=md5` | return md5 checksum instead of sha512 |
+| `CK: sha1` | `ck=sha1` | return sha1 checksum |
+| `CK: sha256` | `ck=sha256` | return sha256 checksum |
+| `CK: b2` | `ck=b2` | return blake2b checksum |
+| `CK: b2s` | `ck=b2s` | return blake2s checksum |

* `life` only has an effect if the volume has a lifetime, and the volume lifetime must be greater than the file's
|
|||||||
| method | params | result |
|
| method | params | result |
|
||||||
|--|--|--|
|
|--|--|--|
|
||||||
| GET | `?pw=x` | logout |
|
| GET | `?pw=x` | logout |
|
||||||
|
| GET | `?grid` | ui: show grid-view |
|
||||||
|
| GET | `?imgs` | ui: show grid-view with thumbnails |
|
||||||
|
| GET | `?grid=0` | ui: show list-view |
|
||||||
|
| GET | `?imgs=0` | ui: show list-view |
|
||||||
|
| GET | `?thumb` | ui, grid-mode: show thumbnails |
|
||||||
|
| GET | `?thumb=0` | ui, grid-mode: show icons |
|
||||||
|
|
||||||
|
|
||||||
# event hooks
|
# event hooks
|
||||||
|
|||||||
@@ -58,7 +58,9 @@ currently up to date with [awesome-selfhosted](https://github.com/awesome-selfho
|
|||||||
* [h5ai](#h5ai)
|
* [h5ai](#h5ai)
|
||||||
* [autoindex](#autoindex)
|
* [autoindex](#autoindex)
|
||||||
* [miniserve](#miniserve)
|
* [miniserve](#miniserve)
|
||||||
|
* [pingvin-share](#pingvin-share)
|
||||||
* [briefly considered](#briefly-considered)
|
* [briefly considered](#briefly-considered)
|
||||||
|
* [notes](#notes)
|
||||||
|
|
||||||
|
|
||||||
# recommendations
|
# recommendations
|
||||||
@@ -106,6 +108,7 @@ some softwares not in the matrixes,
|
|||||||
* [h5ai](#h5ai)
|
* [h5ai](#h5ai)
|
||||||
* [autoindex](#autoindex)
|
* [autoindex](#autoindex)
|
||||||
* [miniserve](#miniserve)
|
* [miniserve](#miniserve)
|
||||||
|
* [pingvin-share](#pingvin-share)
|
||||||
|
|
||||||
symbol legend,
|
symbol legend,
|
||||||
* `█` = absolutely
|
* `█` = absolutely
|
||||||
@@ -426,6 +429,10 @@ symbol legend,
|
|||||||
| gimme-that | python | █ mit | 4.8 MB |
|
| gimme-that | python | █ mit | 4.8 MB |
|
||||||
| ass | ts | █ isc | • |
|
| ass | ts | █ isc | • |
|
||||||
| linx | go | ░ gpl3 | 20 MB |
|
| linx | go | ░ gpl3 | 20 MB |
|
||||||
|
| h5ai | php | █ mit | • |
|
||||||
|
| autoindex | go | █ mpl2 | 11 MB |
|
||||||
|
| miniserve | rust | █ mit | 2 MB |
|
||||||
|
| pingvin-share | go | █ bsd2 | 487 MB |
|
||||||
|
|
||||||
* `size` = binary (if available) or installed size of program and its dependencies
|
* `size` = binary (if available) or installed size of program and its dependencies
|
||||||
* copyparty size is for the [standalone python](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) file; the [windows exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is **6 MiB**
|
* copyparty size is for the [standalone python](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) file; the [windows exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) is **6 MiB**
|
||||||
@@ -458,11 +465,13 @@ symbol legend,
|
|||||||
## [hfs3](https://rejetto.com/hfs/)
|
## [hfs3](https://rejetto.com/hfs/)
|
||||||
* nodejs; cross-platform
|
* nodejs; cross-platform
|
||||||
* vfs with gui config, per-volume permissions
|
* vfs with gui config, per-volume permissions
|
||||||
|
* tested locally, v0.53.2 on archlinux
|
||||||
* 🔵 uploads are resumable
|
* 🔵 uploads are resumable
|
||||||
* ⚠️ uploads are not segmented; max upload size 100 MiB on cloudflare
|
* ⚠️ uploads are not segmented; max upload size 100 MiB on cloudflare
|
||||||
* ⚠️ uploads are not accelerated (copyparty is 3x faster across the atlantic)
|
* ⚠️ uploads are not accelerated (copyparty is 3x faster across the atlantic)
|
||||||
* ⚠️ uploads are not integrity-checked
|
* ⚠️ uploads are not integrity-checked
|
||||||
* ⚠️ copies the file after upload; need twice filesize free disk space
|
* ⚠️ copies the file after upload; need twice filesize free disk space
|
||||||
|
* ⚠️ uploading small files is decent; `107` files per sec (copyparty does `670`/sec, 6x faster)
|
||||||
* ⚠️ doesn't support crazy filenames
|
* ⚠️ doesn't support crazy filenames
|
||||||
* ✅ config GUI
|
* ✅ config GUI
|
||||||
* ✅ download counter
|
* ✅ download counter
|
||||||
@@ -471,11 +480,12 @@ symbol legend,
|
|||||||
|
|
||||||
## [nextcloud](https://github.com/nextcloud/server)
|
## [nextcloud](https://github.com/nextcloud/server)
|
||||||
* php, mariadb
|
* php, mariadb
|
||||||
|
* tested locally, [linuxserver/nextcloud](https://hub.docker.com/r/linuxserver/nextcloud) v30.0.2 (sqlite)
|
||||||
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
|
* ⚠️ [isolated on-disk file hierarchy] in per-user folders
|
||||||
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
* not that bad, can probably be remedied with bindmounts or maybe symlinks
|
||||||
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
* ⚠️ uploads not resumable / accelerated / integrity-checked
|
||||||
* ⚠️ on cloudflare: max upload size 100 MiB
|
* ⚠️ on cloudflare: max upload size 100 MiB
|
||||||
* ⚠️ uploading small files is slow; `2.2` files per sec (copyparty does `87`/sec), tested locally with [linuxserver/nextcloud](https://hub.docker.com/r/linuxserver/nextcloud) (sqlite)
|
* ⚠️ uploading small files is slow; `4` files per sec (copyparty does `670`/sec, 160x faster)
|
||||||
* ⚠️ no write-only / upload-only folders
|
 * ⚠️ no write-only / upload-only folders
 * ⚠️ http/webdav only; no ftp, zeroconf
 * ⚠️ less awesome music player

@@ -491,11 +501,12 @@ symbol legend,
 ## [seafile](https://github.com/haiwen/seafile)
 * c, mariadb
+  * tested locally, [official container](https://manual.seafile.com/latest/docker/deploy_seafile_with_docker/) v11.0.13
 * ⚠️ [isolated on-disk file hierarchy](https://manual.seafile.com/maintain/seafile_fsck/), incompatible with other software
   * *much worse than nextcloud* in that regard
 * ⚠️ uploads not resumable / accelerated / integrity-checked
 * ⚠️ on cloudflare: max upload size 100 MiB
-* ⚠️ uploading small files is slow; `2.7` files per sec (copyparty does `87`/sec), tested locally with [official container](https://manual.seafile.com/docker/deploy_seafile_with_docker/)
+* ⚠️ uploading small files is slow; `4.7` files per sec (copyparty does `670`/sec, 140x faster)
 * ⚠️ no write-only / upload-only folders
 * ⚠️ big folders cannot be zip-downloaded
 * ⚠️ http/webdav only; no ftp, zeroconf

@@ -519,9 +530,11 @@ symbol legend,
 ## [dufs](https://github.com/sigoden/dufs)
 * rust; cross-platform (windows, linux, macos)
+  * tested locally, v0.43.0 on archlinux (plain binary)
 * ⚠️ uploads not resumable / accelerated / integrity-checked
 * ⚠️ on cloudflare: max upload size 100 MiB
 * ⚠️ across the atlantic, copyparty is 3x faster
+* ⚠️ uploading small files is decent; `97` files per sec (copyparty does `670`/sec, 7x faster)
 * ⚠️ doesn't support crazy filenames
 * ✅ per-url access control (copyparty is per-volume)
 * 🔵 basic but really snappy ui

@@ -564,10 +577,12 @@ symbol legend,
 ## [filebrowser](https://github.com/filebrowser/filebrowser)
 * go; cross-platform (windows, linux, mac)
+  * tested locally, v2.31.2 on archlinux (plain binary)
 * 🔵 uploads are resumable and segmented
 * 🔵 multiple files are uploaded in parallel, but...
 * ⚠️ big files are not accelerated (copyparty is 5x faster across the atlantic)
 * ⚠️ uploads are not integrity-checked
+* ⚠️ uploading small files is decent; `69` files per sec (copyparty does `670`/sec, 9x faster)
 * ⚠️ http only; no webdav / ftp / zeroconf
 * ⚠️ doesn't support crazy filenames
 * ⚠️ no directory tree nav

@@ -605,6 +620,7 @@ symbol legend,
 * ⚠️ no zeroconf (mdns/ssdp)
 * ⚠️ impractical directory URLs
 * ⚠️ AGPL licensed
+* 🔵 uploading small files is fast; `340` files per sec (copyparty does `670`/sec)
 * 🔵 ftp, ftps, webdav
 * ✅ sftp server
 * ✅ settings gui

@@ -719,7 +735,31 @@ symbol legend,
 * 🔵 upload, tar/zip download, qr-code
 * ✅ faster at loading huge folders
+
+## [pingvin-share](https://github.com/stonith404/pingvin-share)
+* node; linux (docker)
+* mainly for uploads, not a general file server
+* 🔵 uploads are segmented (avoids cloudflare size limit)
+* 🔵 segments are written directly to target file (HDD-friendly)
+* ⚠️ uploads not resumable after a browser or laptop crash
+* ⚠️ uploads are not accelerated / integrity-checked
+* ⚠️ across the atlantic, copyparty is 3x faster
+  * measured with chunksize 96 MiB; pingvin's default 10 MiB is much slower
+* ⚠️ can't upload folders with subfolders
+* ⚠️ no upload ETA
+* 🔵 expiration times, shares, upload-undo
+* ✅ config + user-registration gui
+* ✅ built-in OpenID and LDAP support
+  * 💾 [IdP middleware](https://github.com/9001/copyparty#identity-providers) and config-files
+* ✅ probably more than one person who understands the code


 # briefly considered
 * [pydio](https://github.com/pydio/cells): python/agpl3, looks great, fantastic ux -- but needs mariadb, systemwide install
 * [gossa](https://github.com/pldubouilh/gossa): go/mit, minimalistic, basic file upload, text editor, mkdir and rename (no delete/move)
+
+
+# notes
+
+* high-latency connections (cross-atlantic uploads) can be accurately simulated with `tc qdisc add dev eth0 root netem delay 100ms`
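The small-file throughput claims above are easier to compare as wall-clock time. A quick sanity-check, assuming a hypothetical batch of 10,000 small files and the rates quoted above:

```python
# rough wall-clock time to upload 10k small files at the quoted rates
rates = {"seafile": 4.7, "filebrowser": 69, "dufs": 97, "copyparty": 670}
for name, fps in sorted(rates.items(), key=lambda kv: kv[1]):
    print("%-12s %7.0f s for 10000 files" % (name, 10000 / fps))
```

At 4.7 files/sec the batch takes roughly 35 minutes; at 670/sec, about 15 seconds.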
@@ -25,6 +25,7 @@ classifiers = [
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: Jython",
     "Programming Language :: Python :: Implementation :: PyPy",
@@ -1,11 +1,11 @@
 FROM alpine:3.18
 WORKDIR /z
 ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
-    ver_hashwasm=4.10.0 \
+    ver_hashwasm=4.12.0 \
     ver_marked=4.3.0 \
-    ver_dompf=3.1.7 \
+    ver_dompf=3.2.3 \
     ver_mde=2.18.0 \
-    ver_codemirror=5.65.16 \
+    ver_codemirror=5.65.18 \
     ver_fontawesome=5.13.0 \
     ver_prism=1.29.0 \
     ver_zopfli=1.0.3

@@ -16,7 +16,7 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
 # https://github.com/codemirror/codemirror5/releases
 # https://github.com/cure53/DOMPurify/releases
 # https://github.com/Daninet/hash-wasm/releases
-# https://github.com/openpgpjs/asmcrypto.js
+# https://github.com/openpgpjs/asmcrypto.js/commits/main/
 # https://github.com/google/zopfli/tags
40 scripts/genlic.py Executable file
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+
+import re, os, sys, codecs
+
+outfile = os.path.realpath(sys.argv[1])
+
+os.chdir(os.path.dirname(__file__))
+
+with open("../docs/lics.txt", "rb") as f:
+    s = f.read().decode("utf-8").rstrip("\n") + "\n\n\n\n"
+
+s = re.sub("\nC: ", "\nCopyright (c) ", s)
+s = re.sub("\nL: ", "\nLicense: ", s)
+ret = s.split("\n")
+
+lics = [
+    "MIT License",
+    "BSD 2-Clause License",
+    "BSD 3-Clause License",
+    "SIL Open Font License v1.1",
+]
+
+for n, lic in enumerate(lics, 1):
+    with open("lics/%d.r13" % (n,), "rb") as f:
+        s = f.read().decode("utf-8")
+
+    s = codecs.decode(s, "rot_13")
+    s = "\n--- %s ---\n\n%s" % (lic, s)
+    ret.extend(s.split("\n"))
+
+for n, ln in enumerate(ret):
+    if not ln.startswith("--- "):
+        continue
+
+    pad = " " * ((80 - len(ln)) // 2)
+    ln = "%s\033[07m%s\033[0m" % (pad, ln)
+    ret[n] = ln
+
+ret.append("")
+ret.append("")
+
+with open(outfile, "wb") as f:
+    f.write(("\n".join(ret)).encode("utf-8"))
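The header-centering in the loop above is terse; a minimal sketch of what it produces, using the same ANSI reverse-video escapes (`\033[07m` enables reverse-video, `\033[0m` resets):

```python
# center a section header in an 80-column terminal, reverse-video
ln = "--- MIT License ---"
pad = " " * ((80 - len(ln)) // 2)
print("%s\033[07m%s\033[0m" % (pad, ln))
```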
@@ -1,54 +0,0 @@
-#!/bin/bash
-set -e
-
-outfile="$($(command -v realpath || command -v grealpath) "$1")"
-
-[ -e genlic.sh ] || cd scripts
-[ -e genlic.sh ]
-
-f=../build/mit.txt
-[ -e $f ] ||
-    curl https://opensource.org/licenses/MIT |
-    awk '/div>/{o=0}o>1;o{o++}/;COPYRIGHT HOLDER/{o=1}' |
-    awk '{gsub(/<[^>]+>/,"")};1' >$f
-
-f=../build/isc.txt
-[ -e $f ] ||
-    curl https://opensource.org/licenses/ISC |
-    awk '/div>/{o=0}o>2;o{o++}/;OWNER/{o=1}' |
-    awk '{gsub(/<[^>]+>/,"")};/./{b=0}!/./{b++}b>1{next}1' >$f
-
-f=../build/2bsd.txt
-[ -e $f ] ||
-    curl https://opensource.org/licenses/BSD-2-Clause |
-    awk '/div>/{o=0}o>1;o{o++}/HOLDER/{o=1}' |
-    awk '{gsub(/<[^>]+>/,"")};1' >$f
-
-f=../build/3bsd.txt
-[ -e $f ] ||
-    curl https://opensource.org/licenses/BSD-3-Clause |
-    awk '/div>/{o=0}o>1;o{o++}/HOLDER/{o=1}' |
-    awk '{gsub(/<[^>]+>/,"")};1' >$f
-
-f=../build/ofl.txt
-[ -e $f ] ||
-    curl https://opensource.org/licenses/OFL-1.1 |
-    awk '/PREAMBLE/{o=1}/sil\.org/{o=0}!o{next}/./{printf "%s ",$0;next}{print"\n"}' |
-    awk '{gsub(/<[^>]+>/,"");gsub(/^\s+/,"");gsub(/&/,"\\&")}/./{b=0}!/./{b++}b>1{next}1' >$f
-
-(sed -r 's/^L: /License: /;s/^C: /Copyright (c) /' <../docs/lics.txt
-printf '\n\n--- MIT License ---\n\n'; cat ../build/mit.txt
-printf '\n\n--- ISC License ---\n\n'; cat ../build/isc.txt
-printf '\n\n--- BSD 2-Clause License ---\n\n'; cat ../build/2bsd.txt
-printf '\n\n--- BSD 3-Clause License ---\n\n'; cat ../build/3bsd.txt
-printf '\n\n--- SIL Open Font License v1.1 ---\n\n'; cat ../build/ofl.txt
-) |
-while IFS= read -r x; do
-    [ "${x:0:4}" = "--- " ] || {
-        printf '%s\n' "$x"
-        continue
-    }
-    n=${#x}
-    p=$(( (80-n)/2 ))
-    printf "%${p}s\033[07m%s\033[0m\n" "" "$x"
-done > "$outfile"
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3

 import re
+import socket
 import subprocess as sp


@@ -25,13 +26,20 @@ def readclip():
             return sp.check_output(cmd.split()).decode("utf-8")
         except:
             pass
+    raise Exception("need one of these: xsel xclip pbpaste")


 def cnv(src):
+    hostname = str(socket.gethostname()).split(".")[0]
+
     yield '<html style="background:#222;color:#fff"><body>'
     skip_sfx = False
     in_sfx = 0
     in_salt = 0
+    in_name = 0
+    in_cores = 0
+    in_hash_mt = False
+    in_th_ram_max = 0

     while True:
         ln = next(src)

@@ -43,6 +51,7 @@ def cnv(src):

     for ln in src:
         ln = ln.rstrip()
+        t = ln
         if re.search(r"^<font[^>]+>copyparty v[0-9]", ln):
             in_sfx = 3
         if in_sfx:

@@ -56,11 +65,39 @@ def cnv(src):
             in_salt = 3
         if in_salt:
             in_salt -= 1
-            t = ln
             ln = re.sub(r">[0-9a-zA-Z/+]{24}<", ">24-character-autogenerated<", ln)
             ln = re.sub(r">[0-9a-zA-Z/+]{40}<", ">40-character-autogenerated<", ln)
             if t != ln:
                 in_salt = 0
+        if "--name TXT" in ln:
+            in_name = 3
+        if in_name:
+            in_name -= 1
+            ln = ln.replace(">" + hostname + "<", ">hostname<")
+            if t != ln:
+                in_name = 0
+        if "--hash-mt CORES" in ln:
+            in_cores = 3
+            in_hash_mt = True
+        if "--mtag-mt CORES" in ln or "--th-mt CORES" in ln:
+            in_cores = 3
+        if in_cores:
+            in_cores -= 1
+            zs = ">numCores"
+            if in_hash_mt:
+                zs += " if 5 or less"
+            ln = re.sub(r">[0-9]{1,2}<", zs + "<", ln)
+            if t != ln:
+                in_cores = 0
+                in_hash_mt = False
+        if "--th-ram-max GB" in ln:
+            in_th_ram_max = 3
+        if in_th_ram_max:
+            in_th_ram_max -= 1
+            ln = re.sub(r">[0-9]{1,2}\.[0-9]<", ">dynamic<", ln)
+            if t != ln:
+                in_th_ram_max = 0

         ln = ln.replace(">/home/ed/", ">~/")
         if ln.startswith("0" * 20):
             skip_sfx = True
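The `in_name` / `in_cores` / `in_th_ram_max` counters above all follow the same pattern as the existing `in_salt` logic: a trigger line opens a 3-line window, each following line decrements it, and the first successful substitution closes it early. A minimal standalone sketch of that pattern (hypothetical input lines, not the real `--help` output):

```python
# 3-line scan window: after the trigger, only the next few lines are
# eligible for the substitution, and the first hit closes the window
lines = ["--name TXT", "irrelevant", ">myhost<", ">myhost< (too late)"]
in_name = 0
for ln in lines:
    if "--name TXT" in ln:
        in_name = 3
    if in_name:
        in_name -= 1
        t, ln = ln, ln.replace(">myhost<", ">hostname<")
        if t != ln:
            in_name = 0  # substitution done; stop scanning
    print(ln)
```

The last line prints unchanged, showing that hits outside the window are deliberately ignored.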
@@ -6,6 +6,10 @@ s`/home/ed/`~/`;
 s/uuid:[0-9a-f-]{36}/autogenerated/;
 s/(-salt SALT.*default: )[0-9a-zA-Z/+]{24}\)/\124-character-autogenerated)/;
 s/(-salt SALT.*default: )[0-9a-zA-Z/+]{40}\)/\140-character-autogenerated)/;
+s/(--name TXT.*default: )[^)]+/\1hostname/;
+s/(--hash-mt CORES.*default: )[0-9]+/\1numCores if 5 or less/;
+s/(--mtag-mt|th-mt)( CORES.*default: )[0-9]+/\1\2numCores/;
+s/(--th-ram-max GB.*default: )[0-9\.]+/\1dynamic/;
 ' | awk '
 /^copyparty/{a=1} !a{next}
 /^0{20}/{b=1} b&&/^copyparty v[0-9]+\./{s=3}
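One subtlety in the sed lines above: `\124-character-autogenerated)` is the backreference `\1` followed by the literal text `24-character-autogenerated)`; sed only supports single-digit backreferences, so there is no ambiguity. The equivalent in Python needs the explicit `\g<1>` form to keep the `2` from being read as part of the group number (the help line below is a hypothetical example matching the targeted pattern):

```python
import re
ln = "-salt SALT  file-hash salt (default: abcdefghijklmnopqrstuvwx)"
print(re.sub(r"(-salt SALT.*default: )[0-9a-zA-Z/+]{24}\)",
             r"\g<1>24-character-autogenerated)", ln))
# -> -salt SALT  file-hash salt (default: 24-character-autogenerated)
```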
5 scripts/lics/1.r13 Normal file
@@ -0,0 +1,5 @@
+Crezvffvba vf urerol tenagrq, serr bs punetr, gb nal crefba bognvavat n pbcl bs guvf fbsgjner naq nffbpvngrq qbphzragngvba svyrf (gur "Fbsgjner"), gb qrny va gur Fbsgjner jvgubhg erfgevpgvba, vapyhqvat jvgubhg yvzvgngvba gur evtugf gb hfr, pbcl, zbqvsl, zretr, choyvfu, qvfgevohgr, fhoyvprafr, naq/be fryy pbcvrf bs gur Fbsgjner, naq gb crezvg crefbaf gb jubz gur Fbsgjner vf sheavfurq gb qb fb, fhowrpg gb gur sbyybjvat pbaqvgvbaf:
+
+Gur nobir pbclevtug abgvpr naq guvf crezvffvba abgvpr funyy or vapyhqrq va nyy pbcvrf be fhofgnagvny cbegvbaf bs gur Fbsgjner.
+
+GUR FBSGJNER VF CEBIVQRQ "NF VF", JVGUBHG JNEENAGL BS NAL XVAQ, RKCERFF BE VZCYVRQ, VAPYHQVAT OHG ABG YVZVGRQ GB GUR JNEENAGVRF BS ZREPUNAGNOVYVGL, SVGARFF SBE N CNEGVPHYNE CHECBFR NAQ ABAVASEVATRZRAG. VA AB RIRAG FUNYY GUR NHGUBEF BE PBCLEVTUG UBYQREF OR YVNOYR SBE NAL PYNVZ, QNZNTRF BE BGURE YVNOVYVGL, JURGURE VA NA NPGVBA BS PBAGENPG, GBEG BE BGUREJVFR, NEVFVAT SEBZ, BHG BS BE VA PBAARPGVBA JVGU GUR FBSGJNER BE GUR HFR BE BGURE QRNYVATF VA GUR FBSGJNER.
7 scripts/lics/2.r13 Normal file
@@ -0,0 +1,7 @@
+Erqvfgevohgvba naq hfr va fbhepr naq ovanel sbezf, jvgu be jvgubhg zbqvsvpngvba, ner crezvggrq cebivqrq gung gur sbyybjvat pbaqvgvbaf ner zrg:
+
+1. Erqvfgevohgvbaf bs fbhepr pbqr zhfg ergnva gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre.
+
+2. Erqvfgevohgvbaf va ovanel sbez zhfg ercebqhpr gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre va gur qbphzragngvba naq/be bgure zngrevnyf cebivqrq jvgu gur qvfgevohgvba.
+
+GUVF FBSGJNER VF CEBIVQRQ OL GUR PBCLEVTUG UBYQREF NAQ PBAGEVOHGBEF "NF VF" NAQ NAL RKCERFF BE VZCYVRQ JNEENAGVRF, VAPYHQVAT, OHG ABG YVZVGRQ GB, GUR VZCYVRQ JNEENAGVRF BS ZREPUNAGNOVYVGL NAQ SVGARFF SBE N CNEGVPHYNE CHECBFR NER QVFPYNVZRQ. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE BE PBAGEVOHGBEF OR YVNOYR SBE NAL QVERPG, VAQVERPG, VAPVQRAGNY, FCRPVNY, RKRZCYNEL, BE PBAFRDHRAGVNY QNZNTRF (VAPYHQVAT, OHG ABG YVZVGRQ GB, CEBPHERZRAG BS FHOFGVGHGR TBBQF BE FREIVPRF; YBFF BS HFR, QNGN, BE CEBSVGF; BE OHFVARFF VAGREEHCGVBA) UBJRIRE PNHFRQ NAQ BA NAL GURBEL BS YVNOVYVGL, JURGURE VA PBAGENPG, FGEVPG YVNOVYVGL, BE GBEG (VAPYHQVAT ARTYVTRAPR BE BGUREJVFR) NEVFVAT VA NAL JNL BHG BS GUR HFR BS GUVF FBSGJNER, RIRA VS NQIVFRQ BS GUR CBFFVOVYVGL BS FHPU QNZNTR.
9 scripts/lics/3.r13 Normal file
@@ -0,0 +1,9 @@
+Erqvfgevohgvba naq hfr va fbhepr naq ovanel sbezf, jvgu be jvgubhg zbqvsvpngvba, ner crezvggrq cebivqrq gung gur sbyybjvat pbaqvgvbaf ner zrg:
+
+1. Erqvfgevohgvbaf bs fbhepr pbqr zhfg ergnva gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre.
+
+2. Erqvfgevohgvbaf va ovanel sbez zhfg ercebqhpr gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre va gur qbphzragngvba naq/be bgure zngrevnyf cebivqrq jvgu gur qvfgevohgvba.
+
+3. Arvgure gur anzr bs gur pbclevtug ubyqre abe gur anzrf bs vgf pbagevohgbef znl or hfrq gb raqbefr be cebzbgr cebqhpgf qrevirq sebz guvf fbsgjner jvgubhg fcrpvsvp cevbe jevggra crezvffvba.
+
+GUVF FBSGJNER VF CEBIVQRQ OL GUR PBCLEVTUG UBYQREF NAQ PBAGEVOHGBEF "NF VF" NAQ NAL RKCERFF BE VZCYVRQ JNEENAGVRF, VAPYHQVAT, OHG ABG YVZVGRQ GB, GUR VZCYVRQ JNEENAGVRF BS ZREPUNAGNOVYVGL NAQ SVGARFF SBE N CNEGVPHYNE CHECBFR NER QVFPYNVZRQ. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE BE PBAGEVOHGBEF OR YVNOYR SBE NAL QVERPG, VAQVERPG, VAPVQRAGNY, FCRPVNY, RKRZCYNEL, BE PBAFRDHRAGVNY QNZNTRF (VAPYHQVAT, OHG ABG YVZVGRQ GB, CEBPHERZRAG BS FHOFGVGHGR TBBQF BE FREIVPRF; YBFF BS HFR, QNGN, BE CEBSVGF; BE OHFVARFF VAGREEHCGVBA) UBJRIRE PNHFRQ NAQ BA NAL GURBEL BS YVNOVYVGL, JURGURE VA PBAGENPG, FGEVPG YVNOVYVGL, BE GBEG (VAPYHQVAT ARTYVTRAPR BE BGUREJVFR) NEVFVAT VA NAL JNL BHG BS GUR HFR BS GUVF FBSGJNER, RIRA VS NQIVFRQ BS GUR CBFFVOVYVGL BS FHPU QNZNTR.
39 scripts/lics/4.r13 Normal file
@@ -0,0 +1,39 @@
+CERNZOYR
+
+Gur tbnyf bs gur Bcra Sbag Yvprafr (BSY) ner gb fgvzhyngr jbeyqjvqr qrirybczrag bs pbyynobengvir sbag cebwrpgf, gb fhccbeg gur sbag perngvba rssbegf bs npnqrzvp naq yvathvfgvp pbzzhavgvrf, naq gb cebivqr n serr naq bcra senzrjbex va juvpu sbagf znl or funerq naq vzcebirq va cnegarefuvc jvgu bguref.
+
+Gur BSY nyybjf gur yvprafrq sbagf gb or hfrq, fghqvrq, zbqvsvrq naq erqvfgevohgrq serryl nf ybat nf gurl ner abg fbyq ol gurzfryirf. Gur sbagf, vapyhqvat nal qrevingvir jbexf, pna or ohaqyrq, rzorqqrq, erqvfgevohgrq naq/be fbyq jvgu nal fbsgjner cebivqrq gung nal erfreirq anzrf ner abg hfrq ol qrevingvir jbexf. Gur sbagf naq qrevingvirf, ubjrire, pnaabg or eryrnfrq haqre nal bgure glcr bs yvprafr. Gur erdhverzrag sbe sbagf gb erznva haqre guvf yvprafr qbrf abg nccyl gb nal qbphzrag perngrq hfvat gur sbagf be gurve qrevingvirf.
+
+QRSVAVGVBAF
+
+"Sbag Fbsgjner" ersref gb gur frg bs svyrf eryrnfrq ol gur Pbclevtug Ubyqre(f) haqre guvf yvprafr naq pyrneyl znexrq nf fhpu. Guvf znl vapyhqr fbhepr svyrf, ohvyq fpevcgf naq qbphzragngvba.
+
+"Erfreirq Sbag Anzr" ersref gb nal anzrf fcrpvsvrq nf fhpu nsgre gur pbclevtug fgngrzrag(f).
+
+"Bevtvany Irefvba" ersref gb gur pbyyrpgvba bs Sbag Fbsgjner pbzcbaragf nf qvfgevohgrq ol gur Pbclevtug Ubyqre(f).
+
+"Zbqvsvrq Irefvba" ersref gb nal qrevingvir znqr ol nqqvat gb, qryrgvat, be fhofgvghgvat - va cneg be va jubyr - nal bs gur pbzcbaragf bs gur Bevtvany Irefvba, ol punatvat sbezngf be ol cbegvat gur Sbag Fbsgjner gb n arj raivebazrag.
+
+"Nhgube" ersref gb nal qrfvtare, ratvarre, cebtenzzre, grpuavpny jevgre be bgure crefba jub pbagevohgrq gb gur Sbag Fbsgjner.
+
+CREZVFFVBA & PBAQVGVBAF
+
+Crezvffvba vf urerol tenagrq, serr bs punetr, gb nal crefba bognvavat n pbcl bs gur Sbag Fbsgjner, gb hfr, fghql, pbcl, zretr, rzorq, zbqvsl, erqvfgevohgr, naq fryy zbqvsvrq naq hazbqvsvrq pbcvrf bs gur Sbag Fbsgjner, fhowrpg gb gur sbyybjvat pbaqvgvbaf:
+
+1) Arvgure gur Sbag Fbsgjner abe nal bs vgf vaqvivqhny pbzcbaragf, va Bevtvany be Zbqvsvrq Irefvbaf, znl or fbyq ol vgfrys.
+
+2) Bevtvany be Zbqvsvrq Irefvbaf bs gur Sbag Fbsgjner znl or ohaqyrq, erqvfgevohgrq naq/be fbyq jvgu nal fbsgjner, cebivqrq gung rnpu pbcl pbagnvaf gur nobir pbclevtug abgvpr naq guvf yvprafr. Gurfr pna or vapyhqrq rvgure nf fgnaq-nybar grkg svyrf, uhzna-ernqnoyr urnqref be va gur nccebcevngr znpuvar-ernqnoyr zrgnqngn svryqf jvguva grkg be ovanel svyrf nf ybat nf gubfr svryqf pna or rnfvyl ivrjrq ol gur hfre.
+
+3) Ab Zbqvsvrq Irefvba bs gur Sbag Fbsgjner znl hfr gur Erfreirq Sbag Anzr(f) hayrff rkcyvpvg jevggra crezvffvba vf tenagrq ol gur pbeerfcbaqvat Pbclevtug Ubyqre. Guvf erfgevpgvba bayl nccyvrf gb gur cevznel sbag anzr nf cerfragrq gb gur hfref.
+
+4) Gur anzr(f) bs gur Pbclevtug Ubyqre(f) be gur Nhgube(f) bs gur Sbag Fbsgjner funyy abg or hfrq gb cebzbgr, raqbefr be nqiregvfr nal Zbqvsvrq Irefvba, rkprcg gb npxabjyrqtr gur pbagevohgvba(f) bs gur Pbclevtug Ubyqre(f) naq gur Nhgube(f) be jvgu gurve rkcyvpvg jevggra crezvffvba.
+
+5) Gur Sbag Fbsgjner, zbqvsvrq be hazbqvsvrq, va cneg be va jubyr, zhfg or qvfgevohgrq ragveryl haqre guvf yvprafr, naq zhfg abg or qvfgevohgrq haqre nal bgure yvprafr. Gur erdhverzrag sbe sbagf gb erznva haqre guvf yvprafr qbrf abg nccyl gb nal qbphzrag perngrq hfvat gur Sbag Fbsgjner.
+
+GREZVANGVBA
+
+Guvf yvprafr orpbzrf ahyy naq ibvq vs nal bs gur nobir pbaqvgvbaf ner abg zrg.
+
+QVFPYNVZRE
+
+GUR SBAG FBSGJNER VF CEBIVQRQ "NF VF", JVGUBHG JNEENAGL BS NAL XVAQ, RKCERFF BE VZCYVRQ, VAPYHQVAT OHG ABG YVZVGRQ GB NAL JNEENAGVRF BS ZREPUNAGNOVYVGL, SVGARFF SBE N CNEGVPHYNE CHECBFR NAQ ABAVASEVATRZRAG BS PBCLEVTUG, CNGRAG, GENQRZNEX, BE BGURE EVTUG. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE OR YVNOYR SBE NAL PYNVZ, QNZNTRF BE BGURE YVNOVYVGL, VAPYHQVAT NAL TRARENY, FCRPVNY, VAQVERPG, VAPVQRAGNY, BE PBAFRDHRAGVNY QNZNTRF, JURGURE VA NA NPGVBA BS PBAGENPG, GBEG BE BGUREJVFR, NEVFVAT SEBZ, BHG BS GUR HFR BE VANOVYVGL GB HFR GUR SBAG FBSGJNER BE SEBZ BGURE QRNYVATF VA GUR SBAG FBSGJNER.
3 scripts/lics/README.md Normal file
@@ -0,0 +1,3 @@
+these are foss licenses in rot13 so scanners don't think copyparty isn't mit
+
+1=mit 2=2bsd 3=3bsd 4=ofl
12 scripts/lics/rot.py Executable file
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import os, codecs
+
+for fn in os.listdir("."):
+    if not fn.endswith(".txt"):
+        continue
+    with open(fn, "rb") as f:
+        s = f.read().decode("utf-8")
+    b = codecs.encode(s, "rot_13").encode("utf-8")
+    with open(fn.replace("txt", "r13"), "wb") as f:
+        f.write(b)
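`rot.py` relies on rot13 being its own inverse, which is also why `genlic.py` can decode the `.r13` files with a single `codecs.decode`. A quick round-trip check:

```python
import codecs

enc = codecs.encode("MIT License", "rot_13")
print(enc)                           # ZVG Yvprafr
print(codecs.decode(enc, "rot_13"))  # MIT License; rot13 is an involution
```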
@@ -100,14 +100,13 @@ load_env || {
 # cleanup
 rm -rf unt build/pypi

-# grab licenses
-scripts/genlic.sh copyparty/res/COPYING.txt
+# generate license list
+scripts/genlic.py copyparty/res/COPYING.txt

 # clean-ish packaging env
 rm -rf build/pypi
 mkdir -p build/pypi
 cp -pR pyproject.toml README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/
-tar -c docs/lics.txt scripts/genlic.sh build/*.txt | tar -xC build/pypi/
 cd build/pypi

 # delete junk
@@ -308,8 +308,7 @@ necho() {
     # remove type hints before build instead
     (cd copyparty; PYTHONPATH="..:$PYTHONPATH" "$pybin" ../../scripts/strip_hints/a.py; rm uh)

-    licfile=$(realpath copyparty/res/COPYING.txt)
-    (cd ../scripts; ./genlic.sh "$licfile")
+    (cd ../scripts; ./genlic.py ../copyparty/res/COPYING.txt)
 }

 [ ! -e copyparty/web/deps/mini-fa.woff ] && [ $dl_wd ] && {
@@ -64,7 +64,7 @@ git archive hovudstraum | tar -xC "$rls_dir"
 echo ">>> export untracked deps"
 tar -c copyparty/web/deps | tar -xC "$rls_dir"

-scripts/genlic.sh "$rls_dir/copyparty/res/COPYING.txt"
+scripts/genlic.py "$rls_dir/copyparty/res/COPYING.txt"

 cd "$rls_dir"
 find -type d -exec chmod 755 '{}' \+
@@ -30,4 +30,4 @@ d1420c8417fad7888766dd26b9706a87c63e8f33dceeb8e26d0056d5127b0b3ed9272e44b4b76113
 2be320b4191f208cdd6af183c77ba2cf460ea52164ee45ac3ff17d6dfa57acd9deff016636c2dd42a21f4f6af977d5f72df7dacf599bebcf41757272354d14c1 pillow-10.4.0-cp312-cp312-win_amd64.whl
 896ddddbd4b85e86e0600cb65eb4c07fbc7f3802d47e7f660411e20b5500831469b97ed4770f25820f4e75cbfac40308da624fd86d4f62e578149d5c276a9cde pyinstaller-6.10.0-py3-none-win_amd64.whl
 873781decaeef07f6a79b0ed8b9f35f3fa534a1ea0d866991e40278a10818fa5b60c70b0d5828971b045364f1099694cd1e5d5d60d480acb93fcfbfbced4a09e pyinstaller_hooks_contrib-2024.8-py3-none-any.whl
-912b710007c7b29f29c0097aff8f825412166eed7777a7cef135b14316e8fff31b5df56d26d835d8ca090468cc0e914730f201a56caa3dd6dbef2f91088942b1 python-3.12.7-amd64.exe
+0f623c9ab52d050283e97a986ba626d86b04cd02fa7ffdf352740576940b142b264709abadb5d875c90f625b28103d7210b900e0d77f12c1c140108bd2a159aa python-3.12.8-amd64.exe
@@ -94,6 +94,7 @@ copyparty/web/deps/prismd.css,
 copyparty/web/deps/scp.woff2,
 copyparty/web/deps/sha512.ac.js,
 copyparty/web/deps/sha512.hw.js,
+copyparty/web/iiam.gif,
 copyparty/web/md.css,
 copyparty/web/md.html,
 copyparty/web/md.js,

@@ -104,6 +105,9 @@ copyparty/web/mde.html,
 copyparty/web/mde.js,
 copyparty/web/msg.css,
 copyparty/web/msg.html,
+copyparty/web/rups.css,
+copyparty/web/rups.html,
+copyparty/web/rups.js,
 copyparty/web/shares.css,
 copyparty/web/shares.html,
 copyparty/web/shares.js,
@@ -54,7 +54,7 @@ var tl_cpanel = {
     "cc1": "other stuff:",
     "h1": "disable k304", // TLNote: "j1" explains what k304 is
     "i1": "enable k304",
-    "j1": "enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general",
+    "j1": "enabling k304 will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general",
     "k1": "reset client settings",
     "l1": "login for more:",
     "m1": "welcome back,", // TLNote: "welcome back, USERNAME"

@@ -76,6 +76,11 @@ var tl_cpanel = {
     "ta2": "repeat to confirm new password:",
     "ta3": "found a typo; please try again",
     "aa1": "incoming files:",
+    "ab1": "disable no304",
+    "ac1": "enable no304",
+    "ad1": "enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!",
+    "ae1": "active downloads:",
+    "af1": "show recent uploads",
 },
 };
@@ -118,8 +123,9 @@ var tl_browser = {
     ["T", "toggle thumbnails / icons"],
     ["🡅 A/D", "thumbnail size"],
     ["ctrl-K", "delete selected"],
-    ["ctrl-X", "cut selected"],
-    ["ctrl-V", "paste into folder"],
+    ["ctrl-X", "cut selection to clipboard"],
+    ["ctrl-C", "copy selection to clipboard"],
+    ["ctrl-V", "paste (move/copy) here"],
     ["Y", "download selected"],
     ["F2", "rename selected"],

@@ -164,7 +170,7 @@ var tl_browser = {
     ["I/K", "prev/next file"],
     ["M", "close textfile"],
     ["E", "edit textfile"],
-    ["S", "select file (for cut/rename)"],
+    ["S", "select file (for cut/copy/rename)"],
     ]
 ],

@@ -214,6 +220,7 @@ var tl_browser = {
     "wt_ren": "rename selected items$NHotkey: F2",
     "wt_del": "delete selected items$NHotkey: ctrl-K",
     "wt_cut": "cut selected items <small>(then paste somewhere else)</small>$NHotkey: ctrl-X",
+    "wt_cpy": "copy selected items to clipboard$N(to paste them somewhere else)$NHotkey: ctrl-C",
     "wt_pst": "paste a previously cut / copied selection$NHotkey: ctrl-V",
     "wt_selall": "select all files$NHotkey: ctrl-A (when file focused)",
     "wt_selinv": "invert selection",

@@ -285,6 +292,7 @@ var tl_browser = {
     "cl_uopts": "up2k switches",
     "cl_favico": "favicon",
     "cl_bigdir": "big dirs",
+    "cl_hsort": "#sort",
     "cl_keytype": "key notation",
     "cl_hiddenc": "hidden columns",
     "cl_hidec": "hide",

@@ -327,6 +335,7 @@ var tl_browser = {

     "cdt_lim": "max number of files to show in a folder",
     "cdt_ask": "when scrolling to the bottom,$Ninstead of loading more files,$Nask what to do",
+    "cdt_hsort": "how many sorting rules (<code>,sorthref</code>) to include in media-URLs. Setting this to 0 will also ignore sorting-rules included in media links when clicking them",

     "tt_entree": "show navpane (directory tree sidebar)$NHotkey: B",
     "tt_detree": "show breadcrumbs$NHotkey: B",

@@ -408,6 +417,7 @@ var tl_browser = {
     "fr_emore": "select at least one item to rename",
     "fd_emore": "select at least one item to delete",
     "fc_emore": "select at least one item to cut",
+    "fcp_emore": "select at least one item to copy to clipboard",

     "fs_sc": "share the folder you're in",
     "fs_ss": "share the selected files",

@@ -460,16 +470,28 @@ var tl_browser = {
     "fc_ok": "cut {0} items",
     "fc_warn": 'cut {0} items\n\nbut: only <b>this</b> browser-tab can paste them\n(since the selection is so absolutely massive)',

-    "fp_ecut": "first cut some files / folders to paste / move\n\nnote: you can cut / paste across different browser tabs",
-    "fp_ename": "these {0} items cannot be moved here (names already exist):",
+    "fcc_ok": "copied {0} items to clipboard",
+    "fcc_warn": 'copied {0} items to clipboard\n\nbut: only <b>this</b> browser-tab can paste them\n(since the selection is so absolutely massive)',
+
+    "fp_apply": "use these names",
+    "fp_ecut": "first cut or copy some files / folders to paste / move\n\nnote: you can cut / paste across different browser tabs",
+    "fp_ename": "{0} items cannot be moved here because the names are already taken. Give them new names below to continue, or blank the name to skip them:",
+    "fcp_ename": "{0} items cannot be copied here because the names are already taken. Give them new names below to continue, or blank the name to skip them:",
+    "fp_emore": "there are still some filename collisions left to fix",
     "fp_ok": "move OK",
+    "fcp_ok": "copy OK",
     "fp_busy": "moving {0} items...\n\n{1}",
+    "fcp_busy": "copying {0} items...\n\n{1}",
     "fp_err": "move failed:\n",
+    "fcp_err": "copy failed:\n",
     "fp_confirm": "move these {0} items here?",
+    "fcp_confirm": "copy these {0} items here?",
     "fp_etab": 'failed to read clipboard from other browser tab',
     "fp_name": "uploading a file from your device. Give it a name:",
     "fp_both_m": '<h6>choose what to paste</h6><code>Enter</code> = Move {0} files from «{1}»\n<code>ESC</code> = Upload {2} files from your device',
+    "fcp_both_m": '<h6>choose what to paste</h6><code>Enter</code> = Copy {0} files from «{1}»\n<code>ESC</code> = Upload {2} files from your device',
     "fp_both_b": '<a href="#" id="modal-ok">Move</a><a href="#" id="modal-ng">Upload</a>',
+    "fcp_both_b": '<a href="#" id="modal-ok">Copy</a><a href="#" id="modal-ng">Upload</a>',

     "mk_noname": "type a name into the text field on the left before you do that :p",

@@ -481,7 +503,7 @@ var tl_browser = {
     "tvt_dl": "download this file$NHotkey: Y\">💾 download",
     "tvt_prev": "show previous document$NHotkey: i\">⬆ prev",
     "tvt_next": "show next document$NHotkey: K\">⬇ next",
-    "tvt_sel": "select file ( for cut / delete / ... )$NHotkey: S\">sel",
+    "tvt_sel": "select file ( for cut / copy / delete / ... )$NHotkey: S\">sel",
     "tvt_edit": "open file in text editor$NHotkey: E\">✏️ edit",

     "gt_vau": "don't show videos, just play the audio\">🎧",

@@ -587,6 +609,7 @@ var tl_browser = {
     "u_pott": "<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>",
     "u_ever": "this is the basic uploader; up2k needs at least<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
     "u_su2k": 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better',
+    "u_uput": 'optimize for speed (skip checksum)',
     "u_ewrite": 'you do not have write-access to this folder',
     "u_eread": 'you do not have read-access to this folder',
     "u_enoi": 'file-search is not enabled in server config',

@@ -605,6 +628,7 @@ var tl_browser = {
     "u_hashdone": 'hashing done',
     "u_hashing": 'hash',
     "u_hs": 'handshaking...',
+    "u_started": "the files are now being uploaded; see [🚀]",
     "u_dupdefer": "duplicate; will be processed after all other files",
     "u_actx": "click this text to prevent loss of<br />performance when switching to other windows/tabs",
     "u_fixed": "OK! Fixed it 👍",

@@ -640,6 +664,7 @@ var tl_browser = {
     "ue_la": 'you are currently logged in as "{0}"',
     "ue_sr": 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the magnifying glass 🔎 (next to the big SEARCH button), and try uploading again\n\nsorry',
     "ue_ta": 'try uploading again, it should work now',
+    "ue_ab": "this file is already being uploaded into another folder, and that upload must be completed before the file can be uploaded elsewhere.\n\nYou can abort and forget the initial upload using the top-left 🧯",
     "ur_1uo": "OK: File uploaded successfully",
     "ur_auo": "OK: All {0} files uploaded successfully",
     "ur_1so": "OK: File found on server",
@@ -89,7 +89,7 @@ var tl_cpanel = {{
     "cc1": "other stuff:",
     "h1": "disable k304", // TLNote: "j1" explains what k304 is
     "i1": "enable k304",
-    "j1": "enabling this will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general",
+    "j1": "enabling k304 will disconnect your client on every HTTP 304, which can prevent some buggy proxies from getting stuck (suddenly not loading pages), <em>but</em> it will also make things slower in general",
     "k1": "reset client settings",
     "l1": "login for more:",
     "m1": "welcome back,", // TLNote: "welcome back, USERNAME"

@@ -111,6 +111,11 @@ var tl_cpanel = {{
     "ta2": "repeat to confirm new password:",
     "ta3": "found a typo; please try again",
     "aa1": "incoming files:",
+    "ab1": "disable no304",
+    "ac1": "enable no304",
+    "ad1": "enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!",
+    "ae1": "active downloads:",
+    "af1": "show recent uploads",
 }},
 }};
1 setup.py
@@ -108,6 +108,7 @@ args = {
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "Programming Language :: Python :: Implementation :: CPython",
     "Programming Language :: Python :: Implementation :: Jython",
     "Programming Language :: Python :: Implementation :: PyPy",
110 tests/test_cp.py Normal file
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
+import os
+import shutil
+import tempfile
+import unittest
+from itertools import product
+
+from copyparty.authsrv import AuthSrv
+from copyparty.httpcli import HttpCli
+from tests import util as tu
+from tests.util import Cfg
+
+
+class TestDedup(tu.TC):
+    def setUp(self):
+        self.td = tu.get_ramdisk()
+
+    def tearDown(self):
+        if self.conn:
+            self.conn.shutdown()
+        os.chdir(tempfile.gettempdir())
+        shutil.rmtree(self.td)
+
+    def reset(self):
+        td = os.path.join(self.td, "vfs")
+        if os.path.exists(td):
+            shutil.rmtree(td)
+        os.mkdir(td)
+        os.chdir(td)
+        for a in "abc":
+            os.mkdir(a)
+            for b in "fg":
+                d = "%s/%s%s" % (a, a, b)
+                os.mkdir(d)
+                for fn in "x":
+                    fp = "%s/%s%s%s" % (d, a, b, fn)
+                    with open(fp, "wb") as f:
+                        f.write(fp.encode("utf-8"))
+        return td
+
+    def cinit(self):
+        if self.conn:
+            self.fstab = self.conn.hsrv.hub.up2k.fstab
+            self.conn.hsrv.hub.up2k.shutdown()
+        self.asrv = AuthSrv(self.args, self.log)
+        self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"", True)
+        if self.fstab:
+            self.conn.hsrv.hub.up2k.fstab = self.fstab
+
+    def test(self):
+        tc_dedup = ["sym", "no"]
+        vols = [".::A", "a/af:a/af:r", "b:a/b:r"]
+        tcs = [
+            "/a?copy=/c/a /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /b/bf/bfx /b/bg/bgx /c/a/af/afx /c/a/ag/agx /c/a/b/bf/bfx /c/a/b/bg/bgx /c/cf/cfx /c/cg/cgx",
+            "/b?copy=/d /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /b/bf/bfx /b/bg/bgx /c/cf/cfx /c/cg/cgx /d/bf/bfx /d/bg/bgx",
+            "/b/bf?copy=/d /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /b/bf/bfx /b/bg/bgx /c/cf/cfx /c/cg/cgx /d/bfx",
+            "/a/af?copy=/d /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /b/bf/bfx /b/bg/bgx /c/cf/cfx /c/cg/cgx /d/afx",
+            "/a/af?copy=/ /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /afx /b/bf/bfx /b/bg/bgx /c/cf/cfx /c/cg/cgx",
+            "/a/af/afx?copy=/afx /a/af/afx /a/ag/agx /a/b/bf/bfx /a/b/bg/bgx /afx /b/bf/bfx /b/bg/bgx /c/cf/cfx /c/cg/cgx",
+        ]
+
+        self.conn = None
+        self.fstab = None
+        for dedup, act_exp in product(tc_dedup, tcs):
+            action, expect = act_exp.split(" ", 1)
+            t = "dedup:%s action:%s" % (dedup, action)
+            print("\n\n\033[0;7m# ", t, "\033[0m")
+
+            ka = {"dav_inf": True}
+            if dedup == "hard":
+                ka["hardlink"] = True
+            elif dedup == "no":
+                ka["no_dedup"] = True
+
+            self.args = Cfg(v=vols, a=[], **ka)
+            self.reset()
+            self.cinit()
+
+            self.do_cp(action)
+            zs = self.propfind()
+
+            fns = " ".join(zs[1])
+            self.assertEqual(expect, fns)
+
+    def do_cp(self, action):
+        hdr = "POST %s HTTP/1.1\r\nConnection: close\r\nContent-Length: 0\r\n\r\n"
+        buf = (hdr % (action,)).encode("utf-8")
+        print("CP [%s]" % (action,))
+        HttpCli(self.conn.setbuf(buf)).run()
+        ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
+        print("CP <-- ", ret)
+        self.assertStart("HTTP/1.1 201 Created\r", ret[0])
+        self.assertEqual("k\r\n", ret[1])
+        return ret
+
+    def propfind(self):
+        h = "PROPFIND / HTTP/1.1\r\nConnection: close\r\n\r\n"
+        HttpCli(self.conn.setbuf(h.encode("utf-8"))).run()
+        h, t = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
+        fns = t.split("<D:response><D:href>")[1:]
+        fns = [x.split("</D", 1)[0] for x in fns]
+        fns = [x for x in fns if not x.endswith("/")]
+        fns.sort()
+        return h, fns
+
+    def log(self, src, msg, c=0):
+        print(msg)
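The `propfind()` helper above recovers the file list by string-splitting the WebDAV multistatus XML rather than parsing it. A minimal illustration with a hand-written (hypothetical) response body:

```python
# extract hrefs the same way propfind() does, then drop collections
body = (
    "<D:multistatus><D:response><D:href>/a/</D:href>...</D:response>"
    "<D:response><D:href>/a/af/afx</D:href>...</D:response></D:multistatus>"
)
fns = [x.split("</D", 1)[0] for x in body.split("<D:response><D:href>")[1:]]
fns = [x for x in fns if not x.endswith("/")]
print(fns)  # ['/a/af/afx']
```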
@@ -15,7 +15,7 @@ from tests import util as tu
 from tests.util import Cfg


-class TestDedup(unittest.TestCase):
+class TestDedup(tu.TC):
     def setUp(self):
         self.td = tu.get_ramdisk()

@@ -34,6 +34,8 @@ class TestDedup(unittest.TestCase):
     ]

     def tearDown(self):
+        if self.conn:
+            self.conn.shutdown()
         os.chdir(tempfile.gettempdir())
         shutil.rmtree(self.td)

@@ -71,7 +73,7 @@ class TestDedup(unittest.TestCase):
         sfn, hs = self.do_post_hs(dn, fns[0], f1, True)
         for fn in fns[1:]:
             h, b = self.handshake(dn, fn, f1)
-            self.assertIn(" 422 Unpro", h)
+            self.assertStart("HTTP/1.1 422 Unpro", h)
             self.assertIn("a different location;", b)
         self.do_post_data(dn, fns[0], f1, True, sfn, hs)
         if not e2d:

@@ -156,10 +158,10 @@ class TestDedup(unittest.TestCase):
         rm = cms[irm]
         dn, fn, _ = rm
         h, b = self.curl("%s/%s?delete" % (dn, fn), meth="POST")
-        self.assertIn(" 200 OK", h)
+        self.assertStart("HTTP/1.1 200 OK", h)
         self.assertIn("deleted 1 files", b)
         h, b = self.curl("%s/%s" % (dn, fn))
-        self.assertIn(" 404 Not Fo", h)
+        self.assertStart("HTTP/1.1 404 Not Fo", h)
         for cm in cms:
             if cm == rm:
                 continue
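`tu.TC` and its `assertStart` helper are not shown in this diff; judging from how the tests use them, a plausible sketch (an assumption, not the real tests/util.py) would be:

```python
import unittest

class TC(unittest.TestCase):
    # hypothetical reconstruction: assert that a response begins with the
    # given prefix, e.g. the full status line "HTTP/1.1 200 OK"
    def assertStart(self, prefix, text):
        if not text.startswith(prefix):
            raise AssertionError("%r does not start with %r" % (text[:72], prefix))
```

Matching the full status line from the start of the response is stricter than the old `assertIn(" 200 OK", h)`, which could also match a header or body that merely contained the substring.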
@@ -17,6 +17,11 @@ from copyparty.up2k import Up2k
 from tests import util as tu
 from tests.util import Cfg

+try:
+    from typing import Optional
+except:
+    pass
+

 def hdr(query, uname):
     h = "GET /%s HTTP/1.1\r\nPW: %s\r\nConnection: close\r\n\r\n"

@@ -29,12 +34,21 @@ class TestDots(unittest.TestCase):
         self.is_dut = True

     def setUp(self):
+        self.conn: Optional[tu.VHttpConn] = None
         self.td = tu.get_ramdisk()

     def tearDown(self):
+        if self.conn:
+            self.conn.shutdown()
         os.chdir(tempfile.gettempdir())
         shutil.rmtree(self.td)

+    def cinit(self):
+        if self.conn:
+            self.conn.shutdown()
+            self.conn = None
+        self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"")
+
     def test_dots(self):
         td = os.path.join(self.td, "vfs")
         os.mkdir(td)

@@ -57,6 +71,7 @@ class TestDots(unittest.TestCase):
         vcfg = [".::r,u1:r.,u2", "a:a:r,u1:r,u2", ".b:.b:r.,u1:r,u2"]
         self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"], e2dsa=True)
         self.asrv = AuthSrv(self.args, self.log)
+        self.cinit()

         self.assertEqual(self.tardir("", "u1"), "f0 t/f1 a/f3 a/da/f4")
         self.assertEqual(self.tardir(".t", "u1"), "f2")

@@ -88,6 +103,7 @@ class TestDots(unittest.TestCase):
         self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"], dotsrch=False, e2d=True)
         self.asrv = AuthSrv(self.args, self.log)
         u2idx = U2idx(self)
+        self.cinit()

         x = u2idx.search("u1", self.asrv.vfs.all_vols.values(), "", 999)
         x = " ".join(sorted([x["rp"] for x in x[0]]))

@@ -113,6 +129,8 @@ class TestDots(unittest.TestCase):
         ]
         self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"])
         self.asrv = AuthSrv(self.args, self.log)
+        self.cinit()
+
         zj = json.loads(self.curl("?ls", "u1")[1])
         url = "?k=" + zj["dk"]
         # should descend into folders, but not other volumes:

@@ -148,6 +166,7 @@ class TestDots(unittest.TestCase):

         self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"])
         self.asrv = AuthSrv(self.args, self.log)
+        self.cinit()

         dk = {}
         for d in "dk dks dk,fk dks,fk".split():

@@ -267,8 +286,8 @@ class TestDots(unittest.TestCase):
         self.assertIn('">folder</text>', self.curl(zs, "u2")[1])

         # fk enabled, so this should fail
-        self.assertIn('">e404</text>', self.curl("dk,fk/f.t1?th=x", "u2")[1])
-        self.assertIn('">e404</text>', self.curl("dk,fk/s1/f.t2?th=x", "u2")[1])
+        self.assertIn('">e403</text>', self.curl("dk,fk/f.t1?th=x", "u2")[1])
+        self.assertIn('">e403</text>', self.curl("dk,fk/s1/f.t2?th=x", "u2")[1])

         # but dk should return correct filekeys, so try that
         zs = "dk,fk/%s&th=x" % (zj["files"][0]["href"])

@@ -313,8 +332,8 @@ class TestDots(unittest.TestCase):
         self.assertIn('">folder</text>', self.curl(zs, "u2")[1])

         # fk enabled, so this should fail
-        self.assertIn('">e404</text>', self.curl("dks,fk/f.t1?th=x", "u2")[1])
-        self.assertIn('">e404</text>', self.curl("dks,fk/s1/f.t2?th=x", "u2")[1])
+        self.assertIn('">e403</text>', self.curl("dks,fk/f.t1?th=x", "u2")[1])
+        self.assertIn('">e403</text>', self.curl("dks,fk/s1/f.t2?th=x", "u2")[1])

         # but dk should return correct filekeys, so try that
         zs = "dks,fk/%s&th=x" % (zj["files"][0]["href"])

@@ -353,7 +372,7 @@ class TestDots(unittest.TestCase):

     def curl(self, url, uname, binary=False, req=b""):
         req = req or hdr(url, uname)
-        conn = tu.VHttpConn(self.args, self.asrv, self.log, req)
+        conn = self.conn.setbuf(req)
         HttpCli(conn).run()
         if binary:
             h, b = conn.s._reply.split(b"\r\n\r\n", 1)
@@ -124,7 +124,7 @@ class TestDXML(unittest.TestCase):
         lk = parse_xml(txt)
         self.assertEqual(lk.tag, "{DAV:}lockinfo")

-        if not lk.find(r"./{DAV:}depth"):
+        if lk.find(r"./{DAV:}depth") is None:
             lk.append(mktnod("D:depth", "infinity"))

         lk.append(mkenod("D:timeout", mktnod("D:href", "Second-3600")))
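The `is None` change above fixes a classic ElementTree pitfall: an Element with no children has `len() == 0` and therefore tests falsy, so `if not lk.find(...)` would treat a found-but-childless `depth` node as missing. A quick demonstration:

```python
import xml.etree.ElementTree as ET

lk = ET.fromstring("<lockinfo><depth/></lockinfo>")
d = lk.find("depth")
print(d is None)  # False -- the element was found
print(bool(d))    # False -- childless elements are falsy (deprecated behavior)
```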
@@ -12,14 +12,20 @@ from copyparty.httpcli import HttpCli
|
|||||||
from tests import util as tu
|
from tests import util as tu
|
||||||
from tests.util import Cfg
|
from tests.util import Cfg
|
||||||
|
|
||||||
|
try:
|
||||||
|
from typing import Optional
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def hdr(query):
|
def hdr(query):
|
||||||
h = "GET /{} HTTP/1.1\r\nPW: o\r\nConnection: close\r\n\r\n"
|
h = "GET /{} HTTP/1.1\r\nPW: o\r\nConnection: close\r\n\r\n"
|
||||||
return h.format(query).encode("utf-8")
|
return h.format(query).encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
class TestHooks(unittest.TestCase):
|
class TestHooks(tu.TC):
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
|
self.conn: Optional[tu.VHttpConn] = None
|
||||||
self.td = tu.get_ramdisk()
|
self.td = tu.get_ramdisk()
|
||||||
|
|
||||||
def tearDown(self):
|
def tearDown(self):
|
||||||
@@ -34,6 +40,12 @@ class TestHooks(unittest.TestCase):
|
|||||||
os.chdir(td)
|
os.chdir(td)
|
||||||
return td
|
return td
|
||||||
|
|
||||||
|
def cinit(self):
|
||||||
|
if self.conn:
|
||||||
|
self.conn.shutdown()
|
||||||
|
self.conn = None
|
||||||
|
self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"")
|
||||||
|
|
||||||
def test(self):
|
def test(self):
|
||||||
vcfg = ["a/b/c/d:c/d:A", "a:a:r"]
|
vcfg = ["a/b/c/d:c/d:A", "a:a:r"]
|
||||||
|
|
||||||
@@ -59,9 +71,10 @@ class TestHooks(unittest.TestCase):
|
|||||||
ka = {hooktype: ["j,c1,h.py"]}
|
ka = {hooktype: ["j,c1,h.py"]}
|
||||||
self.args = Cfg(v=vcfg, a=["o:o"], e2d=True, **ka)
|
self.args = Cfg(v=vcfg, a=["o:o"], e2d=True, **ka)
|
||||||
self.asrv = AuthSrv(self.args, self.log)
|
self.asrv = AuthSrv(self.args, self.log)
|
||||||
|
self.cinit()
|
||||||
|
|
||||||
h, b = upfun(url_up)
|
h, b = upfun(url_up)
|
||||||
self.assertIn("201 Created", h)
|
self.assertStart("HTTP/1.1 201 Created\r", h)
|
||||||
h, b = self.curl(url_dl)
|
h, b = self.curl(url_dl)
|
||||||
self.assertEqual(b, "ok %s\n" % (url_up))
|
self.assertEqual(b, "ok %s\n" % (url_up))
|
||||||
|
|
||||||
@@ -73,7 +86,7 @@ class TestHooks(unittest.TestCase):
         buf = "PUT /{0} HTTP/1.1\r\nPW: o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
         buf = buf.format(url, len(url) + 4).encode("utf-8")
         print("PUT -->", buf)
-        conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
+        conn = self.conn.setbuf(buf)
         HttpCli(conn).run()
         ret = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
         print("PUT <--", ret)
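The `len(url) + 4` content-length is the size of the body `"ok {url}\n"`: three bytes of `"ok "`, the URL, and one newline, which holds as long as the URL is pure ASCII. The same arithmetic made explicit (the path value is illustrative):

```py
url = "a/b/hook-test.txt"  # hypothetical upload path
body = ("ok %s\n" % (url,)).encode("utf-8")
assert len(body) == len(url) + 4  # "ok " (3) + url + "\n" (1), ASCII only
```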
@@ -92,14 +105,14 @@ class TestHooks(unittest.TestCase):
         buf = (bdy % (fn,) + "ok %s/%s\n" % (url, fn) + ftr).encode("utf-8")
         buf = (hdr % (url, len(buf))).encode("utf-8") + buf
         print("PoST -->", buf)
-        conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
+        conn = self.conn.setbuf(buf)
         HttpCli(conn).run()
         ret = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
         print("POST <--", ret)
         return ret

     def curl(self, url, binary=False):
-        conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
+        conn = self.conn.setbuf(hdr(url))
         HttpCli(conn).run()
         if binary:
             h, b = conn.s._reply.split(b"\r\n\r\n", 1)
@@ -15,7 +15,7 @@ import zipfile
 from copyparty.authsrv import AuthSrv
 from copyparty.httpcli import HttpCli
 from tests import util as tu
-from tests.util import Cfg, eprint
+from tests.util import Cfg, eprint, pfind2ls


 def hdr(query):
@@ -129,6 +129,24 @@ class TestHttpCli(unittest.TestCase):
         else:
             ref = []

+        h, b = self.propfind(durl, 1)
+        fns = [x for x in pfind2ls(b) if not x.endswith("/")]
+        if ref:
+            self.assertIn("<D:propstat>", b)
+        elif not rok and not wok:
+            self.assertListEqual([], fns)
+        else:
+            self.assertIn("<D:multistatus", b)
+
+        h, b = self.propfind(durl, 0)
+        fns = [x for x in pfind2ls(b) if not x.endswith("/")]
+        if ref:
+            self.assertIn("<D:propstat>", b)
+        elif not rok:
+            self.assertListEqual([], fns)
+        else:
+            self.assertIn("<D:multistatus", b)
+
         if test_tar:
             url = durl + "?tar"
             h, b = self.curl(url, True)
@@ -178,6 +196,8 @@ class TestHttpCli(unittest.TestCase):
             ap = os.path.join(vn.realpath, rem)
             os.unlink(ap)

+        self.conn.shutdown()
+
     def can_rw(self, fp):
         # lowest non-neutral folder declares permissions
         expect = fp.split("/")[:-1]
@@ -223,5 +243,12 @@ class TestHttpCli(unittest.TestCase):

         return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

+    def propfind(self, url, depth=1):
+        zs = "PROPFIND /%s HTTP/1.1\r\nDepth: %d\r\nPW: o\r\nConnection: close\r\n\r\n"
+        buf = zs % (url, depth)
+        conn = self.conn.setbuf(buf.encode("utf-8"))
+        HttpCli(conn).run()
+        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
+
     def log(self, src, msg, c=0):
         print(msg)
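The new `propfind` helper speaks just enough WebDAV for these tests: per RFC 4918, `Depth: 0` asks about the resource itself, `Depth: 1` adds its immediate children, and omitting the header entirely is treated as `infinity` (the recursive cases in the new `test_webdav.py` below rely on that by deleting the `Depth: 1` line). For reference, `propfind("a", 1)` puts these bytes on the wire:

```py
b"PROPFIND /a HTTP/1.1\r\nDepth: 1\r\nPW: o\r\nConnection: close\r\n\r\n"
```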
@@ -21,12 +21,14 @@ def hdr(query):
     return h.format(query).encode("utf-8")


-class TestMetrics(unittest.TestCase):
+class TestMetrics(tu.TC):
     def setUp(self):
         self.td = tu.get_ramdisk()
         os.chdir(self.td)

     def tearDown(self):
+        if self.conn:
+            self.conn.shutdown()
         os.chdir(tempfile.gettempdir())
         shutil.rmtree(self.td)

@@ -53,10 +55,11 @@ class TestMetrics(unittest.TestCase):
         self.conn = self.fstab = self.metrics = None
         self.cinit()
         h, b = self.curl(".cpr/metrics")
-        self.assertIn(".1 200 OK", h)
+        self.assertStart("HTTP/1.1 200 OK\r", h)
         ptns = r"""
 cpp_uptime_seconds [0-9]\.[0-9]{3}$
 cpp_boot_unixtime_seconds [0-9]{7,10}\.[0-9]{3}$
+cpp_active_dl 0$
 cpp_http_reqs_created [0-9]{7,10}$
 cpp_http_reqs_total -1$
 cpp_http_conns 9$
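Each line of `ptns` reads as one anchored regex per expected Prometheus metric; presumably the test walks them and matches against the scraped body in multiline mode, along these lines (the harness code is assumed, not shown in this diff):

```py
import re

def assert_metrics(body, ptns):
    # every non-empty pattern line must match somewhere in the body,
    # anchored to a full line by ^ plus the trailing $ in the pattern
    for ptn in ptns.strip().split("\n"):
        if not re.search("^" + ptn, body, re.MULTILINE):
            raise AssertionError("missing metric: " + ptn)
```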
@@ -20,11 +20,13 @@ TODO inject tags into db and verify ls
 """


-class TestDedup(unittest.TestCase):
+class TestDedup(tu.TC):
     def setUp(self):
         self.td = tu.get_ramdisk()

     def tearDown(self):
+        if not PY2 and self.conn:
+            self.conn.shutdown()
         os.chdir(tempfile.gettempdir())
         shutil.rmtree(self.td)

@@ -128,7 +130,7 @@ class TestDedup(unittest.TestCase):
         HttpCli(self.conn.setbuf(buf)).run()
         ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
         print("MV <-- ", ret)
-        self.assertIn(" 201 Created", ret[0])
+        self.assertStart("HTTP/1.1 201 Created\r", ret[0])
         self.assertEqual("k\r\n", ret[1])
         return ret

264 tests/test_webdav.py (new file)
@@ -0,0 +1,264 @@
+#!/usr/bin/env python3
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
+import os
+import shutil
+import tempfile
+import time
+import unittest
+
+from copyparty.authsrv import AuthSrv
+from copyparty.httpcli import HttpCli
+from tests import util as tu
+from tests.util import TC, Cfg, pfind2ls
+
+
+# tcpdump of `rclone ls dav:`
+RCLONE_PROPFIND = """PROPFIND /%s HTTP/1.1
+Host: 127.0.0.1:3923
+User-Agent: rclone/v1.67.0
+Content-Length: 308
+Authorization: Basic azp1
+Depth: 1
+Referer: http://127.0.0.1:3923/
+Accept-Encoding: gzip
+
+<?xml version="1.0"?>
+<d:propfind xmlns:d="DAV:" xmlns:oc="http://owncloud.org/ns" xmlns:nc="http://nextcloud.org/ns">
+ <d:prop>
+  <d:displayname />
+  <d:getlastmodified />
+  <d:getcontentlength />
+  <d:resourcetype />
+  <d:getcontenttype />
+  <oc:checksums />
+  <oc:permissions />
+ </d:prop>
+</d:propfind>
+"""
+
+
+# tcpdump of `rclone copy fa dav:/a/` (it does a mkcol first)
+RCLONE_MKCOL = """MKCOL /%s HTTP/1.1
+Host: 127.0.0.1:3923
+User-Agent: rclone/v1.67.0
+Authorization: Basic azp1
+Referer: http://127.0.0.1:3923/
+Accept-Encoding: gzip
+\n"""
+
+
+# tcpdump of `rclone copy fa dav:/a/` (the actual upload)
+RCLONE_PUT = """PUT /%s HTTP/1.1
+Host: 127.0.0.1:3923
+User-Agent: rclone/v1.67.0
+Content-Length: 6
+Authorization: Basic azp1
+Content-Type: application/octet-stream
+Oc-Checksum: SHA1:f5e3dc3fb27af53cd0005a1184e2df06481199e8
+Referer: http://127.0.0.1:3923/
+X-Oc-Mtime: 1689453578
+Accept-Encoding: gzip
+
+fgsfds"""
+
+
+# tcpdump of `rclone delete dav:/a/d1/` (it does propfind recursively and then this on each file)
+# (note: `rclone rmdirs dav:/a/d1/` does the same thing but just each folder after asserting they're empty)
+RCLONE_DELETE = """DELETE /%s HTTP/1.1
+Host: 127.0.0.1:3923
+User-Agent: rclone/v1.67.0
+Authorization: Basic azp1
+Referer: http://127.0.0.1:3923/
+Accept-Encoding: gzip
+\n"""
+
+
+# tcpdump of `rclone move dav:/a/d1/d2 /a/d1/d3` (it does a lot of boilerplate propfinds/mkcols before)
+RCLONE_MOVE = """MOVE /%s HTTP/1.1
+Host: 127.0.0.1:3923
+User-Agent: rclone/v1.67.0
+Authorization: Basic azp1
+Destination: http://127.0.0.1:3923/%s
+Overwrite: T
+Referer: http://127.0.0.1:3923/
+Accept-Encoding: gzip
+\n"""
+
+
+class TestHttpCli(TC):
+    def setUp(self):
+        self.td = tu.get_ramdisk()
+        self.maxDiff = 99999
+
+    def tearDown(self):
+        self.conn.shutdown()
+        os.chdir(tempfile.gettempdir())
+        shutil.rmtree(self.td)
+
+    def test(self):
+        td = os.path.join(self.td, "vfs")
+        os.mkdir(td)
+        os.chdir(td)
+
+        self.fn = "g{:x}g".format(int(time.time() * 3))
+        vcfg = [
+            "r:r:r,u",
+            "w:w:w,u",
+            "a:a:A,u",
+            "x:x:r,u2",
+            "x/r:x/r:r,u",
+            "x/x:x/x:r,u2",
+        ]
+        self.args = Cfg(v=vcfg, a=["u:u", "u2:u2"])
+        self.asrv = AuthSrv(self.args, self.log)
+        self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"", True)
+
+        self.fns = ["%s/%s" % (zs.split(":")[0], self.fn) for zs in vcfg]
+        for fp in self.fns:
+            try:
+                os.makedirs(os.path.dirname(fp))
+            except:
+                pass
+            with open(fp, "wb") as f:
+                f.write(("ok %s\n" % (fp,)).encode("utf-8"))
+
+        ##
+        ## depth:1 (regular listing)
+
+        # unmapped root; should return list of volumes
+        h, b = self.req(RCLONE_PROPFIND % ("",))
+        fns = pfind2ls(b)
+        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
+        self.assertListEqual(fns, ["/", "/a/", "/r/"])
+
+        # toplevel of a volume; has one file
+        h, b = self.req(RCLONE_PROPFIND % ("a",))
+        fns = pfind2ls(b)
+        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
+        self.assertListEqual(fns, ["/a/", "/a/" + self.fn])
+
+        # toplevel of a volume; has one file
+        h, b = self.req(RCLONE_PROPFIND % ("r",))
+        fns = pfind2ls(b)
+        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
+        self.assertListEqual(fns, ["/r/", "/r/" + self.fn])
+
+        # toplevel of write-only volume; has one file, will not list
+        h, b = self.req(RCLONE_PROPFIND % ("w",))
+        fns = pfind2ls(b)
+        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
+        self.assertListEqual(fns, ["/w/"])
+
+        ##
+        ## auth challenge
+
+        bad_pfind = RCLONE_PROPFIND.replace("Authorization: Basic azp1\n", "")
+        bad_put = RCLONE_PUT.replace("Authorization: Basic azp1\n", "")
+        urls = ["", "r", "w", "a"]
+        urls += [x + "/" + self.fn for x in urls[1:]]
+        for url in urls:
+            for q in (bad_pfind, bad_put):
+                h, b = self.req(q % (url,))
+                self.assertStart("HTTP/1.1 401 Unauthorized\r", h)
+                self.assertIn('\nWWW-Authenticate: Basic realm="a"\r', h)
+
+        ##
+        ## depth:0 (recursion)
+
+        # depth:0 from unmapped root should work;
+        # will NOT list contents of /x/r/ due to current limitations
+        # (stops descending at first non-accessible volume)
+        recursive = RCLONE_PROPFIND.replace("Depth: 1\n", "")
+        h, b = self.req(recursive % ("",))
+        fns = pfind2ls(b)
+        expect = ["/", "/a/", "/r/"]
+        expect += [x + self.fn for x in expect[1:]]
+        self.assertListEqual(fns, expect)
+
+        # same thing here...
+        h, b = self.req(recursive % ("/x",))
+        fns = pfind2ls(b)
+        self.assertListEqual(fns, [])
+
+        # but this obviously works
+        h, b = self.req(recursive % ("/x/r",))
+        fns = pfind2ls(b)
+        self.assertListEqual(fns, ["/x/r/", "/x/r/" + self.fn])
+
+        ##
+        ## uploading
+
+        # rclone does a propfind on the target file first; expects 404
+        h, b = self.req(RCLONE_PROPFIND % ("a/fa",))
+        self.assertStart("HTTP/1.1 404 Not Found\r", h)
+
+        # then it does a mkcol (mkdir), expecting 405 (exists)
+        h, b = self.req(RCLONE_MKCOL % ("a",))
+        self.assertStart("HTTP/1.1 405 Method Not Allowed\r", h)
+
+        # then it uploads the file
+        h, b = self.req(RCLONE_PUT % ("a/fa",))
+        self.assertStart("HTTP/1.1 201 Created\r", h)
+
+        # then it does a propfind to confirm
+        h, b = self.req(RCLONE_PROPFIND % ("a/fa",))
+        fns = pfind2ls(b)
+        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
+        self.assertListEqual(fns, ["/a/fa"])
+
+        ##
+        ## upload into set of subfolders that don't exist yet
+
+        # rclone does this:
+        # propfind /a/d1/d2/fa => 404
+        # mkcol /a/d1/d2/ => 409
+        # propfind /a/d1/d2/ => 404
+        # mkcol /a/d1/ => 201
+        # mkcol /a/d1/d2/ => 201
+        # put /a/d1/d2/fa => 201
+        # propfind /a/d1/d2/fa => 207
+        # ...some of which already tested above;
+
+        h, b = self.req(RCLONE_PROPFIND % ("/a/d1/d2/",))
+        self.assertStart("HTTP/1.1 404 Not Found\r", h)
+
+        h, b = self.req(RCLONE_PROPFIND % ("/a/d1/",))
+        self.assertStart("HTTP/1.1 404 Not Found\r", h)
+
+        h, b = self.req(RCLONE_MKCOL % ("/a/d1/d2/",))
+        self.assertStart("HTTP/1.1 409 Conflict\r", h)
+
+        h, b = self.req(RCLONE_MKCOL % ("/a/d1/",))
+        self.assertStart("HTTP/1.1 201 Created\r", h)
+
+        h, b = self.req(RCLONE_MKCOL % ("/a/d1/d2/",))
+        self.assertStart("HTTP/1.1 201 Created\r", h)
+
+        h, b = self.req(RCLONE_PUT % ("a/d1/d2/fa",))
+        self.assertStart("HTTP/1.1 201 Created\r", h)
+
+        ##
+        ## rename
+
+        h, b = self.req(RCLONE_MOVE % ("a/d1/d2/", "a/d1/d3/"))
+        self.assertStart("HTTP/1.1 201 Created\r", h)
+        self.assertListEqual(os.listdir("a/d1"), ["d3"])
+
+        ##
+        ## delete
+
+        h, b = self.req(RCLONE_DELETE % ("a/d1",))
+        self.assertStart("HTTP/1.1 200 OK\r", h)
+        if os.path.exists("a/d1"):
+            self.fail("a/d1 still exists")
+
+    def req(self, q):
+        h, b = q.split("\n\n", 1)
+        q = h.replace("\n", "\r\n") + "\r\n\r\n" + b
+        conn = self.conn.setbuf(q.encode("utf-8"))
+        HttpCli(conn).run()
+        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
+
+    def log(self, src, msg, c=0):
+        print(msg)
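The fixtures above are tcpdump captures pasted with plain LF line endings; `req()` restores wire format by splitting at the first blank line, CRLF-ifying the header block, and leaving the body byte-for-byte. Tracing it on one template:

```py
q = RCLONE_PROPFIND % ("a",)   # fill in the request path
h, b = q.split("\n\n", 1)      # headers / body at the first blank line
raw = (h.replace("\n", "\r\n") + "\r\n\r\n" + b).encode("utf-8")
# raw begins b"PROPFIND /a HTTP/1.1\r\nHost: 127.0.0.1:3923\r\n..."
```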
@@ -12,6 +12,7 @@ import sys
 import tempfile
 import threading
 import time
+import unittest
 from argparse import Namespace

 import jinja2
@@ -32,14 +33,6 @@ def eprint(*a, **ka):
     sys.stderr.flush()


-if MACOS:
-    import posixpath
-
-    posixpath.islink = nah
-    os.path.islink = nah
-    # 25% faster; until any tests do symlink stuff
-
-
 from copyparty.__main__ import init_E
 from copyparty.broker_thr import BrokerThr
 from copyparty.ico import Ico
@@ -118,26 +111,40 @@ def get_ramdisk():
     return subdir(ret)


+def pfind2ls(xml):
+    return [x.split("<", 1)[0] for x in xml.split("<D:href>")[1:]]
+
+
+class TC(unittest.TestCase):
+    def __init__(self, *a, **ka):
+        super(TC, self).__init__(*a, **ka)
+
+    def assertStart(self, member, container, msg=None):
+        if not container.startswith(member):
+            standardMsg = "%s not found in %s" % (member, container)
+            self.fail(self._formatMessage(msg, standardMsg))
+
+
 class Cfg(Namespace):
     def __init__(self, a=None, v=None, c=None, **ka0):
         ka = {}

-        ex = "chpw daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_clone no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw og og_no_head og_s_title q rand re_dirsz smb srch_dbg stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
+        ex = "chpw daw dav_auth dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_bauth no_clone no_cp no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nsort nw og og_no_head og_s_title ohead q rand re_dirsz rss smb srch_dbg srch_excl stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
         ka.update(**{k: False for k in ex.split()})

-        ex = "dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash plain_ip"
+        ex = "dav_inf dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash plain_ip"
         ka.update(**{k: True for k in ex.split()})

-        ex = "ah_cli ah_gen css_browser hist js_browser js_other mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua"
+        ex = "ah_cli ah_gen css_browser hist ipu js_browser js_other mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua"
         ka.update(**{k: None for k in ex.split()})

-        ex = "hash_mt safe_dedup srch_time u2abort u2j u2sz"
+        ex = "hash_mt hsortn safe_dedup srch_time u2abort u2j u2sz"
         ka.update(**{k: 1 for k in ex.split()})

-        ex = "au_vol mtab_age reg_cap s_thead s_tbody th_convt"
+        ex = "au_vol dl_list mtab_age reg_cap s_thead s_tbody th_convt"
         ka.update(**{k: 9 for k in ex.split()})

-        ex = "db_act k304 loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo"
+        ex = "db_act k304 loris no304 re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo"
         ka.update(**{k: 0 for k in ex.split()})

         ex = "ah_alg bname chpw_db doctitle df exit favico idp_h_usr ipa html_head lg_sbf log_fk md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i shr tcolor textfiles unlist vname xff_src R RS SR"
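`pfind2ls` is an intentionally crude href scraper: split the multistatus XML on `<D:href>` and keep everything up to the next `<`. On a toy response (element layout assumed to match copyparty's output):

```py
xml = (
    "<D:multistatus xmlns:D='DAV:'>"
    "<D:response><D:href>/a/</D:href></D:response>"
    "<D:response><D:href>/a/fa</D:href></D:response>"
    "</D:multistatus>"
)
assert pfind2ls(xml) == ["/a/", "/a/fa"]  # hrefs in document order
```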
@@ -146,7 +153,7 @@ class Cfg(Namespace):
         ex = "ban_403 ban_404 ban_422 ban_pw ban_url"
         ka.update(**{k: "no" for k in ex.split()})

-        ex = "grp on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
+        ex = "grp on403 on404 xac xad xar xau xban xbc xbd xbr xbu xiu xm"
         ka.update(**{k: [] for k in ex.split()})

         ex = "exp_lg exp_md"
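The `Cfg` churn in this range is mechanical: whenever copyparty grows CLI flags, the test shim defaults them in bulk, one space-separated string per default value, then lets explicit keyword args win. The pattern reduced to its core (the reduction itself is illustrative; flag names are picked from the strings above):

```py
from argparse import Namespace

class MiniCfg(Namespace):  # illustrative reduction of tests.util.Cfg
    def __init__(self, **ka0):
        ka = {}
        ka.update({k: False for k in "rss nsort no_cp".split()})  # new bools
        ka.update({k: None for k in "ipu".split()})               # unset opts
        ka.update({k: 9 for k in "dl_list".split()})              # numerics
        ka.update(ka0)  # per-test overrides take precedence
        super(MiniCfg, self).__init__(**ka)
```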
@@ -254,6 +261,8 @@ class VHttpSrv(object):
         self.broker = NullBroker(args, asrv)
         self.prism = None
         self.bans = {}
+        self.tdls = self.dls = {}
+        self.tdli = self.dli = {}
         self.nreq = 0
         self.nsus = 0

@@ -276,6 +285,10 @@ class VHttpSrv(object):
         self.u2idx = self.u2idx or U2idx(self)
         return self.u2idx

+    def shutdown(self):
+        if self.u2idx:
+            self.u2idx.shutdown()
+

 class VHttpSrvUp2k(VHttpSrv):
     def __init__(self, args, asrv, log):
@@ -283,6 +296,11 @@ class VHttpSrvUp2k(VHttpSrv):
         self.hub = VHub(args, asrv, log)
         self.broker = VBrokerThr(self.hub)

+    def shutdown(self):
+        self.hub.up2k.shutdown()
+        if self.u2idx:
+            self.u2idx.shutdown()
+

 class VHttpConn(object):
     def __init__(self, args, asrv, log, buf, use_up2k=False):
@@ -292,6 +310,8 @@ class VHttpConn(object):
         self.args = args
         self.asrv = asrv
         self.bans = {}
+        self.tdls = self.dls = {}
+        self.tdli = self.dli = {}
         self.freshen_pwd = 0.0

         Ctor = VHttpSrvUp2k if use_up2k else VHttpSrv
@@ -318,6 +338,9 @@ class VHttpConn(object):
         self.sr = Unrecv(self.s, None)  # type: ignore
         return self

+    def shutdown(self):
+        self.hsrv.shutdown()
+

 if WINDOWS:
     os.system("rem")
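Collecting the scattered `shutdown` additions, teardown is now a single chain: the test calls `self.conn.shutdown()`, the connection forwards to its virtual server, and the up2k flavor stops the upload hub before the shared indexer. As one sketch (bodies taken from the hunks above, other class members elided):

```py
class VHttpConn(object):
    def shutdown(self):
        self.hsrv.shutdown()  # hsrv is VHttpSrv or VHttpSrvUp2k

class VHttpSrv(object):
    def shutdown(self):
        if self.u2idx:
            self.u2idx.shutdown()  # only if a search index was created

class VHttpSrvUp2k(VHttpSrv):
    def shutdown(self):
        self.hub.up2k.shutdown()  # stop the upload engine first
        if self.u2idx:
            self.u2idx.shutdown()
```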