Compare commits

...

70 Commits

Author SHA1 Message Date
ed
a7cef91b8b v1.9.23 2023-12-01 00:39:49 +00:00
ed
a4a112c0ee update pkgs to 1.9.22 2023-12-01 01:14:18 +00:00
ed
e6bcee28d6 v1.9.22 2023-12-01 00:31:02 +00:00
ed
626b5770a5 add --ftp-ipa 2023-11-30 23:36:46 +00:00
ed
c2f92cacc1 mention the new auth feature 2023-11-30 23:01:05 +00:00
ed
4f8a1f5f6a allow free text selection in modals by deferring focus 2023-11-30 22:41:16 +00:00
ed
4a98b73915 fix a bug previously concealed by window.event;
hitting enter would clear out an entire chain of modals,
because the event didn't get consumed like it should,
so let's make double sure that will be the case
2023-11-30 22:40:30 +00:00
ed
00812cb1da new option --ipa; client IP allowlist:
connections from outside the specified list of IP prefixes are rejected
(docker-friendly alternative to -i 127.0.0.1)

also mkdir any missing folders when logging to file
2023-11-30 20:45:43 +00:00
ed
16766e702e add basic-docker-compose (#59) 2023-11-30 20:14:38 +00:00
ed
5e932a9504 highlight metavars in help text 2023-11-30 18:19:34 +00:00
ed
ccab44daf2 initial support for identity providers (#62):
add argument --hdr-au-usr which specifies a HTTP header to read
usernames from; entirely bypasses copyparty's password checks
for http/https clients (ftp/smb are unaffected)

users must exist in the copyparty config, passwords can be whatever

just the first step but already a bit useful on its own,
more to come in a few months
2023-11-30 18:18:47 +00:00
ed
8c52b88767 make linters happier 2023-11-30 17:33:07 +00:00
ed
c9fd26255b support environment variables mostly everywhere,
useful for docker/systemd stuff

also makes logfiles flush to disk per line by default;
can be disabled for a small performance gain with --no-logflush
2023-11-30 10:22:52 +00:00
ed
0b9b8dbe72 systemd: get rid of nftables portforwarding;
suggest letting copyparty bind 80/443 itself because nft hard
2023-11-30 10:13:14 +00:00
ed
b7723ac245 rely on filekeys for album-art over bluetooth;
will probably fail when some devices (sup iphone) stream to car stereos
but at least passwords won't end up somewhere unexpected this way
(plus, the js no longer uses the jank url to request waveforms)
2023-11-29 23:20:59 +00:00
ed
35b75c3db1 avoid palemoon bug on dragging a text selection;
"permission denied to access property preventDefault"
2023-11-26 20:22:59 +00:00
ed
f902779050 avoid potential dom confusion (ie8 is already no-js) 2023-11-26 20:08:52 +00:00
ed
fdddd36a5d update pkgs to 1.9.21 2023-11-25 14:48:41 +00:00
ed
c4ba123779 v1.9.21 2023-11-25 14:17:58 +00:00
ed
72e355eb2c prisonparty: prevent overlapping setup/teardown 2023-11-25 14:03:41 +00:00
ed
43d409a5d9 prisonparty accepts user/group names 2023-11-25 13:40:21 +00:00
ed
b1fffc2246 open textfiles inline in grid-view, closes #63;
also fix the Y hotkey (which converts all links in the list-view into
download links), making that apply to the grid-view as well
2023-11-25 13:09:12 +00:00
ed
edd3e53ab3 prisonparty: support zfs-ubuntu
* when bind-mounting, resolve any symlinks ($v/) and read target inode;
   for example merged /bin and /usr/bin
* add failsafe in case this test should break in new exciting ways;
   inspect `mount` for any instances of the jailed path
   (not /proc/mounts since that has funny space encoding)
* unmount in a while-loop because xargs freaks out if one of them fail
   * and systemd doesn't give us a /dev/stderr to write to anyways
2023-11-25 02:16:48 +00:00
ed
aa0b119031 update pkgs to 1.9.20 2023-11-21 23:44:56 +00:00
ed
eddce00765 v1.9.20 2023-11-21 23:25:41 +00:00
ed
6f4bde2111 fix infinite backspin on "previous track";
when playing the first track in a folder and hitting the previous track
button, it would keep switching through the previous folders infinitely
2023-11-21 23:23:51 +00:00
ed
f3035e8869 clear load-more buttons upon navigation (thx icxes) 2023-11-21 22:53:46 +00:00
ed
a9730499c0 don't suggest loading more search results beyond server cap 2023-11-21 22:38:35 +00:00
ed
b66843efe2 reduce cpu priority of ffmpeg, hooks, parsers 2023-11-21 22:21:33 +00:00
ed
cc1aaea300 update pkgs to 1.9.19 2023-11-19 12:45:32 +00:00
ed
9ccc238799 v1.9.19 2023-11-19 12:29:19 +00:00
ed
8526ef9368 srch-dbg: handle jumpvols correctly 2023-11-19 11:35:13 +00:00
ed
3c36727d07 fix filekeys not appearing in up2k in world-writable vols 2023-11-19 11:19:08 +00:00
ed
ef33ce94cd filter shadowed files from search results (#61),
also adds optimization to stop opening cursors
when max results has already been hit
2023-11-19 11:04:36 +00:00
ed
d500baf5c5 update pkgs to 1.9.18 2023-11-18 21:16:10 +00:00
ed
deef32335e v1.9.18 2023-11-18 21:06:55 +00:00
ed
fc4b51ad00 make dhash more volatile; probably fixes #61:
if any volumes were added or removed since last use,
drop dhash to verify that there are no files to shadow
2023-11-18 20:48:56 +00:00
ed
fa762754bf fix close/more thumbs in search results for pillow 10.x 2023-11-18 13:57:35 +00:00
ed
29bd8f57c4 fix js error when ctrl-clicking a search result; closes #60 2023-11-18 13:47:00 +00:00
ed
abc37354ef update pkgs to 1.9.17 2023-11-11 18:22:51 +00:00
ed
ee3333362f v1.9.17 2023-11-11 17:38:43 +00:00
ed
7c0c6b94a3 drop asyncore; pyftpdlib has vendored it 2023-11-11 17:20:00 +00:00
ed
bac733113c up2k-hasher robustness:
webdav clients tend to upload and then immediately delete
files to test for write-access and available disk space,
so don't crash and burn when that happens
2023-11-11 16:21:54 +00:00
ed
32ab65d7cb add cfssl to packaging + improve certgen expiration check 2023-11-11 15:30:03 +00:00
ed
c6744dc483 u2c: configurable retry delay 2023-11-11 14:46:00 +00:00
ed
b9997d677d u2c: give up on files with bitflips 2023-11-11 14:30:46 +00:00
ed
10defe6aef u2c: make -x case-insensitive 2023-11-11 14:02:01 +00:00
ed
736aa125a8 fix dumb 2023-11-11 13:52:06 +00:00
ed
eb48373b8b mention fpm 2023-11-08 00:55:16 +00:00
ed
d4a7b7d84d add contribution ideas 2023-11-06 15:33:29 +00:00
ed
2923a38b87 update pkgs to 1.9.16 2023-11-04 23:30:07 +00:00
ed
dabdaaee33 v1.9.16 2023-11-04 21:58:01 +00:00
ed
65e4d67c3e mkdir with leading slash works as expected 2023-11-04 22:21:56 +00:00
ed
4b720f4150 add more prometheus metrics; breaking changes:
* cpp_uptime is now a gauge
* cpp_bans is now cpp_active_bans (and also a gauge)

and other related fixes:
* stop emitting invalid cpp_disk_size/free for offline volumes
* support overriding the spec-mandatory mimetype with ?mime=foo
2023-11-04 20:32:34 +00:00
ed
2e85a25614 improve service listing 2023-11-04 10:23:37 +00:00
ed
713fffcb8e also mkdir missing intermediates,
unless requester is a webdav client (those expect a 409)
2023-11-03 23:23:49 +00:00
ed
8020b11ea0 improve/simplify validation/errorhandling:
* some malicious requests are now answered with HTTP 422,
   so that they count against --ban-422
* do not include request headers when replying to invalid requests,
   in case there is a reverse-proxy inserting something interesting
2023-11-03 23:07:16 +00:00
ed
2523d76756 windows: fix symlinks 2023-11-03 17:16:12 +00:00
ed
7ede509973 nginx: reduce cost of spurious connectivity loss;
default value of fail_timeout (10sec) makes server unavailable for that
amount of time, even if the server is just down for a quick restart
2023-11-03 17:13:11 +00:00
ed
7c1d97af3b slightly better pyinstaller loader 2023-11-03 17:09:34 +00:00
ed
95566e8388 cosmetics:
* fix toast/tooltip colors on splashpage
* properly warn if --ah-cli or --ah-gen is used without --ah-alg
* support ^D during --ah-cli
* improve flavor texts
2023-11-03 16:52:43 +00:00
ed
76afb62b7b make each segment of links separately selectable 2023-10-25 12:21:39 +00:00
ed
7dec922c70 update pkgs to 1.9.15 2023-10-24 16:56:57 +00:00
ed
c07e0110f8 v1.9.15 2023-10-24 16:43:26 +00:00
ed
2808734047 drc: further reduce volume skip between songs 2023-10-24 16:38:29 +00:00
ed
1f75314463 placeholder expansion in readme and logues; closes #56
also fixes the "scan" volflag which broke in v1.9.14
2023-10-24 16:37:32 +00:00
ed
063fa3efde drc: fix volume jump on song change
(in exchange for a chance of clipping, which should be fine because
all browsers appear to have a limiter on the output anyways)
2023-10-23 09:05:31 +00:00
ed
44693d79ec update pkgs to 1.9.14 2023-10-21 14:52:22 +00:00
ed
cea746377e v1.9.14 2023-10-21 14:43:11 +00:00
ed
59a98bd2b5 update pkgs to 1.9.13 2023-10-21 13:34:50 +00:00
62 changed files with 1534 additions and 577 deletions

View File

@@ -1,3 +1,43 @@
* do something cool * do something cool
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight 👍👍
but to be more specific,
# contribution ideas
## documentation
I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.
## crazy ideas & features
assuming they won't cause too many problems or side-effects :>
i think someone was working on a way to list directories over DNS for example...
if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍
## others
aside from documentation and ideas, some other things that would be cool to have some help with is:
* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's a *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
* [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)
* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
* [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
* [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :>

View File

@@ -53,6 +53,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [webdav server](#webdav-server) - with read-write support * [webdav server](#webdav-server) - with read-write support
* [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI * [connecting to webdav from windows](#connecting-to-webdav-from-windows) - using the GUI
* [smb server](#smb-server) - unsafe, slow, not recommended for wan * [smb server](#smb-server) - unsafe, slow, not recommended for wan
* [browser ux](#browser-ux) - tweaking the ui
* [file indexing](#file-indexing) - enables dedup and music search ++ * [file indexing](#file-indexing) - enables dedup and music search ++
* [exclude-patterns](#exclude-patterns) - to save some time * [exclude-patterns](#exclude-patterns) - to save some time
* [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems * [filesystem guards](#filesystem-guards) - avoid traversing into other filesystems
@@ -66,6 +67,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/)) * [event hooks](#event-hooks) - trigger a program on uploads, renames etc ([examples](./bin/hooks/))
* [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/)) * [upload events](#upload-events) - the older, more powerful approach ([examples](./bin/mtag/))
* [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/)) * [handlers](#handlers) - redefine behavior with plugins ([examples](./bin/handlers/))
* [identity providers](#identity-providers) - replace copyparty passwords with oauth and such
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed * [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [themes](#themes) * [themes](#themes)
* [complete examples](#complete-examples) * [complete examples](#complete-examples)
@@ -118,8 +120,8 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps: enable thumbnails (images/audio/video), media indexing, and audio transcoding by installing some recommended deps:
* **Alpine:** `apk add py3-pillow ffmpeg` * **Alpine:** `apk add py3-pillow ffmpeg`
* **Debian:** `apt install python3-pil ffmpeg` * **Debian:** `apt install --no-install-recommends python3-pil ffmpeg`
* **Fedora:** `dnf install python3-pillow ffmpeg` * **Fedora:** rpmfusion + `dnf install python3-pillow ffmpeg`
* **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg` * **FreeBSD:** `pkg install py39-sqlite3 py39-pillow ffmpeg`
* **MacOS:** `port install py-Pillow ffmpeg` * **MacOS:** `port install py-Pillow ffmpeg`
* **MacOS** (alternative): `brew install pillow ffmpeg` * **MacOS** (alternative): `brew install pillow ffmpeg`
@@ -317,6 +319,8 @@ same order here too
upgrade notes upgrade notes
* `1.9.16` (2023-11-04):
* `--stats`/prometheus: `cpp_bans` renamed to `cpp_active_bans`, and that + `cpp_uptime` are gauges
* `1.6.0` (2023-01-29): * `1.6.0` (2023-01-29):
* http-api: delete/move is now `POST` instead of `GET` * http-api: delete/move is now `POST` instead of `GET`
* everything other than `GET` and `HEAD` must pass [cors validation](#cors) * everything other than `GET` and `HEAD` must pass [cors validation](#cors)
@@ -789,6 +793,8 @@ other notes,
* files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence) * files named `README.md` / `readme.md` will be rendered after directory listings unless `--no-readme` (but `.epilogue.html` takes precedence)
* `README.md` and `*logue.html` can contain placeholder values which are replaced server-side before embedding into directory listings; see `--help-exp`
## searching ## searching
@@ -954,6 +960,16 @@ authenticate with one of the following:
* username `$password`, password `k` * username `$password`, password `k`
## browser ux
tweaking the ui
* set default sort order globally with `--sort` or per-volume with the `sort` volflag; specify one or more comma-separated columns to sort by, and prefix the column name with `-` for reverse sort
* the column names you can use are visible as tooltips when hovering over the column headers in the directory listing, for example `href ext sz ts tags/.up_at tags/Circle tags/.tn tags/Artist tags/Title`
* to sort in music order (album, track, artist, title) with filename as fallback, you could `--sort tags/Circle,tags/.tn,tags/Artist,tags/Title,href`
* to sort by upload date, first enable showing the upload date in the listing with `-e2d -mte +.up_at` and then `--sort tags/.up_at`
## file indexing ## file indexing
enables dedup and music search ++ enables dedup and music search ++
@@ -1178,6 +1194,17 @@ redefine behavior with plugins ([examples](./bin/handlers/))
replace 404 and 403 errors with something completely different (that's it for now) replace 404 and 403 errors with something completely different (that's it for now)
## identity providers
replace copyparty passwords with oauth and such
work is [ongoing](https://github.com/9001/copyparty/issues/62) to support authenticating / authorizing users based on a separate authentication proxy, which makes it possible to support oauth, single-sign-on, etc.
it is currently possible to specify `--hdr-au-usr x-username`; copyparty will then skip password validation and blindly trust the username specified in the `X-Username` request header
the remaining stuff (accepting user groups through another header, creating volumes on the fly) are still to-do
## hiding from google ## hiding from google
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings: tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
@@ -1292,8 +1319,23 @@ scrape_configs:
``` ```
currently the following metrics are available, currently the following metrics are available,
* `cpp_uptime_seconds` * `cpp_uptime_seconds` time since last copyparty restart
* `cpp_bans` number of banned IPs * `cpp_boot_unixtime_seconds` same but as an absolute timestamp
* `cpp_http_conns` number of open http(s) connections
* `cpp_http_reqs` number of http(s) requests handled
* `cpp_sus_reqs` number of 403/422/malicious requests
* `cpp_active_bans` number of currently banned IPs
* `cpp_total_bans` number of IPs banned since last restart
these are available unless `--nos-vst` is specified:
* `cpp_db_idle_seconds` time since last database activity (upload/rename/delete)
* `cpp_db_act_seconds` same but as an absolute timestamp
* `cpp_idle_vols` number of volumes which are idle / ready
* `cpp_busy_vols` number of volumes which are busy / indexing
* `cpp_offline_vols` number of volumes which are offline / unavailable
* `cpp_hashing_files` number of files queued for hashing / indexing
* `cpp_tagq_files` number of files queued for metadata scanning
* `cpp_mtpq_files` number of files queued for plugin-based analysis
and these are available per-volume only: and these are available per-volume only:
* `cpp_disk_size_bytes` total HDD size * `cpp_disk_size_bytes` total HDD size
@@ -1312,9 +1354,12 @@ some of the metrics have additional requirements to function correctly,
the following options are available to disable some of the metrics: the following options are available to disable some of the metrics:
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs * `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time * `--nos-vol` disables `cpp_vol_*` which reduces server startup time
* `--nos-vst` disables volume state, reducing the worst-case prometheus query time by 0.5 sec
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries * `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
* `--nos-unf` disables `cpp_unf_*` for no particular purpose * `--nos-unf` disables `cpp_unf_*` for no particular purpose
note: the following metrics are counted incorrectly if multiprocessing is enabled with `-j`: `cpp_http_conns`, `cpp_http_reqs`, `cpp_sus_reqs`, `cpp_active_bans`, `cpp_total_bans`
# packages # packages
@@ -1333,9 +1378,13 @@ now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI
```bash ```bash
dnf copr enable @copr/PyPI dnf copr enable @copr/PyPI
dnf install python3-copyparty # just a minimal install, or... dnf install python3-copyparty # just a minimal install, or...
dnf install python3-{copyparty,pillow,argon2-cffi,pyftpdlib,pyOpenSSL} ffmpeg-free # with recommended deps dnf install python3-{copyparty,pillow,argon2-cffi,pyftpdlib,pyOpenSSL} ffmpeg # with recommended deps
``` ```
`ffmpeg` comes from [rpmfusion](https://rpmfusion.org/Configuration#Command_Line_Setup_using_rpm) so it's recommended to enable that (you don't want `ffmpeg-free` since it fails to thumbnail most h264/mkv/mp4 videos)
to run copyparty as a service, use the [systemd service scripts](https://github.com/9001/copyparty/tree/hovudstraum/contrib/systemd), just replace `/usr/bin/python3 /usr/local/bin/copyparty-sfx.py` with `/usr/bin/copyparty`
this *may* also work on RHEL but [I'm not paying IBM to verify that](https://www.jeffgeerling.com/blog/2023/dear-red-hat-are-you-dumb) this *may* also work on RHEL but [I'm not paying IBM to verify that](https://www.jeffgeerling.com/blog/2023/dear-red-hat-are-you-dumb)
@@ -1472,7 +1521,7 @@ TLDR: yes
| play ogg/opus | - | - | - | - | yep | yep | `*3` | yep | | play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr | | **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
* internet explorer 6 to 8 behave the same * internet explorer 6 through 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions * firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`) * `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server * `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server

View File

@@ -207,7 +207,7 @@ def examples():
def main(): def main():
global NC, BY_PATH global NC, BY_PATH # pylint: disable=global-statement
os.system("") os.system("")
print() print()
@@ -282,7 +282,8 @@ def main():
if ver == "corrupt": if ver == "corrupt":
die("{} database appears to be corrupt, sorry") die("{} database appears to be corrupt, sorry")
if ver < DB_VER1 or ver > DB_VER2: iver = int(ver)
if iver < DB_VER1 or iver > DB_VER2:
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first" m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
die(m) die(m)

View File

@@ -53,7 +53,13 @@ from urllib.parse import unquote_to_bytes as unquote
WINDOWS = sys.platform == "win32" WINDOWS = sys.platform == "win32"
MACOS = platform.system() == "Darwin" MACOS = platform.system() == "Darwin"
UTC = timezone.utc UTC = timezone.utc
info = log = dbg = None
def print(*args, **kwargs):
try:
builtins.print(*list(args), **kwargs)
except:
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
print( print(
@@ -65,6 +71,13 @@ print(
) )
def null_log(msg):
pass
info = log = dbg = null_log
try: try:
from fuse import FUSE, FuseOSError, Operations from fuse import FUSE, FuseOSError, Operations
except: except:
@@ -84,13 +97,6 @@ except:
raise raise
def print(*args, **kwargs):
try:
builtins.print(*list(args), **kwargs)
except:
builtins.print(termsafe(" ".join(str(x) for x in args)), **kwargs)
def termsafe(txt): def termsafe(txt):
try: try:
return txt.encode(sys.stdout.encoding, "backslashreplace").decode( return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
@@ -119,10 +125,6 @@ def fancy_log(msg):
print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="") print("{:10.6f} {} {}\n".format(time.time() % 900, rice_tid(), msg), end="")
def null_log(msg):
pass
def hexler(binary): def hexler(binary):
return binary.replace("\r", "\\r").replace("\n", "\\n") return binary.replace("\r", "\\r").replace("\n", "\\n")
return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary]) return " ".join(["{}\033[36m{:02x}\033[0m".format(b, ord(b)) for b in binary])

View File

@@ -12,13 +12,13 @@ done
help() { cat <<'EOF' help() { cat <<'EOF'
usage: usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...] ./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
example: example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
example for running straight from source (instead of using an sfx): example for running straight from source (instead of using an sfx):
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
note that if you have python modules installed as --user (such as bpm/key detectors), note that if you have python modules installed as --user (such as bpm/key detectors),
you should add /home/foo/.local as a VOLDIR you should add /home/foo/.local as a VOLDIR
@@ -28,6 +28,16 @@ exit 1
} }
errs=
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
command -v $c >/dev/null || {
echo ERROR: command not found: $c
errs=1
}
done
[ $errs ] && exit 1
# read arguments # read arguments
trap help EXIT trap help EXIT
jail="$(realpath "$1")"; shift jail="$(realpath "$1")"; shift
@@ -58,11 +68,18 @@ cpp="$1"; shift
} }
trap - EXIT trap - EXIT
usr="$(getent passwd $uid | cut -d: -f1)"
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
uid="$(getent passwd $uid | cut -d: -f3)"
grp="$(getent group $gid | cut -d: -f1)"
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
gid="$(getent group $gid | cut -d: -f3)"
# debug/vis # debug/vis
echo echo
echo "chroot-dir = $jail" echo "chroot-dir = $jail"
echo "user:group = $uid:$gid" echo "user:group = $uid:$gid ($usr:$grp)"
echo " copyparty = $cpp" echo " copyparty = $cpp"
echo echo
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:" printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
@@ -80,34 +97,39 @@ jail="${jail%/}"
# bind-mount system directories and volumes # bind-mount system directories and volumes
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq | printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
while IFS= read -r v; do while IFS= read -r v; do
[ -e "$v" ] || { [ -e "$v" ] || {
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v" printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
continue continue
} }
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a) i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b) i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
# echo "v [$v] i1 [$i1] i2 [$i2]"
[ $i1 = $i2 ] && continue [ $i1 = $i2 ] && continue
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
mkdir -p "$jail$v" mkdir -p "$jail$v"
mount --bind "$v" "$jail$v" mount --bind "$v" "$jail$v"
done done
rmdir "$jail/.prisonlock" || true
cln() { cln() {
rv=$? trap - EXIT
wait -f -p rv $p || true wait -f -n $p && rv=0 || rv=$?
cd / cd /
echo "stopping chroot..." echo "stopping chroot..."
lsof "$jail" | grep -F "$jail" && for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
echo "chroot is in use; will not unmount" || echo "chroot is in use; will not unmount" ||
{ {
mount | grep -F " on $jail" | mount | grep -F " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' | awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount LC_ALL=C sort -r | while IFS= read -r v; do
umount "$v" && echo "umount OK: $v"
done
} }
rmdir "$jail/.prisonlock" || true
exit $rv exit $rv
} }
trap cln EXIT trap cln EXIT
@@ -128,8 +150,8 @@ chmod 777 "$jail/tmp"
# run copyparty # run copyparty
export HOME=$(getent passwd $uid | cut -d: -f6) export HOME="$(getent passwd $uid | cut -d: -f6)"
export USER=$(getent passwd $uid | cut -d: -f1) export USER="$usr"
export LOGNAME="$USER" export LOGNAME="$USER"
#echo "pybin [$pybin]" #echo "pybin [$pybin]"
#echo "pyarg [$pyarg]" #echo "pyarg [$pyarg]"
@@ -137,5 +159,5 @@ export LOGNAME="$USER"
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" & chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
p=$! p=$!
trap 'kill -USR1 $p' USR1 trap 'kill -USR1 $p' USR1
trap 'kill $p' INT TERM trap 'trap - INT TERM; kill $p' INT TERM
wait wait

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
S_VERSION = "1.10" S_VERSION = "1.11"
S_BUILD_DT = "2023-08-15" S_BUILD_DT = "2023-11-11"
""" """
u2c.py: upload to copyparty u2c.py: upload to copyparty
@@ -105,12 +105,14 @@ class File(object):
# set by handshake # set by handshake
self.recheck = False # duplicate; redo handshake after all files done self.recheck = False # duplicate; redo handshake after all files done
self.ucids = [] # type: list[str] # chunks which need to be uploaded self.ucids = [] # type: list[str] # chunks which need to be uploaded
self.wark = None # type: str self.wark = "" # type: str
self.url = None # type: str self.url = "" # type: str
self.nhs = 0
# set by upload # set by upload
self.up_b = 0 # type: int self.up_b = 0 # type: int
self.up_c = 0 # type: int self.up_c = 0 # type: int
self.cd = 0
# t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n" # t = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
# eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name)) # eprint(t.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
@@ -221,6 +223,7 @@ class MTHash(object):
def hash_at(self, nch): def hash_at(self, nch):
f = self.f f = self.f
assert f
ofs = ofs0 = nch * self.csz ofs = ofs0 = nch * self.csz
hashobj = hashlib.sha512() hashobj = hashlib.sha512()
chunk_sz = chunk_rem = min(self.csz, self.sz - ofs) chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
@@ -433,7 +436,7 @@ def walkdirs(err, tops, excl):
za = [x.replace(b"/", b"\\") for x in za] za = [x.replace(b"/", b"\\") for x in za]
tops = za tops = za
ptn = re.compile(excl.encode("utf-8") or b"\n") ptn = re.compile(excl.encode("utf-8") or b"\n", re.I)
for top in tops: for top in tops:
isdir = os.path.isdir(top) isdir = os.path.isdir(top)
@@ -461,7 +464,7 @@ def quotep(btxt):
if not PY2: if not PY2:
quot1 = quot1.encode("ascii") quot1 = quot1.encode("ascii")
return quot1.replace(b" ", b"+") return quot1.replace(b" ", b"+") # type: ignore
# from copyparty/util.py # from copyparty/util.py
@@ -498,7 +501,7 @@ def up2k_chunksize(filesize):
# mostly from copyparty/up2k.py # mostly from copyparty/up2k.py
def get_hashlist(file, pcb, mth): def get_hashlist(file, pcb, mth):
# type: (File, any, any) -> None # type: (File, Any, Any) -> None
"""generates the up2k hashlist from file contents, inserts it into `file`""" """generates the up2k hashlist from file contents, inserts it into `file`"""
chunk_sz = up2k_chunksize(file.size) chunk_sz = up2k_chunksize(file.size)
@@ -598,7 +601,7 @@ def handshake(ar, file, search):
raise raise
eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em)) eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
time.sleep(1) time.sleep(ar.cd)
try: try:
r = r.json() r = r.json()
@@ -689,6 +692,7 @@ class Ctl(object):
def __init__(self, ar, stats=None): def __init__(self, ar, stats=None):
self.ok = False self.ok = False
self.errs = 0
self.ar = ar self.ar = ar
self.stats = stats or self._scan() self.stats = stats or self._scan()
if not self.stats: if not self.stats:
@@ -736,7 +740,7 @@ class Ctl(object):
self._fancy() self._fancy()
self.ok = True self.ok = not self.errs
def _safe(self): def _safe(self):
"""minimal basic slow boring fallback codepath""" """minimal basic slow boring fallback codepath"""
@@ -961,13 +965,22 @@ class Ctl(object):
self.q_upload.put(None) self.q_upload.put(None)
break break
with self.mutex:
self.handshaker_busy += 1
upath = file.abs.decode("utf-8", "replace") upath = file.abs.decode("utf-8", "replace")
if not VT100: if not VT100:
upath = upath.lstrip("\\?") upath = upath.lstrip("\\?")
file.nhs += 1
if file.nhs > 32:
print("ERROR: giving up on file %s" % (upath))
self.errs += 1
continue
with self.mutex:
self.handshaker_busy += 1
while time.time() < file.cd:
time.sleep(0.1)
hs, sprs = handshake(self.ar, file, search) hs, sprs = handshake(self.ar, file, search)
if search: if search:
if hs: if hs:
@@ -1050,6 +1063,7 @@ class Ctl(object):
except Exception as ex: except Exception as ex:
t = "upload failed, retrying: {0} #{1} ({2})\n" t = "upload failed, retrying: {0} #{1} ({2})\n"
eprint(t.format(file.name, cid[:8], ex)) eprint(t.format(file.name, cid[:8], ex))
file.cd = time.time() + self.ar.cd
# handshake will fix it # handshake will fix it
with self.mutex: with self.mutex:
@@ -1103,7 +1117,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-v", action="store_true", help="verbose") ap.add_argument("-v", action="store_true", help="verbose")
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath") ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)") ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'") ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\\.hist/.*'")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible") ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--version", action="store_true", help="show version and exit") ap.add_argument("--version", action="store_true", help="show version and exit")
@@ -1121,6 +1135,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing") ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading") ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)") ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
ap.add_argument("--cd", type=float, metavar="SEC", default=5, help="delay before reattempting a failed handshake/upload")
ap.add_argument("--safe", action="store_true", help="use simple fallback approach") ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)") ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
@@ -1187,6 +1202,9 @@ source file/folder selection uses rsync syntax, meaning that:
ar.z = True ar.z = True
ctl = Ctl(ar, ctl.stats) ctl = Ctl(ar, ctl.stats)
if ctl.errs:
print("WARNING: %d errors" % (ctl.errs))
sys.exit(0 if ctl.ok else 1) sys.exit(0 if ctl.ok else 1)

View File

@@ -66,7 +66,7 @@ def main():
ofs = ln.find("{") ofs = ln.find("{")
j = json.loads(ln[ofs:]) j = json.loads(ln[ofs:])
except: except:
pass continue
w = j["wark"] w = j["wark"]
if db.execute("select w from up where w = ?", (w,)).fetchone(): if db.execute("select w from up where w = ?", (w,)).fetchone():

View File

@@ -13,7 +13,7 @@
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
upstream cpp { upstream cpp {
server 127.0.0.1:3923; server 127.0.0.1:3923 fail_timeout=1s;
keepalive 1; keepalive 1;
} }
server { server {

View File

@@ -1,14 +1,15 @@
# Maintainer: icxes <dev.null@need.moe> # Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty pkgname=copyparty
pkgver="1.9.12" pkgver="1.9.22"
pkgrel=1 pkgrel=1
pkgdesc="Portable file sharing hub" pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, zeroconf, media indexer, thumbnails++"
arch=("any") arch=("any")
url="https://github.com/9001/${pkgname}" url="https://github.com/9001/${pkgname}"
license=('MIT') license=('MIT')
depends=("python" "lsof" "python-jinja") depends=("python" "lsof" "python-jinja")
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz") makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags" optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
"cfssl: generate TLS certificates on startup (pointless when reverse-proxied)"
"python-mutagen: music tags (alternative)" "python-mutagen: music tags (alternative)"
"python-pillow: thumbnails for images" "python-pillow: thumbnails for images"
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)" "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
@@ -20,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
) )
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz") source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" ) backup=("etc/${pkgname}.d/init" )
sha256sums=("bf285725a70b3b201fa8927dd93b294dc9c8c29e00d6826accac8977fc72e1d4") sha256sums=("a54743b0a34f7d8b201b47940772f1830a924d45fe476b70ea96f93793ec028e")
build() { build() {
cd "${srcdir}/${pkgname}-${pkgver}" cd "${srcdir}/${pkgname}-${pkgver}"

View File

@@ -1,11 +1,11 @@
# this will start `/usr/bin/copyparty-sfx.py` # this will start `/usr/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere # in a chroot, preventing accidental access elsewhere,
# and read config from `/etc/copyparty.d/*.conf` # and read copyparty config from `/etc/copyparty.d/*.conf`
# #
# expose additional filesystem locations to copyparty # expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--` # by listing them between the last `cpp` and `--`
# #
# `1000 1000` = what user to run copyparty as # `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
# #
# unless you add -q to disable logging, you may want to remove the # unless you add -q to disable logging, you may want to remove the
# following line to allow buffering (slightly better performance): # following line to allow buffering (slightly better performance):
@@ -24,7 +24,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# run copyparty # run copyparty
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \ ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
/etc/copyparty.d \
-- \
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
[Install] [Install]

View File

@@ -3,6 +3,9 @@
# use argon2id-hashed passwords in config files (sha2 is always available) # use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true, withHashedPasswords ? true,
# generate TLS certificates on startup (pointless when reverse-proxied)
withCertgen ? false,
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing # create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
withThumbnails ? true, withThumbnails ? true,
@@ -34,6 +37,7 @@ let
] ]
++ lib.optional withSMB impacket ++ lib.optional withSMB impacket
++ lib.optional withFTPS pyopenssl ++ lib.optional withFTPS pyopenssl
++ lib.optional withCertgen cfssl
++ lib.optional withThumbnails pillow ++ lib.optional withThumbnails pillow
++ lib.optional withFastThumbnails pyvips ++ lib.optional withFastThumbnails pyvips
++ lib.optional withMediaProcessing ffmpeg ++ lib.optional withMediaProcessing ffmpeg

View File

@@ -1,5 +1,5 @@
{ {
"url": "https://github.com/9001/copyparty/releases/download/v1.9.12/copyparty-sfx.py", "url": "https://github.com/9001/copyparty/releases/download/v1.9.22/copyparty-sfx.py",
"version": "1.9.12", "version": "1.9.22",
"hash": "sha256-/ih867kYtyYcwM+jf5ciHmgTg8BVC+Ve6U8BnamN0kw=" "hash": "sha256-FfGJGCeRuv8YF7xfi5whmnr4M3+Qcdx/ysSZKJNsysM="
} }

View File

@@ -0,0 +1,42 @@
# not actually YAML but lets pretend:
# -*- mode: yaml -*-
# vim: ft=yaml:
# put this file in /etc/
[global]
e2dsa # enable file indexing and filesystem scanning
e2ts # and enable multimedia indexing
ansi # and colors in log messages
# disable logging to stdout/journalctl and log to a file instead;
# $LOGS_DIRECTORY is usually /var/log/copyparty (comes from systemd)
# and copyparty replaces %Y-%m%d with Year-MonthDay, so the
# full path will be something like /var/log/copyparty/2023-1130.txt
# (note: enable compression by adding .xz at the end)
q, lo: $LOGS_DIRECTORY/%Y-%m%d.log
# p: 80,443,3923 # listen on 80/443 as well (requires CAP_NET_BIND_SERVICE)
# i: 127.0.0.1 # only allow connections from localhost (reverse-proxies)
# ftp: 3921 # enable ftp server on port 3921
# p: 3939 # listen on another port
# df: 16 # stop accepting uploads if less than 16 GB free disk space
# ver # show copyparty version in the controlpanel
# grid # show thumbnails/grid-view by default
# theme: 2 # monokai
# name: datasaver # change the server-name that's displayed in the browser
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
# no-robots, force-js # make it harder for search engines to read your server
[accounts]
ed: wark # username: password
[/] # create a volume at "/" (the webroot), which will
/mnt # share the contents of the "/mnt" folder
accs:
rw: * # everyone gets read-write access, but
rwmda: ed # the user "ed" gets read-write-move-delete-admin

View File

@@ -1,28 +1,27 @@
# this will start `/usr/local/bin/copyparty-sfx.py` # this will start `/usr/local/bin/copyparty-sfx.py` and
# and share '/mnt' with anonymous read+write # read copyparty config from `/etc/copyparty.conf`, for example:
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/systemd/copyparty.conf
# #
# installation: # installation:
# wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py # wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py
# cp -pv copyparty.service /etc/systemd/system/ # useradd -r -s /sbin/nologin -d /var/lib/copyparty copyparty
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel # firewall-cmd --permanent --add-port=3923/tcp # --zone=libvirt
# firewall-cmd --permanent --add-port={80,443,3923}/tcp # --zone=libvirt
# firewall-cmd --reload # firewall-cmd --reload
# cp -pv copyparty.service /etc/systemd/system/
# cp -pv copyparty.conf /etc/
# restorecon -vr /etc/systemd/system/copyparty.service # on fedora/rhel
# systemctl daemon-reload && systemctl enable --now copyparty # systemctl daemon-reload && systemctl enable --now copyparty
# #
# if it fails to start, first check this: systemctl status copyparty # if it fails to start, first check this: systemctl status copyparty
# then try starting it while viewing logs: journalctl -fan 100 # then try starting it while viewing logs:
# journalctl -fan 100
# tail -Fn 100 /var/log/copyparty/$(date +%Y-%m%d.log)
# #
# you may want to: # you may want to:
# change "User=cpp" and "/home/cpp/" to another user # - change "User=copyparty" and "/var/lib/copyparty/" to another user
# remove the nft lines to only listen on port 3923 # - edit /etc/copyparty.conf to configure copyparty
# and in the ExecStart= line: # and in the ExecStart= line:
# change '/usr/bin/python3' to another interpreter # - change '/usr/bin/python3' to another interpreter
# change '/mnt::rw' to another location or permission-set
# add '-q' to disable logging on busy servers
# add '-i 127.0.0.1' to only allow local connections
# add '-e2dsa' to enable filesystem scanning + indexing
# add '-e2ts' to enable metadata indexing
# remove '--ansi' to disable colored logs
# #
# with `Type=notify`, copyparty will signal systemd when it is ready to # with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty. # accept connections; correctly delaying units depending on copyparty.
@@ -30,11 +29,9 @@
# python disabling line-buffering, so messages are out-of-order: # python disabling line-buffering, so messages are out-of-order:
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png # https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
# #
# unless you add -q to disable logging, you may want to remove the ########################################################################
# following line to allow buffering (slightly better performance): ########################################################################
# Environment=PYTHONUNBUFFERED=x
#
# keep ExecStartPre before ExecStart, at least on rhel8
[Unit] [Unit]
Description=copyparty file server Description=copyparty file server
@@ -44,23 +41,52 @@ Type=notify
SyslogIdentifier=copyparty SyslogIdentifier=copyparty
Environment=PYTHONUNBUFFERED=x Environment=PYTHONUNBUFFERED=x
ExecReload=/bin/kill -s USR1 $MAINPID ExecReload=/bin/kill -s USR1 $MAINPID
PermissionsStartOnly=true
# user to run as + where the TLS certificate is (if any) ## user to run as + where the TLS certificate is (if any)
User=cpp ##
Environment=XDG_CONFIG_HOME=/home/cpp/.config User=copyparty
Group=copyparty
WorkingDirectory=/var/lib/copyparty
Environment=XDG_CONFIG_HOME=/var/lib/copyparty/.config
# OPTIONAL: setup forwarding from ports 80 and 443 to port 3923 ## OPTIONAL: allow copyparty to listen on low ports (like 80/443);
ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true' ## you need to uncomment the "p: 80,443,3923" in the config too
ExecStartPre=+nft add table ip nat ## ------------------------------------------------------------
ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; } ## a slightly safer alternative is to enable partyalone.service
ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923 ## which does portforwarding with nftables instead, but an even
ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923 ## better option is to use a reverse-proxy (nginx/caddy/...)
##
AmbientCapabilities=CAP_NET_BIND_SERVICE
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running ## some quick hardening; TODO port more from the nixos package
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ##
MemoryMax=50%
MemorySwapMax=50%
ProtectClock=true
ProtectControlGroups=true
ProtectHostname=true
ProtectKernelLogs=true
ProtectKernelModules=true
ProtectKernelTunables=true
ProtectProc=invisible
RemoveIPC=true
RestrictNamespaces=true
RestrictRealtime=true
RestrictSUIDSGID=true
# copyparty settings ## create a directory for logfiles;
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw ## this defines $LOGS_DIRECTORY which is used in copyparty.conf
##
LogsDirectory=copyparty
## finally, start copyparty and give it the config file:
##
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -c /etc/copyparty.conf
# NOTE: if you installed copyparty from an OS package repo (nice)
# then you probably want something like this instead:
#ExecStart=/usr/bin/copyparty -c /etc/copyparty.conf
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

View File

@@ -1,5 +1,5 @@
# this will start `/usr/local/bin/copyparty-sfx.py` # this will start `/usr/local/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere # in a chroot, preventing accidental access elsewhere,
# and share '/mnt' with anonymous read+write # and share '/mnt' with anonymous read+write
# #
# installation: # installation:
@@ -7,9 +7,9 @@
# 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty # 2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
# #
# expose additional filesystem locations to copyparty # expose additional filesystem locations to copyparty
# by listing them between the last `1000` and `--` # by listing them between the last `cpp` and `--`
# #
# `1000 1000` = what user to run copyparty as # `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
# #
# you may want to: # you may want to:
# change '/mnt::rw' to another location or permission-set # change '/mnt::rw' to another location or permission-set
@@ -32,7 +32,9 @@ ExecReload=/bin/kill -s USR1 $MAINPID
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
# run copyparty # run copyparty
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \ ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail cpp cpp \
/mnt \
-- \
/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw
[Install] [Install]

View File

@@ -23,7 +23,7 @@ if not PY2:
unicode: Callable[[Any], str] = str unicode: Callable[[Any], str] = str
else: else:
sys.dont_write_bytecode = True sys.dont_write_bytecode = True
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable unicode = unicode # type: ignore
WINDOWS: Any = ( WINDOWS: Any = (
[int(x) for x in platform.version().split(".")] [int(x) for x in platform.version().split(".")]

View File

@@ -27,6 +27,7 @@ from .authsrv import expand_config_file, re_vol, split_cfg_ln, upgrade_cfg_fmt
from .cfg import flagcats, onedash from .cfg import flagcats, onedash
from .svchub import SvcHub from .svchub import SvcHub
from .util import ( from .util import (
DEF_EXP,
DEF_MTE, DEF_MTE,
DEF_MTH, DEF_MTH,
IMPLICATIONS, IMPLICATIONS,
@@ -142,9 +143,11 @@ def warn(msg: str) -> None:
lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg)) lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
def init_E(E: EnvParams) -> None: def init_E(EE: EnvParams) -> None:
# __init__ runs 18 times when oxidized; do expensive stuff here # __init__ runs 18 times when oxidized; do expensive stuff here
E = EE # pylint: disable=redefined-outer-name
def get_unixdir() -> str: def get_unixdir() -> str:
paths: list[tuple[Callable[..., Any], str]] = [ paths: list[tuple[Callable[..., Any], str]] = [
(os.environ.get, "XDG_CONFIG_HOME"), (os.environ.get, "XDG_CONFIG_HOME"),
@@ -245,7 +248,7 @@ def get_srvname() -> str:
return ret return ret
def get_fk_salt(cert_path) -> str: def get_fk_salt() -> str:
fp = os.path.join(E.cfg, "fk-salt.txt") fp = os.path.join(E.cfg, "fk-salt.txt")
try: try:
with open(fp, "rb") as f: with open(fp, "rb") as f:
@@ -319,6 +322,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
# oh man i love openssl # oh man i love openssl
# check this out # check this out
# hold my beer # hold my beer
assert ssl
ptn = re.compile(r"^OP_NO_(TLS|SSL)v") ptn = re.compile(r"^OP_NO_(TLS|SSL)v")
sslver = terse_sslver(al.ssl_ver).split(",") sslver = terse_sslver(al.ssl_ver).split(",")
flags = [k for k in ssl.__dict__ if ptn.match(k)] flags = [k for k in ssl.__dict__ if ptn.match(k)]
@@ -352,6 +356,7 @@ def configure_ssl_ver(al: argparse.Namespace) -> None:
def configure_ssl_ciphers(al: argparse.Namespace) -> None: def configure_ssl_ciphers(al: argparse.Namespace) -> None:
assert ssl
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
if al.ssl_ver: if al.ssl_ver:
ctx.options &= ~al.ssl_flags_en ctx.options &= ~al.ssl_flags_en
@@ -431,9 +436,9 @@ def disable_quickedit() -> None:
if PY2: if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD) wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
k32.GetStdHandle.errcheck = ecb k32.GetStdHandle.errcheck = ecb # type: ignore
k32.GetConsoleMode.errcheck = ecb k32.GetConsoleMode.errcheck = ecb # type: ignore
k32.SetConsoleMode.errcheck = ecb k32.SetConsoleMode.errcheck = ecb # type: ignore
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD) k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD) k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
@@ -646,6 +651,47 @@ def get_sects():
""" """
), ),
], ],
[
"exp",
"text expansion",
dedent(
"""
specify --exp or the "exp" volflag to enable placeholder expansions
in README.md / .prologue.html / .epilogue.html
--exp-md (volflag exp_md) holds the list of placeholders which can be
expanded in READMEs, and --exp-lg (volflag exp_lg) likewise for logues;
any placeholder not given in those lists will be ignored and shown as-is
the default list will expand the following placeholders:
\033[36m{{self.ip}} \033[35mclient ip
\033[36m{{self.ua}} \033[35mclient user-agent
\033[36m{{self.uname}} \033[35mclient username
\033[36m{{self.host}} \033[35mthe "Host" header, or the server's external IP otherwise
\033[36m{{cfg.name}} \033[35mthe --name global-config
\033[36m{{cfg.logout}} \033[35mthe --logout global-config
\033[36m{{vf.scan}} \033[35mthe "scan" volflag
\033[36m{{vf.thsize}} \033[35mthumbnail size
\033[36m{{srv.itime}} \033[35mserver time in seconds
\033[36m{{srv.htime}} \033[35mserver time as YY-mm-dd, HH:MM:SS (UTC)
\033[36m{{hdr.cf_ipcountry}} \033[35mthe "CF-IPCountry" client header (probably blank)
\033[0m
so the following types of placeholders can be added to the lists:
* any client header can be accessed through {{hdr.*}}
* any variable in httpcli.py can be accessed through {{self.*}}
* any global server setting can be accessed through {{cfg.*}}
* any volflag can be accessed through {{vf.*}}
remove vf.scan from default list using --exp-md /vf.scan
add "accept" header to def. list using --exp-md +hdr.accept
for performance reasons, expansion only happens while embedding
documents into directory listings, and when accessing a ?doc=...
link, but never otherwise, so if you click a -txt- link you'll
have to refresh the page to apply expansion
"""
),
],
[ [
"ls", "ls",
"volume inspection", "volume inspection",
@@ -776,8 +822,6 @@ def add_general(ap, nc, srvname):
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]") ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, \033[33mUSER\033[0m:\033[33mPASS\033[0m; example [\033[32med:wark\033[0m]")
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]") ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, \033[33mSRC\033[0m:\033[33mDST\033[0m:\033[33mFLAG\033[0m; examples [\033[32m.::r\033[0m], [\033[32m/mnt/nas/music:/music:r:aed\033[0m]")
ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files") ap2.add_argument("-ed", action="store_true", help="enable the ?dots url parameter / client option which allows clients to see dotfiles / hidden files")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform") ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-form POSTs; see --help-urlform")
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]") ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example [\033[32m$ip-10.1.2.\033[0m] or [\033[32m$ip-]")
ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)") ap2.add_argument("--name", metavar="TXT", type=u, default=srvname, help="server name (displayed topleft in browser and in mDNS)")
@@ -790,7 +834,7 @@ def add_qr(ap, tty):
ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup") ap2.add_argument("--qr", action="store_true", help="show http:// QR-code on startup")
ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup") ap2.add_argument("--qrs", action="store_true", help="show https:// QR-code on startup")
ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]") ap2.add_argument("--qrl", metavar="PATH", type=u, default="", help="location to include in the url, for example [\033[32mpriv/?pw=hunter2\033[0m]")
ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with PREFIX; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead") ap2.add_argument("--qri", metavar="PREFIX", type=u, default="", help="select IP which starts with \033[33mPREFIX\033[0m; [\033[32m.\033[0m] to force default IP when mDNS URL would have been used instead")
ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable") ap2.add_argument("--qr-fg", metavar="COLOR", type=int, default=0 if tty else 16, help="foreground; try [\033[32m0\033[0m] if the qr-code is unreadable")
ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)") ap2.add_argument("--qr-bg", metavar="COLOR", type=int, default=229, help="background (white=255)")
ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)") ap2.add_argument("--qrp", metavar="CELLS", type=int, default=4, help="padding (spec says 4 or more, but 1 is usually fine)")
@@ -802,7 +846,7 @@ def add_upload(ap):
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed") ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads, hiding them from clients unless -ed")
ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip") ap2.add_argument("--plain-ip", action="store_true", help="when avoiding filename collisions by appending the uploader's ip to the filename: append the plaintext ip instead of salting and hashing the ip")
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled") ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than SEC seconds ago)") ap2.add_argument("--blank-wt", metavar="SEC", type=int, default=300, help="file write grace period (any client can write to a blank file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600") ap2.add_argument("--reg-cap", metavar="N", type=int, default=38400, help="max number of uploads to keep in memory when running without -e2d; roughly 1 MiB RAM per 600")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)") ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload (very slow on windows)")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)") ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even when it might be dangerous (multiprocessing, filesystems lacking sparse-files support, ...)")
@@ -811,13 +855,13 @@ def add_upload(ap):
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)") ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead (volflag=copydupes)")
ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)") ap2.add_argument("--no-dupe", action="store_true", help="reject duplicate files during upload; only matches within the same volume (volflag=nodupe)")
ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually") ap2.add_argument("--no-snap", action="store_true", help="disable snapshots -- forget unfinished uploads on shutdown; don't create .hist/up2k.snap files -- abandoned/interrupted uploads must be cleaned up manually")
ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every SEC seconds; allows resuming incomplete uploads after a server crash") ap2.add_argument("--snap-wri", metavar="SEC", type=int, default=300, help="write upload state to ./hist/up2k.snap every \033[33mSEC\033[0m seconds; allows resuming incomplete uploads after a server crash")
ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after MIN minutes; impossible to resume them after that (360=6h, 1440=24h)") ap2.add_argument("--snap-drop", metavar="MIN", type=float, default=1440, help="forget unfinished uploads after \033[33mMIN\033[0m minutes; impossible to resume them after that (360=6h, 1440=24h)")
ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)") ap2.add_argument("--u2ts", metavar="TXT", type=u, default="c", help="how to timestamp uploaded files; [\033[32mc\033[0m]=client-last-modified, [\033[32mu\033[0m]=upload-time, [\033[32mfc\033[0m]=force-c, [\033[32mfu\033[0m]=force-u (volflag=u2ts)")
ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)") ap2.add_argument("--rand", action="store_true", help="force randomized filenames, --nrand chars long (volflag=rand)")
ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)") ap2.add_argument("--nrand", metavar="NUM", type=int, default=9, help="randomized filenames length (volflag=nrand)")
ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)") ap2.add_argument("--magic", action="store_true", help="enable filetype detection on nameless uploads (volflag=magic)")
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests") ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure \033[33mGiB\033[0m free disk space by rejecting upload requests")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files") ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck") ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; [\033[32m-1\033[0m] = forbidden/always-off, [\033[32m0\033[0m] = default-off and warn if enabled, [\033[32m1\033[0m] = default-off, [\033[32m2\033[0m] = on, [\033[32m3\033[0m] = on and disable datecheck")
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine") ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; [\033[32ms\033[0m]=smallest-first, [\033[32mn\033[0m]=alphabetical, [\033[32mfs\033[0m]=force-s, [\033[32mfn\033[0m]=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
@@ -832,6 +876,7 @@ def add_network(ap):
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy") ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; [\033[32m0\033[0m]=tcp, [\033[32m1\033[0m]=origin (first x-fwd, unsafe), [\033[32m2\033[0m]=outermost-proxy, [\033[32m3\033[0m]=second-proxy, [\033[32m-1\033[0m]=closest-proxy")
ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from (argument must be lowercase, but not the actual header)") ap2.add_argument("--xff-hdr", metavar="NAME", type=u, default="x-forwarded-for", help="if reverse-proxied, which http header to read the client's real ip from (argument must be lowercase, but not the actual header)")
ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (--xff-hdr) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using --xff-hdr=cf-connecting-ip (or similar)") ap2.add_argument("--xff-src", metavar="IP", type=u, default="127., ::1", help="comma-separated list of trusted reverse-proxy IPs; only accept the real-ip header (--xff-hdr) if the incoming connection is from an IP starting with either of these. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using --xff-hdr=cf-connecting-ip (or similar)")
ap2.add_argument("--ipa", metavar="PREFIX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPREFIX\033[0m; example: [\033[32m127., 10.89., 192.168.\033[0m]")
ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)") ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here (eg. /foo/bar)")
if ANYWIN: if ANYWIN:
ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances") ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
@@ -842,7 +887,7 @@ def add_network(ap):
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes") ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds") ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds") ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..SEC") ap2.add_argument("--rsp-jtr", metavar="SEC", type=float, default=0, help="debug: response delay, random duration 0..\033[33mSEC\033[0m")
def add_tls(ap, cert_path): def add_tls(ap, cert_path):
@@ -875,14 +920,19 @@ def add_cert(ap, cert_path):
ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048") ap2.add_argument("--crt-alg", metavar="S-N", type=u, default="ecdsa-256", help="algorithm and keysize; one of these: ecdsa-256 rsa-4096 rsa-2048")
def add_auth(ap):
ap2 = ap.add_argument_group('user authentication options')
ap2.add_argument("--hdr-au-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy. Also, the argument must be lowercase, but not the actual header")
def add_zeroconf(ap): def add_zeroconf(ap):
ap2 = ap.add_argument_group("Zeroconf options") ap2 = ap.add_argument_group("Zeroconf options")
ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)") ap2.add_argument("-z", action="store_true", help="enable all zeroconf backends (mdns, ssdp)")
ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m") ap2.add_argument("--z-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes\n └─example: \033[32meth0, wlo1, virhost0, 192.168.123.0/24, fd00:fda::/96\033[0m")
ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes") ap2.add_argument("--z-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every SEC seconds (0=disable)") ap2.add_argument("--z-chk", metavar="SEC", type=int, default=10, help="check for network changes every \033[33mSEC\033[0m seconds (0=disable)")
ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends") ap2.add_argument("-zv", action="store_true", help="verbose all zeroconf backends")
ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every SEC seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]") ap2.add_argument("--mc-hop", metavar="SEC", type=int, default=0, help="rejoin multicast groups every \033[33mSEC\033[0m seconds (workaround for some switches/routers which cause mDNS to suddenly stop working after some time); try [\033[32m300\033[0m] or [\033[32m180\033[0m]")
def add_zc_mdns(ap): def add_zc_mdns(ap):
@@ -902,7 +952,7 @@ def add_zc_mdns(ap):
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network") ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not") ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty") ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server") ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every \033[33mSEC\033[0m; useful if clients have IPs in a subnet which doesn't overlap with the server")
def add_zc_ssdp(ap): def add_zc_ssdp(ap):
@@ -917,11 +967,12 @@ def add_zc_ssdp(ap):
def add_ftp(ap): def add_ftp(ap):
ap2 = ap.add_argument_group('FTP options') ap2 = ap.add_argument_group('FTP options')
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example \033[32m3921") ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on \033[33mPORT\033[0m, for example \033[32m3921")
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example \033[32m3990") ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on \033[33mPORT\033[0m, for example \033[32m3990")
ap2.add_argument("--ftpv", action="store_true", help="verbose") ap2.add_argument("--ftpv", action="store_true", help="verbose")
ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4") ap2.add_argument("--ftp4", action="store_true", help="only listen on IPv4")
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than SEC seconds ago)") ap2.add_argument("--ftp-ipa", metavar="PFX", type=u, default="", help="only accept connections from IP-addresses starting with \033[33mPFX\033[0m; specify [\033[32many\033[0m] to disable inheriting \033[33m--ipa\033[0m. Example: [\033[32m127., 10.89., 192.168.\033[0m]")
ap2.add_argument("--ftp-wt", metavar="SEC", type=int, default=7, help="grace period for resuming interrupted uploads (any client can write to any file last-modified more recently than \033[33mSEC\033[0m seconds ago)")
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections") ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000") ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")
@@ -958,15 +1009,15 @@ def add_handlers(ap):
def add_hooks(ap): def add_hooks(ap):
ap2 = ap.add_argument_group('event hooks (see --help-hooks)') ap2 = ap.add_argument_group('event hooks (see --help-hooks)')
ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute CMD before a file upload starts") ap2.add_argument("--xbu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file upload starts")
ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute CMD after a file upload finishes") ap2.add_argument("--xau", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file upload finishes")
ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute CMD after all uploads finish and volume is idle") ap2.add_argument("--xiu", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after all uploads finish and volume is idle")
ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute CMD before a file move/rename") ap2.add_argument("--xbr", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file move/rename")
ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute CMD after a file move/rename") ap2.add_argument("--xar", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file move/rename")
ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute CMD before a file delete") ap2.add_argument("--xbd", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m before a file delete")
ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute CMD after a file delete") ap2.add_argument("--xad", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m after a file delete")
ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute CMD on message") ap2.add_argument("--xm", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m on message")
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404/403/url)") ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute \033[33mCMD\033[0m if someone gets banned (pw/404/403/url)")
def add_stats(ap): def add_stats(ap):
@@ -974,6 +1025,7 @@ def add_stats(ap):
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts") ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)") ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)") ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
ap2.add_argument("--nos-vst", action="store_true", help="disable volume state metrics (indexing, analyzing, activity)")
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)") ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics") ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
@@ -1015,7 +1067,7 @@ def add_safety(ap):
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)") ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots") ap2.add_argument("--force-js", action="store_true", help="don't send folder listings as HTML, force clients to use the embedded json instead -- slight protection against misbehaving search engines which ignore --no-robots")
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)") ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything (volflag=norobots)")
ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)") ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after \033[33mH\033[0m hours of inactivity; [\033[32m0.0028\033[0m]=10sec, [\033[32m0.1\033[0m]=6min, [\033[32m24\033[0m]=day, [\033[32m168\033[0m]=week, [\033[32m720\033[0m]=month, [\033[32m8760\033[0m]=year)")
ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]") ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h") ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="50,60,1440", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; only affects users who cannot see directory listings because their access is either g/G/h")
ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month") ap2.add_argument("--ban-403", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m 403's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; [\033[32m1440\033[0m]=day, [\033[32m10080\033[0m]=week, [\033[32m43200\033[0m]=month")
@@ -1023,8 +1075,8 @@ def add_safety(ap):
ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to access g/G/h (decent replacement for --ban-404 if that can't be used)") ap2.add_argument("--ban-url", metavar="N,W,B", type=u, default="9,2,1440", help="hitting more than \033[33mN\033[0m sus URL's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; applies only to access g/G/h (decent replacement for --ban-404 if that can't be used)")
ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]") ap2.add_argument("--sus-urls", metavar="R", type=u, default=r"\.php$|(^|/)wp-(admin|content|includes)/", help="URLs which are considered sus / eligible for banning; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]") ap2.add_argument("--nonsus-urls", metavar="R", type=u, default=r"^(favicon\.ico|robots\.txt)$|^apple-touch-icon|^\.well-known", help="harmless URLs ignored from 404-bans; disable with blank or [\033[32mno\033[0m]")
ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0") ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for \033[33mMIN\033[0m minutes (and also kill its active connections) -- disable with 0")
ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]") ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for \033[33mB\033[0m minutes; disable with [\033[32m0\033[0m]")
ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives") ap2.add_argument("--acao", metavar="V[,V]", type=u, default="*", help="Access-Control-Allow-Origin; list of origins (domains/IPs without port) to accept requests from; [\033[32mhttps://1.2.3.4\033[0m]. Default [\033[32m*\033[0m] allows requests from all sites but removes cookies and http-auth; only ?pw=hunter2 survives")
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)") ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
@@ -1043,7 +1095,7 @@ def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options') ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints") ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all") ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing") ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after \033[33mWHEN\033[0m has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
def add_logging(ap): def add_logging(ap):
@@ -1052,13 +1104,14 @@ def add_logging(ap):
ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz") ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: \033[32mcpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR") ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR") ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
ap2.add_argument("--no-logflush", action="store_true", help="don't flush the logfile after each write; tiny bit faster")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup") ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals") ap2.add_argument("--log-tdec", metavar="N", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log passphrase of failed login attempts: 0=terse, 1=plaintext, 2=hashed") ap2.add_argument("--log-badpwd", metavar="N", type=int, default=1, help="log failed login attempt passwords: 0=terse, 1=plaintext, 2=hashed")
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs") ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling") ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header") ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching") ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
def add_admin(ap): def add_admin(ap):
@@ -1082,7 +1135,7 @@ def add_thumbnail(ap):
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs") ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg output for video thumbs")
ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs") ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs")
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds") ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than \033[33mSEC\033[0m seconds")
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds") ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern") ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; enabling -e2d will make these case-insensitive, and also automatically select thumbnails for all folders that contain pics, even if none match this pattern")
@@ -1100,7 +1153,7 @@ def add_transcoding(ap):
ap2 = ap.add_argument_group('transcoding options') ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding") ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)") ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds") ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
def add_db_general(ap, hcores): def add_db_general(ap, hcores):
@@ -1121,8 +1174,8 @@ def add_db_general(ap, hcores):
ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)") ap2.add_argument("--xlink", action="store_true", help="on upload: check all volumes for dupes, not just the target volume (volflag=xlink)")
ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing") ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)") ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off (volflag=scan)")
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)") ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than SEC seconds") ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially") ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)") ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
@@ -1141,7 +1194,16 @@ def add_db_metadata(ap):
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping") ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.); either an entire replacement list, or add/remove stuff on the default-list with +foo or /bar", default=DEF_MTE) ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.); either an entire replacement list, or add/remove stuff on the default-list with +foo or /bar", default=DEF_MTE)
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.); assign/add/remove same as -mte", default=DEF_MTH) ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.); assign/add/remove same as -mte", default=DEF_MTH)
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file") ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag \033[33mM\033[0m using program \033[33mBIN\033[0m to parse the file")
def add_txt(ap):
ap2 = ap.add_argument_group('textfile options')
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="textfile editor checks for serverside changes every \033[33mSEC\033[0m seconds")
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see --help-exp (volflag=exp)")
ap2.add_argument("--exp-md", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in markdown files; add/remove stuff on the default list with +hdr_foo or /vf.scan (volflag=exp_md)")
ap2.add_argument("--exp-lg", metavar="V,V,V", type=u, default=DEF_EXP, help="comma/space-separated list of placeholders to expand in prologue/epilogue files (volflag=exp_lg)")
def add_ui(ap, retry): def add_ui(ap, retry):
@@ -1151,7 +1213,7 @@ def add_ui(ap, retry):
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use") ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed") ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)") ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)") ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable") ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])") ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
@@ -1178,13 +1240,14 @@ def add_debug(ap):
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file") ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd") ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead") ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
ap2.add_argument("--srch-dbg", action="store_true", help="explain search processing, and do some extra expensive sanity checks")
ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc") ap2.add_argument("--rclone-mdns", action="store_true", help="use mdns-domain instead of server-ip on /?hc")
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60") ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to \033[33mP\033[0math every \033[33mS\033[0m second, for example --stackmon=\033[32m./st/%%Y-%%m/%%d/%%H%%M.xz,60")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC") ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every \033[33mSEC\033[0m")
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; [\033[32m.\033[0m] (a single dot) = all files") ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches \033[33mREGEX\033[0m; [\033[32m.\033[0m] (a single dot) = all files")
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir") ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --kf-dir already; default: 6.3 GiB max (200*32M)") ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at --kf-dir already; default: 6.3 GiB max (200*32M)")
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started") ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
# fmt: on # fmt: on
@@ -1201,7 +1264,7 @@ def run_argparse(
cert_path = os.path.join(E.cfg, "cert.pem") cert_path = os.path.join(E.cfg, "cert.pem")
fk_salt = get_fk_salt(cert_path) fk_salt = get_fk_salt()
ah_salt = get_ah_salt() ah_salt = get_ah_salt()
# alpine peaks at 5 threads for some reason, # alpine peaks at 5 threads for some reason,
@@ -1217,6 +1280,7 @@ def run_argparse(
add_network(ap) add_network(ap)
add_tls(ap, cert_path) add_tls(ap, cert_path)
add_cert(ap, cert_path) add_cert(ap, cert_path)
add_auth(ap)
add_qr(ap, tty) add_qr(ap, tty)
add_zeroconf(ap) add_zeroconf(ap)
add_zc_mdns(ap) add_zc_mdns(ap)
@@ -1237,6 +1301,7 @@ def run_argparse(
add_handlers(ap) add_handlers(ap)
add_hooks(ap) add_hooks(ap)
add_stats(ap) add_stats(ap)
add_txt(ap)
add_ui(ap, retry) add_ui(ap, retry)
add_admin(ap) add_admin(ap)
add_logging(ap) add_logging(ap)
@@ -1264,7 +1329,7 @@ def run_argparse(
for k, h, t in sects: for k, h, t in sects:
k2 = "help_" + k.replace("-", "_") k2 = "help_" + k.replace("-", "_")
if vars(ret)[k2]: if vars(ret)[k2]:
lprint("# {} help page".format(k)) lprint("# %s help page (%s)" % (k, h))
lprint(t + "\033[0m") lprint(t + "\033[0m")
sys.exit(0) sys.exit(0)

View File

@@ -1,8 +1,8 @@
# coding: utf-8 # coding: utf-8
VERSION = (1, 9, 13) VERSION = (1, 9, 23)
CODENAME = "prometheable" CODENAME = "prometheable"
BUILD_DT = (2023, 10, 21) BUILD_DT = (2023, 12, 1)
S_VERSION = ".".join(map(str, VERSION)) S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -21,9 +21,9 @@ from .util import (
META_NOBOTS, META_NOBOTS,
SQLITE_VER, SQLITE_VER,
UNPLICATIONS, UNPLICATIONS,
UTC,
ODict, ODict,
Pebkac, Pebkac,
UTC,
absreal, absreal,
afsenc, afsenc,
get_df, get_df,
@@ -414,7 +414,7 @@ class VFS(object):
hist = flags.get("hist") hist = flags.get("hist")
if hist and hist != "-": if hist and hist != "-":
zs = "{}/{}".format(hist.rstrip("/"), name) zs = "{}/{}".format(hist.rstrip("/"), name)
flags["hist"] = os.path.expanduser(zs) if zs.startswith("~") else zs flags["hist"] = os.path.expandvars(os.path.expanduser(zs))
return flags return flags
@@ -476,12 +476,10 @@ class VFS(object):
err: int = 403, err: int = 403,
) -> tuple["VFS", str]: ) -> tuple["VFS", str]:
"""returns [vfsnode,fs_remainder] if user has the requested permissions""" """returns [vfsnode,fs_remainder] if user has the requested permissions"""
if ANYWIN: if relchk(vpath):
mod = relchk(vpath) if self.log:
if mod: self.log("vfs", "invalid relpath [{}]".format(vpath))
if self.log: raise Pebkac(422)
self.log("vfs", "invalid relpath [{}]".format(vpath))
raise Pebkac(404)
cvpath = undot(vpath) cvpath = undot(vpath)
vn, rem = self._find(cvpath) vn, rem = self._find(cvpath)
@@ -500,8 +498,8 @@ class VFS(object):
t = "{} has no {} in [{}] => [{}] => [{}]" t = "{} has no {} in [{}] => [{}] => [{}]"
self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6) self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
t = "you don't have {}-access for this location" t = 'you don\'t have %s-access in "/%s"'
raise Pebkac(err, t.format(msg)) raise Pebkac(err, t % (msg, cvpath))
return vn, rem return vn, rem
@@ -949,9 +947,7 @@ class AuthSrv(object):
if vp is not None and ap is None: if vp is not None and ap is None:
ap = ln ap = ln
if ap.startswith("~"): ap = os.path.expandvars(os.path.expanduser(ap))
ap = os.path.expanduser(ap)
ap = absreal(ap) ap = absreal(ap)
self._l(ln, 2, "bound to filesystem-path [{}]".format(ap)) self._l(ln, 2, "bound to filesystem-path [{}]".format(ap))
self._map_volume(ap, vp, mount, daxs, mflags) self._map_volume(ap, vp, mount, daxs, mflags)
@@ -961,6 +957,7 @@ class AuthSrv(object):
err = "" err = ""
try: try:
self._l(ln, 5, "volume access config:") self._l(ln, 5, "volume access config:")
assert vp
sk, sv = ln.split(":") sk, sv = ln.split(":")
if re.sub("[rwmdgGha]", "", sk) or not sk: if re.sub("[rwmdgGha]", "", sk) or not sk:
err = "invalid accs permissions list; " err = "invalid accs permissions list; "
@@ -978,6 +975,7 @@ class AuthSrv(object):
err = "" err = ""
try: try:
self._l(ln, 6, "volume-specific config:") self._l(ln, 6, "volume-specific config:")
assert vp
zd = split_cfg_ln(ln) zd = split_cfg_ln(ln)
fstr = "" fstr = ""
for sk, sv in zd.items(): for sk, sv in zd.items():
@@ -1188,12 +1186,13 @@ class AuthSrv(object):
vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst]) vfs = VFS(self.log_func, mount[dst], dst, daxs[dst], mflags[dst])
continue continue
assert vfs # type: ignore
zv = vfs.add(mount[dst], dst) zv = vfs.add(mount[dst], dst)
zv.axs = daxs[dst] zv.axs = daxs[dst]
zv.flags = mflags[dst] zv.flags = mflags[dst]
zv.dbv = None zv.dbv = None
assert vfs assert vfs # type: ignore
vfs.all_vols = {} vfs.all_vols = {}
vfs.all_aps = [] vfs.all_aps = []
vfs.all_vps = [] vfs.all_vps = []
@@ -1261,9 +1260,7 @@ class AuthSrv(object):
if vflag == "-": if vflag == "-":
pass pass
elif vflag: elif vflag:
if vflag.startswith("~"): vflag = os.path.expandvars(os.path.expanduser(vflag))
vflag = os.path.expanduser(vflag)
vol.histpath = uncyg(vflag) if WINDOWS else vflag vol.histpath = uncyg(vflag) if WINDOWS else vflag
elif self.args.hist: elif self.args.hist:
for nch in range(len(hid)): for nch in range(len(hid)):
@@ -1479,15 +1476,11 @@ class AuthSrv(object):
raise Exception(t.format(dbd, dbds)) raise Exception(t.format(dbd, dbds))
# default tag cfgs if unset # default tag cfgs if unset
if "mte" not in vol.flags: for k in ("mte", "mth", "exp_md", "exp_lg"):
vol.flags["mte"] = self.args.mte.copy() if k not in vol.flags:
else: vol.flags[k] = getattr(self.args, k).copy()
vol.flags["mte"] = odfusion(self.args.mte, vol.flags["mte"]) else:
vol.flags[k] = odfusion(getattr(self.args, k), vol.flags[k])
if "mth" not in vol.flags:
vol.flags["mth"] = self.args.mth.copy()
else:
vol.flags["mth"] = odfusion(self.args.mth, vol.flags["mth"])
# append additive args from argv to volflags # append additive args from argv to volflags
hooks = "xbu xau xiu xbr xar xbd xad xm xban".split() hooks = "xbu xau xiu xbr xar xbd xad xm xban".split()
@@ -1727,6 +1720,9 @@ class AuthSrv(object):
def setup_pwhash(self, acct: dict[str, str]) -> None: def setup_pwhash(self, acct: dict[str, str]) -> None:
self.ah = PWHash(self.args) self.ah = PWHash(self.args)
if not self.ah.on: if not self.ah.on:
if self.args.ah_cli or self.args.ah_gen:
t = "\n BAD CONFIG:\n cannot --ah-cli or --ah-gen without --ah-alg"
raise Exception(t)
return return
if self.args.ah_cli: if self.args.ah_cli:

View File

@@ -46,8 +46,8 @@ class BrokerMp(object):
self.num_workers = self.args.j or CORES self.num_workers = self.args.j or CORES
self.log("broker", "booting {} subprocesses".format(self.num_workers)) self.log("broker", "booting {} subprocesses".format(self.num_workers))
for n in range(1, self.num_workers + 1): for n in range(1, self.num_workers + 1):
q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) # type: ignore
q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) # type: ignore
proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n)) proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n))
Daemon(self.collector, "mp-sink-{}".format(n), (proc,)) Daemon(self.collector, "mp-sink-{}".format(n), (proc,))

View File

@@ -132,7 +132,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
try: try:
expiry, inf = _read_crt(args, "srv.pem") expiry, inf = _read_crt(args, "srv.pem")
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry if "sans" not in inf:
raise Exception("no useable cert found")
expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
cert_insec = os.path.join(args.E.mod, "res/insecure.pem") cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
for n in names: for n in names:
if n not in inf["sans"]: if n not in inf["sans"]:

View File

@@ -17,7 +17,6 @@ def vf_bmap() -> dict[str, str]:
"no_thumb": "dthumb", "no_thumb": "dthumb",
"no_vthumb": "dvthumb", "no_vthumb": "dvthumb",
"no_athumb": "dathumb", "no_athumb": "dathumb",
"re_maxage": "scan",
"th_no_crop": "nocrop", "th_no_crop": "nocrop",
"dav_auth": "davauth", "dav_auth": "davauth",
"dav_rt": "davrt", "dav_rt": "davrt",
@@ -33,6 +32,7 @@ def vf_bmap() -> dict[str, str]:
"e2v", "e2v",
"e2vu", "e2vu",
"e2vp", "e2vp",
"exp",
"grid", "grid",
"hardlink", "hardlink",
"magic", "magic",
@@ -52,10 +52,19 @@ def vf_vmap() -> dict[str, str]:
ret = { ret = {
"no_hash": "nohash", "no_hash": "nohash",
"no_idx": "noidx", "no_idx": "noidx",
"re_maxage": "scan",
"th_convt": "convt", "th_convt": "convt",
"th_size": "thsize", "th_size": "thsize",
} }
for k in ("dbd", "lg_sbf", "md_sbf", "nrand", "sort", "unlist", "u2ts"): for k in (
"dbd",
"lg_sbf",
"md_sbf",
"nrand",
"sort",
"unlist",
"u2ts",
):
ret[k] = k ret[k] = k
return ret return ret
@@ -64,6 +73,8 @@ def vf_cmap() -> dict[str, str]:
"""argv-to-volflag: complex/lists""" """argv-to-volflag: complex/lists"""
ret = {} ret = {}
for k in ( for k in (
"exp_lg",
"exp_md",
"html_head", "html_head",
"mte", "mte",
"mth", "mth",

View File

@@ -9,19 +9,13 @@ import stat
import sys import sys
import time import time
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E
try:
import asynchat
except:
sys.path.append(os.path.join(E.mod, "vend"))
from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer
from pyftpdlib.filesystems import AbstractedFS, FilesystemError from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler from pyftpdlib.handlers import FTPHandler
from pyftpdlib.ioloop import IOLoop from pyftpdlib.ioloop import IOLoop
from pyftpdlib.servers import FTPServer from pyftpdlib.servers import FTPServer
from .__init__ import PY2, TYPE_CHECKING
from .authsrv import VFS from .authsrv import VFS
from .bos import bos from .bos import bos
from .util import ( from .util import (
@@ -92,6 +86,12 @@ class FtpAuth(DummyAuthorizer):
if bonk: if bonk:
logging.warning("client banned: invalid passwords") logging.warning("client banned: invalid passwords")
bans[ip] = bonk bans[ip] = bonk
try:
# only possible if multiprocessing disabled
self.hub.broker.httpsrv.bans[ip] = bonk # type: ignore
self.hub.broker.httpsrv.nban += 1 # type: ignore
except:
pass
raise AuthenticationFailed("Authentication failed.") raise AuthenticationFailed("Authentication failed.")
@@ -148,7 +148,7 @@ class FtpFs(AbstractedFS):
try: try:
vpath = vpath.replace("\\", "/").strip("/") vpath = vpath.replace("\\", "/").strip("/")
rd, fn = os.path.split(vpath) rd, fn = os.path.split(vpath)
if ANYWIN and relchk(rd): if relchk(rd):
logging.warning("malicious vpath: %s", vpath) logging.warning("malicious vpath: %s", vpath)
t = "Unsupported characters in [{}]" t = "Unsupported characters in [{}]"
raise FSE(t.format(vpath), 1) raise FSE(t.format(vpath), 1)
@@ -404,7 +404,16 @@ class FtpHandler(FTPHandler):
super(FtpHandler, self).__init__(conn, server, ioloop) super(FtpHandler, self).__init__(conn, server, ioloop)
cip = self.remote_ip cip = self.remote_ip
self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip if cip.startswith("::ffff:"):
cip = cip[7:]
if self.args.ftp_ipa_re and not self.args.ftp_ipa_re.match(cip):
logging.warning("client rejected (--ftp-ipa): %s", cip)
self.connected = False
conn.close()
return
self.cli_ip = cip
# abspath->vpath mapping to resolve log_transfer paths # abspath->vpath mapping to resolve log_transfer paths
self.vfs_map: dict[str, str] = {} self.vfs_map: dict[str, str] = {}

View File

@@ -39,10 +39,11 @@ from .szip import StreamZip
from .util import ( from .util import (
HTTPCODE, HTTPCODE,
META_NOBOTS, META_NOBOTS,
UTC,
Garda,
MultipartParser, MultipartParser,
ODict, ODict,
Pebkac, Pebkac,
UTC,
UnrecvEOF, UnrecvEOF,
absreal, absreal,
alltrace, alltrace,
@@ -75,11 +76,12 @@ from .util import (
runhook, runhook,
s3enc, s3enc,
sanitize_fn, sanitize_fn,
sanitize_vpath,
sendfile_kern, sendfile_kern,
sendfile_py, sendfile_py,
undot, undot,
unescape_cookie, unescape_cookie,
unquote, unquote, # type: ignore
unquotep, unquotep,
vjoin, vjoin,
vol_san, vol_san,
@@ -146,6 +148,7 @@ class HttpCli(object):
self.rem = " " self.rem = " "
self.vpath = " " self.vpath = " "
self.vpaths = " " self.vpaths = " "
self.gctx = " " # additional context for garda
self.trailing_slash = True self.trailing_slash = True
self.uname = " " self.uname = " "
self.pw = " " self.pw = " "
@@ -233,6 +236,10 @@ class HttpCli(object):
if self.is_banned(): if self.is_banned():
return False return False
if self.args.ipa_re and not self.args.ipa_re.match(self.conn.addr[0]):
self.log("client rejected (--ipa)", 3)
return False
try: try:
self.s.settimeout(2) self.s.settimeout(2)
headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead) headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
@@ -254,8 +261,8 @@ class HttpCli(object):
k, zs = header_line.split(":", 1) k, zs = header_line.split(":", 1)
self.headers[k.lower()] = zs.strip() self.headers[k.lower()] = zs.strip()
except: except:
msg = " ]\n#[ ".join(headerlines) msg = "#[ " + " ]\n#[ ".join(headerlines) + " ]"
raise Pebkac(400, "bad headers:\n#[ " + msg + " ]") raise Pebkac(400, "bad headers", log=msg)
except Pebkac as ex: except Pebkac as ex:
self.mode = "GET" self.mode = "GET"
@@ -268,8 +275,14 @@ class HttpCli(object):
self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True) self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True)
except: except:
pass pass
if ex.log:
self.log("additional error context:\n" + ex.log, 6)
return False return False
self.conn.hsrv.nreq += 1
self.ua = self.headers.get("user-agent", "") self.ua = self.headers.get("user-agent", "")
self.is_rclone = self.ua.startswith("rclone/") self.is_rclone = self.ua.startswith("rclone/")
@@ -411,12 +424,9 @@ class HttpCli(object):
self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath
) )
ok = "\x00" not in self.vpath if relchk(self.vpath) and (self.vpath != "*" or self.mode != "OPTIONS"):
if ANYWIN:
ok = ok and not relchk(self.vpath)
if not ok and (self.vpath != "*" or self.mode != "OPTIONS"):
self.log("invalid relpath [{}]".format(self.vpath)) self.log("invalid relpath [{}]".format(self.vpath))
self.cbonk(self.conn.hsrv.g422, self.vpath, "bad_vp", "invalid relpaths")
return self.tx_404() and self.keepalive return self.tx_404() and self.keepalive
zso = self.headers.get("authorization") zso = self.headers.get("authorization")
@@ -433,8 +443,16 @@ class HttpCli(object):
except: except:
pass pass
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw if self.args.hdr_au_usr:
self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*" self.pw = ""
self.uname = self.headers.get(self.args.hdr_au_usr) or "*"
if self.uname not in self.asrv.vfs.aread:
self.log("unknown username: [%s]" % (self.uname), 1)
self.uname = "*"
else:
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*"
self.rvol = self.asrv.vfs.aread[self.uname] self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname] self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname] self.mvol = self.asrv.vfs.amove[self.uname]
@@ -549,6 +567,9 @@ class HttpCli(object):
zb = b"<pre>" + html_escape(msg).encode("utf-8", "replace") zb = b"<pre>" + html_escape(msg).encode("utf-8", "replace")
h = {"WWW-Authenticate": 'Basic realm="a"'} if pex.code == 401 else {} h = {"WWW-Authenticate": 'Basic realm="a"'} if pex.code == 401 else {}
self.reply(zb, status=pex.code, headers=h, volsan=True) self.reply(zb, status=pex.code, headers=h, volsan=True)
if pex.log:
self.log("additional error context:\n" + pex.log, 6)
return self.keepalive return self.keepalive
except Pebkac: except Pebkac:
return False return False
@@ -559,6 +580,36 @@ class HttpCli(object):
else: else:
return self.conn.iphash.s(self.ip) return self.conn.iphash.s(self.ip)
def cbonk(self, g: Garda, v: str, reason: str, descr: str) -> bool:
self.conn.hsrv.nsus += 1
if not g.lim:
return False
bonk, ip = g.bonk(self.ip, v + self.gctx)
if not bonk:
return False
xban = self.vn.flags.get("xban")
if not xban or not runhook(
self.log,
xban,
self.vn.canonical(self.rem),
self.vpath,
self.host,
self.uname,
time.time(),
0,
self.ip,
time.time(),
reason,
):
self.log("client banned: %s" % (descr,), 1)
self.conn.hsrv.bans[ip] = bonk
self.conn.hsrv.nban += 1
return True
return False
def is_banned(self) -> bool: def is_banned(self) -> bool:
if not self.conn.bans: if not self.conn.bans:
return False return False
@@ -678,24 +729,7 @@ class HttpCli(object):
or not self.args.nonsus_urls or not self.args.nonsus_urls
or not self.args.nonsus_urls.search(self.vpath) or not self.args.nonsus_urls.search(self.vpath)
): ):
bonk, ip = g.bonk(self.ip, self.vpath) self.cbonk(g, self.vpath, str(status), "%ss" % (status,))
if bonk:
xban = self.vn.flags.get("xban")
if not xban or not runhook(
self.log,
xban,
self.vn.canonical(self.rem),
self.vpath,
self.host,
self.uname,
time.time(),
0,
self.ip,
time.time(),
str(status),
):
self.log("client banned: %ss" % (status,), 1)
self.conn.hsrv.bans[ip] = bonk
if volsan: if volsan:
vols = list(self.asrv.vfs.all_vols.values()) vols = list(self.asrv.vfs.all_vols.values())
@@ -866,7 +900,11 @@ class HttpCli(object):
return self.tx_ico(self.vpath.split("/")[-1], exact=True) return self.tx_ico(self.vpath.split("/")[-1], exact=True)
if self.vpath.startswith(".cpr/ssdp"): if self.vpath.startswith(".cpr/ssdp"):
return self.conn.hsrv.ssdp.reply(self) if self.conn.hsrv.ssdp:
return self.conn.hsrv.ssdp.reply(self)
else:
self.reply(b"ssdp is disabled in server config", 404)
return False
if self.vpath.startswith(".cpr/dd/") and self.args.mpmc: if self.vpath.startswith(".cpr/dd/") and self.args.mpmc:
if self.args.mpmc == ".": if self.args.mpmc == ".":
@@ -2121,8 +2159,10 @@ class HttpCli(object):
return True return True
def get_pwd_cookie(self, pwd: str) -> str: def get_pwd_cookie(self, pwd: str) -> str:
if self.asrv.ah.hash(pwd) in self.asrv.iacct: hpwd = self.asrv.ah.hash(pwd)
msg = "login ok" uname = self.asrv.iacct.get(hpwd)
if uname:
msg = "hi " + uname
dur = int(60 * 60 * self.args.logout) dur = int(60 * 60 * self.args.logout)
else: else:
logpwd = pwd logpwd = pwd
@@ -2133,41 +2173,21 @@ class HttpCli(object):
logpwd = "%" + base64.b64encode(zb[:12]).decode("utf-8") logpwd = "%" + base64.b64encode(zb[:12]).decode("utf-8")
self.log("invalid password: {}".format(logpwd), 3) self.log("invalid password: {}".format(logpwd), 3)
self.cbonk(self.conn.hsrv.gpwd, pwd, "pw", "invalid passwords")
g = self.conn.hsrv.gpwd
if g.lim:
bonk, ip = g.bonk(self.ip, pwd)
if bonk:
xban = self.vn.flags.get("xban")
if not xban or not runhook(
self.log,
xban,
self.vn.canonical(self.rem),
self.vpath,
self.host,
self.uname,
time.time(),
0,
self.ip,
time.time(),
"pw",
):
self.log("client banned: invalid passwords", 1)
self.conn.hsrv.bans[ip] = bonk
msg = "naw dude" msg = "naw dude"
pwd = "x" # nosec pwd = "x" # nosec
dur = None dur = 0
if pwd == "x": if pwd == "x":
# reset both plaintext and tls # reset both plaintext and tls
# (only affects active tls cookies when tls) # (only affects active tls cookies when tls)
for k in ("cppwd", "cppws") if self.is_https else ("cppwd",): for k in ("cppwd", "cppws") if self.is_https else ("cppwd",):
ck = gencookie(k, pwd, self.args.R, False, dur) ck = gencookie(k, pwd, self.args.R, False)
self.out_headerlist.append(("Set-Cookie", ck)) self.out_headerlist.append(("Set-Cookie", ck))
else: else:
k = "cppws" if self.is_https else "cppwd" k = "cppws" if self.is_https else "cppwd"
ck = gencookie(k, pwd, self.args.R, self.is_https, dur) ck = gencookie(k, pwd, self.args.R, self.is_https, dur, "; HttpOnly")
self.out_headerlist.append(("Set-Cookie", ck)) self.out_headerlist.append(("Set-Cookie", ck))
return msg return msg
@@ -2177,26 +2197,30 @@ class HttpCli(object):
new_dir = self.parser.require("name", 512) new_dir = self.parser.require("name", 512)
self.parser.drop() self.parser.drop()
sanitized = sanitize_fn(new_dir, "", []) return self._mkdir(vjoin(self.vpath, new_dir))
return self._mkdir(vjoin(self.vpath, sanitized))
def _mkdir(self, vpath: str, dav: bool = False) -> bool: def _mkdir(self, vpath: str, dav: bool = False) -> bool:
nullwrite = self.args.nw nullwrite = self.args.nw
self.gctx = vpath
vpath = undot(vpath)
vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True) vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
self._assert_safe_rem(rem) rem = sanitize_vpath(rem, "/", [])
fn = vfs.canonical(rem) fn = vfs.canonical(rem)
if not fn.startswith(vfs.realpath):
self.log("invalid mkdir [%s] [%s]" % (self.gctx, vpath), 1)
raise Pebkac(422)
if not nullwrite: if not nullwrite:
fdir = os.path.dirname(fn) fdir = os.path.dirname(fn)
if not bos.path.isdir(fdir): if dav and not bos.path.isdir(fdir):
raise Pebkac(409, "parent folder does not exist") raise Pebkac(409, "parent folder does not exist")
if bos.path.isdir(fn): if bos.path.isdir(fn):
raise Pebkac(405, "that folder exists already") raise Pebkac(405, 'folder "/%s" already exists' % (vpath,))
try: try:
bos.mkdir(fn) bos.makedirs(fn)
except OSError as ex: except OSError as ex:
if ex.errno == errno.EACCES: if ex.errno == errno.EACCES:
raise Pebkac(500, "the server OS denied write-access") raise Pebkac(500, "the server OS denied write-access")
@@ -2205,7 +2229,7 @@ class HttpCli(object):
except: except:
raise Pebkac(500, min_ex()) raise Pebkac(500, min_ex())
self.out_headers["X-New-Dir"] = quotep(vpath.split("/")[-1]) self.out_headers["X-New-Dir"] = quotep(vpath)
if dav: if dav:
self.reply(b"", 201) self.reply(b"", 201)
@@ -2726,6 +2750,29 @@ class HttpCli(object):
return file_lastmod, True return file_lastmod, True
def _expand(self, txt: str, phs: list[str]) -> str:
for ph in phs:
if ph.startswith("hdr."):
sv = str(self.headers.get(ph[4:], ""))
elif ph.startswith("self."):
sv = str(getattr(self, ph[5:], ""))
elif ph.startswith("cfg."):
sv = str(getattr(self.args, ph[4:], ""))
elif ph.startswith("vf."):
sv = str(self.vn.flags.get(ph[3:]) or "")
elif ph == "srv.itime":
sv = str(int(time.time()))
elif ph == "srv.htime":
sv = datetime.now(UTC).strftime("%Y-%m-%d, %H:%M:%S")
else:
self.log("unknown placeholder in server config: [%s]" % (ph), 3)
continue
sv = self.conn.hsrv.ptn_hsafe.sub("_", sv)
txt = txt.replace("{{%s}}" % (ph,), sv)
return txt
def tx_file(self, req_path: str) -> bool: def tx_file(self, req_path: str) -> bool:
status = 200 status = 200
logmsg = "{:4} {} ".format("", self.req) logmsg = "{:4} {} ".format("", self.req)
@@ -3052,7 +3099,7 @@ class HttpCli(object):
self.reply(ico, mime=mime, headers={"Last-Modified": lm}) self.reply(ico, mime=mime, headers={"Last-Modified": lm})
return True return True
def tx_md(self, fs_path: str) -> bool: def tx_md(self, vn: VFS, fs_path: str) -> bool:
logmsg = " %s @%s " % (self.req, self.uname) logmsg = " %s @%s " % (self.req, self.uname)
if not self.can_write: if not self.can_write:
@@ -3069,9 +3116,16 @@ class HttpCli(object):
st = bos.stat(html_path) st = bos.stat(html_path)
ts_html = st.st_mtime ts_html = st.st_mtime
max_sz = 1024 * self.args.txt_max
sz_md = 0 sz_md = 0
lead = b"" lead = b""
fullfile = b""
for buf in yieldfile(fs_path): for buf in yieldfile(fs_path):
if sz_md < max_sz:
fullfile += buf
else:
fullfile = b""
if not sz_md and b"\n" in buf[:2]: if not sz_md and b"\n" in buf[:2]:
lead = buf[: buf.find(b"\n") + 1] lead = buf[: buf.find(b"\n") + 1]
sz_md += len(lead) sz_md += len(lead)
@@ -3080,6 +3134,21 @@ class HttpCli(object):
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]: for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v sz_md += (len(buf) - len(buf.replace(c, b""))) * v
if (
fullfile
and "exp" in vn.flags
and "edit" not in self.uparam
and "edit2" not in self.uparam
and vn.flags.get("exp_md")
):
fulltxt = fullfile.decode("utf-8", "replace")
fulltxt = self._expand(fulltxt, vn.flags.get("exp_md") or [])
fullfile = fulltxt.encode("utf-8", "replace")
if fullfile:
fullfile = html_bescape(fullfile)
sz_md = len(lead) + len(fullfile)
file_ts = int(max(ts_md, ts_html, self.E.t0)) file_ts = int(max(ts_md, ts_html, self.E.t0))
file_lastmod, do_send = self._chk_lastmod(file_ts) file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod self.out_headers["Last-Modified"] = file_lastmod
@@ -3121,8 +3190,11 @@ class HttpCli(object):
try: try:
self.s.sendall(html[0] + lead) self.s.sendall(html[0] + lead)
for buf in yieldfile(fs_path): if fullfile:
self.s.sendall(html_bescape(buf)) self.s.sendall(fullfile)
else:
for buf in yieldfile(fs_path):
self.s.sendall(html_bescape(buf))
self.s.sendall(html[1]) self.s.sendall(html[1])
@@ -3243,7 +3315,7 @@ class HttpCli(object):
if v == "y": if v == "y":
dur = 86400 * 299 dur = 86400 * 299
else: else:
dur = None dur = 0
v = "x" v = "x"
ck = gencookie("k304", v, self.args.R, False, dur) ck = gencookie("k304", v, self.args.R, False, dur)
@@ -3253,7 +3325,7 @@ class HttpCli(object):
def setck(self) -> bool: def setck(self) -> bool:
k, v = self.uparam["setck"].split("=", 1) k, v = self.uparam["setck"].split("=", 1)
t = None if v == "" else 86400 * 299 t = 0 if v == "" else 86400 * 299
ck = gencookie(k, v, self.args.R, False, t) ck = gencookie(k, v, self.args.R, False, t)
self.out_headerlist.append(("Set-Cookie", ck)) self.out_headerlist.append(("Set-Cookie", ck))
self.reply(b"o7\n") self.reply(b"o7\n")
@@ -3261,7 +3333,7 @@ class HttpCli(object):
def set_cfg_reset(self) -> bool: def set_cfg_reset(self) -> bool:
for k in ("k304", "js", "idxh", "cppwd", "cppws"): for k in ("k304", "js", "idxh", "cppwd", "cppws"):
cookie = gencookie(k, "x", self.args.R, False, None) cookie = gencookie(k, "x", self.args.R, False)
self.out_headerlist.append(("Set-Cookie", cookie)) self.out_headerlist.append(("Set-Cookie", cookie))
self.redirect("", "?h#cc") self.redirect("", "?h#cc")
@@ -3753,7 +3825,7 @@ class HttpCli(object):
or "edit2" in self.uparam or "edit2" in self.uparam
) )
): ):
return self.tx_md(abspath) return self.tx_md(vn, abspath)
return self.tx_file(abspath) return self.tx_file(abspath)
@@ -3815,6 +3887,10 @@ class HttpCli(object):
if bos.path.exists(fn): if bos.path.exists(fn):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
logues[n] = f.read().decode("utf-8") logues[n] = f.read().decode("utf-8")
if "exp" in vn.flags:
logues[n] = self._expand(
logues[n], vn.flags.get("exp_lg") or []
)
readme = "" readme = ""
if not self.args.no_readme and not logues[1]: if not self.args.no_readme and not logues[1]:
@@ -3824,6 +3900,8 @@ class HttpCli(object):
with open(fsenc(fn), "rb") as f: with open(fsenc(fn), "rb") as f:
readme = f.read().decode("utf-8") readme = f.read().decode("utf-8")
break break
if readme and "exp" in vn.flags:
readme = self._expand(readme, vn.flags.get("exp_md") or [])
vf = vn.flags vf = vn.flags
unlist = vf.get("unlist", "") unlist = vf.get("unlist", "")
@@ -4134,6 +4212,12 @@ class HttpCli(object):
if sz < 1024 * self.args.txt_max: if sz < 1024 * self.args.txt_max:
with open(fsenc(docpath), "rb") as f: with open(fsenc(docpath), "rb") as f:
doctxt = f.read().decode("utf-8", "replace") doctxt = f.read().decode("utf-8", "replace")
if doc.lower().endswith(".md") and "exp" in vn.flags:
doctxt = self._expand(doctxt, vn.flags.get("exp_md") or [])
else:
self.log("doc 2big: [{}]".format(doc), c=6)
doctxt = "( size of textfile exceeds serverside limit )"
else: else:
self.log("doc 404: [{}]".format(doc), c=6) self.log("doc 404: [{}]".format(doc), c=6)
doctxt = "( textfile not found )" doctxt = "( textfile not found )"

View File

@@ -128,6 +128,9 @@ class HttpSrv(object):
self.u2fh = FHC() self.u2fh = FHC()
self.metrics = Metrics(self) self.metrics = Metrics(self)
self.nreq = 0
self.nsus = 0
self.nban = 0
self.srvs: list[socket.socket] = [] self.srvs: list[socket.socket] = []
self.ncli = 0 # exact self.ncli = 0 # exact
self.clients: set[HttpConn] = set() # laggy self.clients: set[HttpConn] = set() # laggy
@@ -149,6 +152,7 @@ class HttpSrv(object):
self._build_statics() self._build_statics()
self.ptn_cc = re.compile(r"[\x00-\x1f]") self.ptn_cc = re.compile(r"[\x00-\x1f]")
self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split() self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
if not self.args.no_dav: if not self.args.no_dav:

View File

@@ -8,7 +8,7 @@ import re
from .__init__ import PY2 from .__init__ import PY2
from .th_srv import HAVE_PIL, HAVE_PILF from .th_srv import HAVE_PIL, HAVE_PILF
from .util import BytesIO from .util import BytesIO # type: ignore
class Ico(object): class Ico(object):
@@ -22,7 +22,7 @@ class Ico(object):
ext = bext.decode("utf-8") ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4] zb = hashlib.sha1(bext).digest()[2:4]
if PY2: if PY2:
zb = [ord(x) for x in zb] zb = [ord(x) for x in zb] # type: ignore
c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3) c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3)
c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1) c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 0.8 if HAVE_PILF else 1, 1)
@@ -91,20 +91,6 @@ class Ico(object):
img.save(buf, format="PNG", compress_level=1) img.save(buf, format="PNG", compress_level=1)
return "image/png", buf.getvalue() return "image/png", buf.getvalue()
elif False:
# 48s, too slow
import pyvips
h = int(192 * h / w)
w = 192
img = pyvips.Image.text(
ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE
)
img = img.ifthenelse(ci[3:], ci[:3], blend=True)
# i = i.resize(3, kernel=pyvips.Kernel.NEAREST)
buf = img.write_to_buffer(".png[compression=1]")
return "image/png", buf
svg = """\ svg = """\
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g> <svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>

View File

@@ -34,14 +34,23 @@ class Metrics(object):
ret: list[str] = [] ret: list[str] = []
def addc(k: str, unit: str, v: str, desc: str) -> None: def addc(k: str, v: str, desc: str) -> None:
if unit: zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
k += "_" + unit ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))
zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v)) def adduc(k: str, unit: str, v: str, desc: str) -> None:
else: k += "_" + unit
zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s" zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v)) ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
def addg(k: str, v: str, desc: str) -> None:
zs = "# TYPE %s gauge\n# HELP %s %s\n%s %s"
ret.append(zs % (k, k, desc, k, v))
def addug(k: str, unit: str, v: str, desc: str) -> None:
k += "_" + unit
zs = "# TYPE %s gauge\n# UNIT %s %s\n# HELP %s %s\n%s %s"
ret.append(zs % (k, k, unit, k, desc, k, v))
def addh(k: str, typ: str, desc: str) -> None: def addh(k: str, typ: str, desc: str) -> None:
zs = "# TYPE %s %s\n# HELP %s %s" zs = "# TYPE %s %s\n# HELP %s %s"
@@ -54,17 +63,75 @@ class Metrics(object):
def addv(k: str, v: str) -> None: def addv(k: str, v: str) -> None:
ret.append("%s %s" % (k, v)) ret.append("%s %s" % (k, v))
t = "time since last copyparty restart"
v = "{:.3f}".format(time.time() - self.hsrv.t0) v = "{:.3f}".format(time.time() - self.hsrv.t0)
addc("cpp_uptime", "seconds", v, "time since last server restart") addug("cpp_uptime", "seconds", v, t)
# timestamps are gauges because initial value is not zero
t = "unixtime of last copyparty restart"
v = "{:.3f}".format(self.hsrv.t0)
addug("cpp_boot_unixtime", "seconds", v, t)
t = "number of open http(s) client connections"
addg("cpp_http_conns", str(self.hsrv.ncli), t)
t = "number of http(s) requests since last restart"
addc("cpp_http_reqs", str(self.hsrv.nreq), t)
t = "number of 403/422/malicious reqs since restart"
addc("cpp_sus_reqs", str(self.hsrv.nsus), t)
v = str(len(conn.bans or [])) v = str(len(conn.bans or []))
addc("cpp_bans", "", v, "number of banned IPs") addg("cpp_active_bans", v, "number of currently banned IPs")
t = "number of IPs banned since last restart"
addg("cpp_total_bans", str(self.hsrv.nban), t)
if not args.nos_vst:
x = self.hsrv.broker.ask("up2k.get_state")
vs = json.loads(x.get())
nvidle = 0
nvbusy = 0
nvoffline = 0
for v in vs["volstate"].values():
if v == "online, idle":
nvidle += 1
elif "OFFLINE" in v:
nvoffline += 1
else:
nvbusy += 1
addg("cpp_idle_vols", str(nvidle), "number of idle/ready volumes")
addg("cpp_busy_vols", str(nvbusy), "number of busy/indexing volumes")
addg("cpp_offline_vols", str(nvoffline), "number of offline volumes")
t = "time since last database activity (upload/rename/delete)"
addug("cpp_db_idle", "seconds", str(vs["dbwt"]), t)
t = "unixtime of last database activity (upload/rename/delete)"
addug("cpp_db_act", "seconds", str(vs["dbwu"]), t)
t = "number of files queued for hashing/indexing"
addg("cpp_hashing_files", str(vs["hashq"]), t)
t = "number of files queued for metadata scanning"
addg("cpp_tagq_files", str(vs["tagq"]), t)
try:
t = "number of files queued for plugin-based analysis"
addg("cpp_mtpq_files", str(int(vs["mtpq"])), t)
except:
pass
if not args.nos_hdd: if not args.nos_hdd:
addbh("cpp_disk_size_bytes", "total HDD size of volume") addbh("cpp_disk_size_bytes", "total HDD size of volume")
addbh("cpp_disk_free_bytes", "free HDD space in volume") addbh("cpp_disk_free_bytes", "free HDD space in volume")
for vpath, vol in allvols: for vpath, vol in allvols:
free, total = get_df(vol.realpath) free, total = get_df(vol.realpath)
if free is None or total is None:
continue
addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total)) addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free)) addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))
@@ -161,5 +228,6 @@ class Metrics(object):
ret.append("# EOF") ret.append("# EOF")
mime = "application/openmetrics-text; version=1.0.0; charset=utf-8" mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
mime = cli.uparam.get("mime") or mime
cli.reply("\n".join(ret).encode("utf-8"), mime=mime) cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
return True return True

View File

@@ -118,7 +118,7 @@ def ffprobe(
b"--", b"--",
fsenc(abspath), fsenc(abspath),
] ]
rc, so, se = runcmd(cmd, timeout=timeout) rc, so, se = runcmd(cmd, timeout=timeout, nice=True)
retchk(rc, cmd, se) retchk(rc, cmd, se)
return parse_ffprobe(so) return parse_ffprobe(so)
@@ -261,7 +261,8 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
if ".resw" in ret and ".resh" in ret: if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"]) ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
zd = {k: (0, v) for k, v in ret.items()} zero = int("0")
zd = {k: (zero, v) for k, v in ret.items()}
return zd, md return zd, md
@@ -562,6 +563,7 @@ class MTag(object):
args = { args = {
"env": env, "env": env,
"nice": True,
"timeout": parser.timeout, "timeout": parser.timeout,
"kill": parser.kill, "kill": parser.kill,
"capture": parser.capture, "capture": parser.capture,
@@ -572,11 +574,6 @@ class MTag(object):
zd.update(ret) zd.update(ret)
args["sin"] = json.dumps(zd).encode("utf-8", "replace") args["sin"] = json.dumps(zd).encode("utf-8", "replace")
if WINDOWS:
args["creationflags"] = 0x4000
else:
cmd = ["nice"] + cmd
bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])] bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])]
rc, v, err = runcmd(bcmd, **args) # type: ignore rc, v, err = runcmd(bcmd, **args) # type: ignore
retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v) retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v)

View File

@@ -136,8 +136,12 @@ class PWHash(object):
import getpass import getpass
while True: while True:
p1 = getpass.getpass("password> ") try:
p2 = getpass.getpass("again or just hit ENTER> ") p1 = getpass.getpass("password> ")
p2 = getpass.getpass("again or just hit ENTER> ")
except EOFError:
return
if p2 and p1 != p2: if p2 and p1 != p2:
print("\033[31minputs don't match; try again\033[0m", file=sys.stderr) print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
continue continue

View File

@@ -406,6 +406,7 @@ class SMB(object):
smbserver.os.path.abspath = self._hook smbserver.os.path.abspath = self._hook
smbserver.os.path.expanduser = self._hook smbserver.os.path.expanduser = self._hook
smbserver.os.path.expandvars = self._hook
smbserver.os.path.getatime = self._hook smbserver.os.path.getatime = self._hook
smbserver.os.path.getctime = self._hook smbserver.os.path.getctime = self._hook
smbserver.os.path.getmtime = self._hook smbserver.os.path.getmtime = self._hook

View File

@@ -36,16 +36,17 @@ from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k from .up2k import Up2k
from .util import ( from .util import (
FFMPEG_URL, DEF_EXP,
VERSIONS,
Daemon,
DEF_MTE, DEF_MTE,
DEF_MTH, DEF_MTH,
FFMPEG_URL,
UTC,
VERSIONS,
Daemon,
Garda, Garda,
HLog, HLog,
HMaccas, HMaccas,
ODict, ODict,
UTC,
alltrace, alltrace,
ansi_re, ansi_re,
min_ex, min_ex,
@@ -410,13 +411,14 @@ class SvcHub(object):
if not vl: if not vl:
continue continue
vl = [os.path.expanduser(x) if x.startswith("~") else x for x in vl] vl = [os.path.expandvars(os.path.expanduser(x)) for x in vl]
setattr(al, k, vl) setattr(al, k, vl)
for k in "lo hist ssl_log".split(" "): for k in "lo hist ssl_log".split(" "):
vs = getattr(al, k) vs = getattr(al, k)
if vs and vs.startswith("~"): if vs:
setattr(al, k, os.path.expanduser(vs)) vs = os.path.expandvars(os.path.expanduser(vs))
setattr(al, k, vs)
for k in "sus_urls nonsus_urls".split(" "): for k in "sus_urls nonsus_urls".split(" "):
vs = getattr(al, k) vs = getattr(al, k)
@@ -430,11 +432,9 @@ class SvcHub(object):
elif al.ban_url == "no": elif al.ban_url == "no":
al.sus_urls = None al.sus_urls = None
if al.xff_src in ("any", "0", ""): al.xff_re = self._ipa2re(al.xff_src)
al.xff_re = None al.ipa_re = self._ipa2re(al.ipa)
else: al.ftp_ipa_re = self._ipa2re(al.ftp_ipa or al.ipa)
zs = al.xff_src.replace(" ", "").replace(".", "\\.").replace(",", "|")
al.xff_re = re.compile("^(?:" + zs + ")")
mte = ODict.fromkeys(DEF_MTE.split(","), True) mte = ODict.fromkeys(DEF_MTE.split(","), True)
al.mte = odfusion(mte, al.mte) al.mte = odfusion(mte, al.mte)
@@ -442,6 +442,10 @@ class SvcHub(object):
mth = ODict.fromkeys(DEF_MTH.split(","), True) mth = ODict.fromkeys(DEF_MTH.split(","), True)
al.mth = odfusion(mth, al.mth) al.mth = odfusion(mth, al.mth)
exp = ODict.fromkeys(DEF_EXP.split(" "), True)
al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))
for k in ["no_hash", "no_idx"]: for k in ["no_hash", "no_idx"]:
ptn = getattr(self.args, k) ptn = getattr(self.args, k)
if ptn: if ptn:
@@ -449,6 +453,13 @@ class SvcHub(object):
return True return True
def _ipa2re(self, txt) -> Optional[re.Pattern]:
if txt in ("any", "0", ""):
return None
zs = txt.replace(" ", "").replace(".", "\\.").replace(",", "|")
return re.compile("^(?:" + zs + ")")
def _setlimits(self) -> None: def _setlimits(self) -> None:
try: try:
import resource import resource
@@ -511,12 +522,17 @@ class SvcHub(object):
sel_fn = "{}.{}".format(fn, ctr) sel_fn = "{}.{}".format(fn, ctr)
fn = sel_fn fn = sel_fn
try:
os.makedirs(os.path.dirname(fn))
except:
pass
try: try:
if do_xz: if do_xz:
import lzma import lzma
lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0) lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
self.args.no_logflush = True
else: else:
lh = open(fn, "wt", encoding="utf-8", errors="replace") lh = open(fn, "wt", encoding="utf-8", errors="replace")
except: except:
@@ -746,7 +762,24 @@ class SvcHub(object):
(zd.hour * 100 + zd.minute) * 100 + zd.second, (zd.hour * 100 + zd.minute) * 100 + zd.second,
zd.microsecond // self.log_div, zd.microsecond // self.log_div,
) )
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
if c and not self.args.no_ansi:
if isinstance(c, int):
msg = "\033[3%sm%s\033[0m" % (c, msg)
elif "\033" not in c:
msg = "\033[%sm%s\033[0m" % (c, msg)
else:
msg = "%s%s\033[0m" % (c, msg)
if "\033" in src:
src += "\033[0m"
if "\033" in msg:
msg += "\033[0m"
self.logf.write("@%s [%s] %s\n" % (ts, src, msg))
if not self.args.no_logflush:
self.logf.flush()
now = time.time() now = time.time()
if now >= self.next_day: if now >= self.next_day:
@@ -822,6 +855,8 @@ class SvcHub(object):
if self.logf: if self.logf:
self.logf.write(msg) self.logf.write(msg)
if not self.args.no_logflush:
self.logf.flush()
def pr(self, *a: Any, **ka: Any) -> None: def pr(self, *a: Any, **ka: Any) -> None:
try: try:

View File

@@ -18,7 +18,7 @@ from .bos import bos
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
from .util import ( from .util import (
FFMPEG_URL, FFMPEG_URL,
BytesIO, BytesIO, # type: ignore
Cooldown, Cooldown,
Daemon, Daemon,
Pebkac, Pebkac,
@@ -411,6 +411,7 @@ class ThumbSrv(object):
if c == crops[-1]: if c == crops[-1]:
raise raise
assert img # type: ignore
img.write_to_file(tpath, Q=40) img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None: def conv_ffmpeg(self, abspath: str, tpath: str, fmt: str, vn: VFS) -> None:
@@ -468,7 +469,7 @@ class ThumbSrv(object):
def _run_ff(self, cmd: list[bytes], vn: VFS) -> None: def _run_ff(self, cmd: list[bytes], vn: VFS) -> None:
# self.log((b" ".join(cmd)).decode("utf-8")) # self.log((b" ".join(cmd)).decode("utf-8"))
ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"]) ret, _, serr = runcmd(cmd, timeout=vn.flags["convt"], nice=True)
if not ret: if not ret:
return return

View File

@@ -9,6 +9,7 @@ import time
from operator import itemgetter from operator import itemgetter
from .__init__ import ANYWIN, TYPE_CHECKING, unicode from .__init__ import ANYWIN, TYPE_CHECKING, unicode
from .authsrv import LEELOO_DALLAS
from .bos import bos from .bos import bos
from .up2k import up2k_wark_from_hashlist from .up2k import up2k_wark_from_hashlist
from .util import ( from .util import (
@@ -20,6 +21,7 @@ from .util import (
min_ex, min_ex,
quotep, quotep,
s3dec, s3dec,
vjoin,
) )
if HAVE_SQLITE3: if HAVE_SQLITE3:
@@ -282,6 +284,11 @@ class U2idx(object):
have_mt: bool, have_mt: bool,
lim: int, lim: int,
) -> tuple[list[dict[str, Any]], list[str], bool]: ) -> tuple[list[dict[str, Any]], list[str], bool]:
if self.args.srch_dbg:
t = "searching across all %s volumes in which the user has 'r' (full read access):\n %s"
zs = "\n ".join(["/%s = %s" % (x[0], x[1]) for x in vols])
self.log(t % (len(vols), zs), 5)
done_flag: list[bool] = [] done_flag: list[bool] = []
self.active_id = "{:.6f}_{}".format( self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident time.time(), threading.current_thread().ident
@@ -300,13 +307,31 @@ class U2idx(object):
ret = [] ret = []
seen_rps: set[str] = set() seen_rps: set[str] = set()
lim = min(lim, int(self.args.srch_hits)) clamp = int(self.args.srch_hits)
if lim >= clamp:
lim = clamp
clamped = True
else:
clamped = False
taglist = {} taglist = {}
for (vtop, ptop, flags) in vols: for (vtop, ptop, flags) in vols:
if lim < 0:
break
cur = self.get_cur(ptop) cur = self.get_cur(ptop)
if not cur: if not cur:
continue continue
excl = []
for vp2 in self.asrv.vfs.all_vols.keys():
if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
excl.append(vp2[len(vtop) :].lstrip("/"))
if self.args.srch_dbg:
t = "searching in volume /%s (%s), excludelist %s"
self.log(t % (vtop, ptop, excl), 5)
self.active_cur = cur self.active_cur = cur
vuv = [] vuv = []
@@ -327,6 +352,13 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"): if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn) rd, fn = s3dec(rd, fn)
if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
if self.args.srch_dbg:
zs = vjoin(vjoin(vtop, rd), fn)
t = "database inconsistency in volume '/%s'; ignoring: %s"
self.log(t % (vtop, zs), 1)
continue
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
if not dots and "/." in ("/" + rp): if not dots and "/." in ("/" + rp):
continue continue
@@ -355,6 +387,19 @@ class U2idx(object):
if lim < 0: if lim < 0:
break break
if self.args.srch_dbg:
t = "in volume '/%s': hit: %s"
self.log(t % (vtop, rp), 5)
zs = vjoin(vtop, rp)
chk_vn, _ = self.asrv.vfs.get(zs, LEELOO_DALLAS, True, False)
chk_vn = chk_vn.dbv or chk_vn
if chk_vn.vpath != vtop:
raise Exception(
"database inconsistency! in volume '/%s' (%s), found file [%s] which belongs to volume '/%s' (%s)"
% (vtop, ptop, zs, chk_vn.vpath, chk_vn.realpath)
)
seen_rps.add(rp) seen_rps.add(rp)
sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]}) sret.append({"ts": int(ts), "sz": sz, "rp": rp + suf, "w": w[:16]})
@@ -372,12 +417,16 @@ class U2idx(object):
ret.extend(sret) ret.extend(sret)
# print("[{}] {}".format(ptop, sret)) # print("[{}] {}".format(ptop, sret))
if self.args.srch_dbg:
t = "in volume '/%s': got %d hits, %d total so far"
self.log(t % (vtop, len(sret), len(ret)), 5)
done_flag.append(True) done_flag.append(True)
self.active_id = "" self.active_id = ""
ret.sort(key=itemgetter("rp")) ret.sort(key=itemgetter("rp"))
return ret, list(taglist.keys()), lim < 0 return ret, list(taglist.keys()), lim < 0 and not clamped
def terminator(self, identifier: str, done_flag: list[bool]) -> None: def terminator(self, identifier: str, done_flag: list[bool]) -> None:
for _ in range(self.timeout): for _ in range(self.timeout):

View File

@@ -65,6 +65,11 @@ from .util import (
w8b64enc, w8b64enc,
) )
try:
from pathlib import Path
except:
pass
if HAVE_SQLITE3: if HAVE_SQLITE3:
import sqlite3 import sqlite3
@@ -261,6 +266,7 @@ class Up2k(object):
"hashq": self.n_hashq, "hashq": self.n_hashq,
"tagq": self.n_tagq, "tagq": self.n_tagq,
"mtpq": mtpq, "mtpq": mtpq,
"dbwu": "{:.2f}".format(self.db_act),
"dbwt": "{:.2f}".format( "dbwt": "{:.2f}".format(
min(1000 * 24 * 60 * 60 - 1, time.time() - self.db_act) min(1000 * 24 * 60 * 60 - 1, time.time() - self.db_act)
), ),
@@ -789,6 +795,11 @@ class Up2k(object):
except: except:
return None return None
vpath = "?"
for k, v in self.asrv.vfs.all_vols.items():
if v.realpath == ptop:
vpath = k
_, flags = self._expr_idx_filter(flags) _, flags = self._expr_idx_filter(flags)
ft = "\033[0;32m{}{:.0}" ft = "\033[0;32m{}{:.0}"
@@ -814,7 +825,7 @@ class Up2k(object):
if str(fl[k1]) == str(getattr(self.args, k2)): if str(fl[k1]) == str(getattr(self.args, k2)):
del fl[k1] del fl[k1]
else: else:
fl[k1] = ",".join(x for x in fl) fl[k1] = ",".join(x for x in fl[k1])
a = [ a = [
(ft if v is True else ff if v is False else fv).format(k, str(v)) (ft if v is True else ff if v is False else fv).format(k, str(v))
for k, v in fl.items() for k, v in fl.items()
@@ -824,17 +835,9 @@ class Up2k(object):
a = ["\033[90mall-default"] a = ["\033[90mall-default"]
if a: if a:
vpath = "?"
for k, v in self.asrv.vfs.all_vols.items():
if v.realpath == ptop:
vpath = k
if vpath:
vpath += "/"
zs = " ".join(sorted(a)) zs = " ".join(sorted(a))
zs = zs.replace("90mre.compile(", "90m(") # nohash zs = zs.replace("90mre.compile(", "90m(") # nohash
self.log("/{} {}".format(vpath, zs), "35") self.log("/{} {}".format(vpath + ("/" if vpath else ""), zs), "35")
reg = {} reg = {}
drp = None drp = None
@@ -879,14 +882,13 @@ class Up2k(object):
try: try:
if bos.makedirs(histpath): if bos.makedirs(histpath):
hidedir(histpath) hidedir(histpath)
except: except Exception as ex:
t = "failed to initialize volume '/%s': %s"
self.log(t % (vpath, ex), 1)
return None return None
try: try:
cur = self._open_db(db_path) cur = self._open_db(db_path)
self.cur[ptop] = cur
self.volsize[cur] = 0
self.volnfiles[cur] = 0
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb) # speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
dbd = flags["dbd"] dbd = flags["dbd"]
@@ -920,6 +922,13 @@ class Up2k(object):
cur.execute("pragma synchronous=" + sync) cur.execute("pragma synchronous=" + sync)
cur.connection.commit() cur.connection.commit()
self._verify_db_cache(cur, vpath)
self.cur[ptop] = cur
self.volsize[cur] = 0
self.volnfiles[cur] = 0
return cur, db_path return cur, db_path
except: except:
msg = "cannot use database at [{}]:\n{}" msg = "cannot use database at [{}]:\n{}"
@@ -927,6 +936,25 @@ class Up2k(object):
return None return None
def _verify_db_cache(self, cur: "sqlite3.Cursor", vpath: str) -> None:
# check if volume config changed since last use; drop caches if so
zsl = [vpath] + list(sorted(self.asrv.vfs.all_vols.keys()))
zb = hashlib.sha1("\n".join(zsl).encode("utf-8", "replace")).digest()
vcfg = base64.urlsafe_b64encode(zb[:18]).decode("ascii")
c = cur.execute("select v from kv where k = 'volcfg'")
try:
(oldcfg,) = c.fetchone()
except:
oldcfg = ""
if oldcfg != vcfg:
cur.execute("delete from kv where k = 'volcfg'")
cur.execute("delete from dh")
cur.execute("delete from cv")
cur.execute("insert into kv values ('volcfg',?)", (vcfg,))
cur.connection.commit()
def _build_file_index(self, vol: VFS, all_vols: list[VFS]) -> tuple[bool, bool]: def _build_file_index(self, vol: VFS, all_vols: list[VFS]) -> tuple[bool, bool]:
do_vac = False do_vac = False
top = vol.realpath top = vol.realpath
@@ -1257,8 +1285,8 @@ class Up2k(object):
db.t = time.time() db.t = time.time()
if not self.args.no_dhash: if not self.args.no_dhash:
db.c.execute("delete from dh where d = ?", (drd,)) db.c.execute("delete from dh where d = ?", (drd,)) # type: ignore
db.c.execute("insert into dh values (?,?)", (drd, dhash)) db.c.execute("insert into dh values (?,?)", (drd, dhash)) # type: ignore
if self.stop: if self.stop:
return -1 return -1
@@ -1277,7 +1305,7 @@ class Up2k(object):
if n: if n:
t = "forgetting {} shadowed autoindexed files in [{}] > [{}]" t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
self.log(t.format(n, top, sh_rd)) self.log(t.format(n, top, sh_rd))
assert sh_erd assert sh_erd # type: ignore
q = "delete from dh where (d = ? or d like ?||'%')" q = "delete from dh where (d = ? or d like ?||'%')"
db.c.execute(q, (sh_erd, sh_erd + "/")) db.c.execute(q, (sh_erd, sh_erd + "/"))
@@ -2176,7 +2204,7 @@ class Up2k(object):
t = "native sqlite3 backup failed; using fallback method:\n" t = "native sqlite3 backup failed; using fallback method:\n"
self.log(t + min_ex()) self.log(t + min_ex())
finally: finally:
c2.close() c2.close() # type: ignore
db = cur.connection db = cur.connection
cur.close() cur.close()
@@ -2633,7 +2661,12 @@ class Up2k(object):
not ret["hash"] not ret["hash"]
and "fk" in vfs.flags and "fk" in vfs.flags
and not self.args.nw and not self.args.nw
and (cj["user"] in vfs.axs.uread or cj["user"] in vfs.axs.upget) and (
cj["user"] in vfs.axs.uread
or cj["user"] in vfs.axs.upget
or "*" in vfs.axs.uread
or "*" in vfs.axs.upget
)
): ):
alg = 2 if "fka" in vfs.flags else 1 alg = 2 if "fka" in vfs.flags else 1
ap = absreal(djoin(job["ptop"], job["prel"], job["name"])) ap = absreal(djoin(job["ptop"], job["prel"], job["name"]))
@@ -2723,7 +2756,18 @@ class Up2k(object):
raise Exception("symlink-fallback disabled in cfg") raise Exception("symlink-fallback disabled in cfg")
if not linked: if not linked:
os.symlink(fsenc(lsrc), fsenc(ldst)) if ANYWIN:
Path(ldst).symlink_to(lsrc)
if not bos.path.exists(dst):
try:
bos.unlink(dst)
except:
pass
t = "the created symlink [%s] did not resolve to [%s]"
raise Exception(t % (ldst, lsrc))
else:
os.symlink(fsenc(lsrc), fsenc(ldst))
linked = True linked = True
except Exception as ex: except Exception as ex:
self.log("cannot link; creating copy: " + repr(ex)) self.log("cannot link; creating copy: " + repr(ex))
@@ -3904,45 +3948,58 @@ class Up2k(object):
self.n_hashq -= 1 self.n_hashq -= 1
# self.log("hashq {}".format(self.n_hashq)) # self.log("hashq {}".format(self.n_hashq))
ptop, vtop, rd, fn, ip, at, usr, skip_xau = self.hashq.get() task = self.hashq.get()
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn)) if len(task) != 8:
if "e2d" not in self.flags[ptop]: raise Exception("invalid hash task")
continue
abspath = djoin(ptop, rd, fn) try:
self.log("hashing " + abspath) if not self._hash_t(task):
inf = bos.stat(abspath)
if not inf.st_size:
wark = up2k_wark_from_metadata(
self.salt, inf.st_size, int(inf.st_mtime), rd, fn
)
else:
hashes = self._hashlist_from_file(abspath)
if not hashes:
return return
except Exception as ex:
self.log("failed to hash %s: %s" % (task, ex), 1)
wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes) def _hash_t(self, task: tuple[str, str, str, str, str, float, str, bool]) -> bool:
ptop, vtop, rd, fn, ip, at, usr, skip_xau = task
# self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
if "e2d" not in self.flags[ptop]:
return True
with self.mutex: abspath = djoin(ptop, rd, fn)
self.idx_wark( self.log("hashing " + abspath)
self.flags[ptop], inf = bos.stat(abspath)
rd, if not inf.st_size:
fn, wark = up2k_wark_from_metadata(
inf.st_mtime, self.salt, inf.st_size, int(inf.st_mtime), rd, fn
inf.st_size, )
ptop, else:
vtop, hashes = self._hashlist_from_file(abspath)
wark, if not hashes:
"", return False
usr,
ip,
at,
skip_xau,
)
if at and time.time() - at > 30: wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
with self.rescan_cond:
self.rescan_cond.notify_all() with self.mutex:
self.idx_wark(
self.flags[ptop],
rd,
fn,
inf.st_mtime,
inf.st_size,
ptop,
vtop,
wark,
"",
usr,
ip,
at,
skip_xau,
)
if at and time.time() - at > 30:
with self.rescan_cond:
self.rescan_cond.notify_all()
return True
def hash_file( def hash_file(
self, self,

View File

@@ -115,6 +115,11 @@ if True: # pylint: disable=using-constant-test
import typing import typing
from typing import Any, Generator, Optional, Pattern, Protocol, Union from typing import Any, Generator, Optional, Pattern, Protocol, Union
try:
from typing import LiteralString
except:
pass
class RootLogger(Protocol): class RootLogger(Protocol):
def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None: def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:
return None return None
@@ -144,15 +149,15 @@ if not PY2:
from urllib.parse import quote_from_bytes as quote from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote from urllib.parse import unquote_to_bytes as unquote
else: else:
from StringIO import StringIO as BytesIO from StringIO import StringIO as BytesIO # type: ignore
from urllib import quote # pylint: disable=no-name-in-module from urllib import quote # type: ignore # pylint: disable=no-name-in-module
from urllib import unquote # pylint: disable=no-name-in-module from urllib import unquote # type: ignore # pylint: disable=no-name-in-module
try: try:
struct.unpack(b">i", b"idgi") struct.unpack(b">i", b"idgi")
spack = struct.pack spack = struct.pack # type: ignore
sunpack = struct.unpack sunpack = struct.unpack # type: ignore
except: except:
def spack(fmt: bytes, *a: Any) -> bytes: def spack(fmt: bytes, *a: Any) -> bytes:
@@ -289,6 +294,8 @@ EXTS["vnd.mozilla.apng"] = "png"
MAGIC_MAP = {"jpeg": "jpg"} MAGIC_MAP = {"jpeg": "jpg"}
DEF_EXP = "self.ip self.ua self.uname self.host cfg.name cfg.logout vf.scan vf.thsize hdr.cf_ipcountry srv.itime srv.htime"
DEF_MTE = "circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash" DEF_MTE = "circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash"
DEF_MTH = ".vq,.aq,vc,ac,fmt,res,.fps" DEF_MTH = ".vq,.aq,vc,ac,fmt,res,.fps"
@@ -376,6 +383,7 @@ def py_desc() -> str:
def _sqlite_ver() -> str: def _sqlite_ver() -> str:
assert sqlite3 # type: ignore
try: try:
co = sqlite3.connect(":memory:") co = sqlite3.connect(":memory:")
cur = co.cursor() cur = co.cursor()
@@ -1561,8 +1569,8 @@ def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
raise Pebkac( raise Pebkac(
400, 400,
"protocol error while reading headers:\n" "protocol error while reading headers",
+ ret.decode("utf-8", "replace"), log=ret.decode("utf-8", "replace"),
) )
ofs = ret.find(b"\r\n\r\n") ofs = ret.find(b"\r\n\r\n")
@@ -1645,16 +1653,15 @@ def gen_filekey_dbg(
return ret return ret
def gencookie(k: str, v: str, r: str, tls: bool, dur: Optional[int]) -> str: def gencookie(k: str, v: str, r: str, tls: bool, dur: int = 0, txt: str = "") -> str:
v = v.replace("%", "%25").replace(";", "%3B") v = v.replace("%", "%25").replace(";", "%3B")
if dur: if dur:
exp = formatdate(time.time() + dur, usegmt=True) exp = formatdate(time.time() + dur, usegmt=True)
else: else:
exp = "Fri, 15 Aug 1997 01:00:00 GMT" exp = "Fri, 15 Aug 1997 01:00:00 GMT"
return "{}={}; Path=/{}; Expires={}{}; SameSite=Lax".format( t = "%s=%s; Path=/%s; Expires=%s%s%s; SameSite=Lax"
k, v, r, exp, "; Secure" if tls else "" return t % (k, v, r, exp, "; Secure" if tls else "", txt)
)
def humansize(sz: float, terse: bool = False) -> str: def humansize(sz: float, terse: bool = False) -> str:
@@ -1771,7 +1778,16 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
return fn.strip() return fn.strip()
def sanitize_vpath(vp: str, ok: str, bad: list[str]) -> str:
parts = vp.replace(os.sep, "/").split("/")
ret = [sanitize_fn(x, ok, bad) for x in parts]
return "/".join(ret)
def relchk(rp: str) -> str: def relchk(rp: str) -> str:
if "\x00" in rp:
return "[nul]"
if ANYWIN: if ANYWIN:
if "\n" in rp or "\r" in rp: if "\n" in rp or "\r" in rp:
return "x\nx" return "x\nx"
@@ -1807,17 +1823,20 @@ def exclude_dotfiles(filepaths: list[str]) -> list[str]:
return [x for x in filepaths if not x.split("/")[-1].startswith(".")] return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
def odfusion(base: ODict[str, bool], oth: str) -> ODict[str, bool]: def odfusion(base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str) -> ODict[str, bool]:
# merge an "ordered set" (just a dict really) with another list of keys # merge an "ordered set" (just a dict really) with another list of keys
words0 = [x for x in oth.split(",") if x]
words1 = [x for x in oth[1:].split(",") if x]
ret = base.copy() ret = base.copy()
if oth.startswith("+"): if oth.startswith("+"):
for k in oth[1:].split(","): for k in words1:
ret[k] = True ret[k] = True
elif oth[:1] in ("-", "/"): elif oth[:1] in ("-", "/"):
for k in oth[1:].split(","): for k in words1:
ret.pop(k, None) ret.pop(k, None)
else: else:
ret = ODict.fromkeys(oth.split(","), True) ret = ODict.fromkeys(words0, True)
return ret return ret
@@ -1974,10 +1993,10 @@ else:
# moonrunes become \x3f with bytestrings, # moonrunes become \x3f with bytestrings,
# losing mojibake support is worth # losing mojibake support is worth
def _not_actually_mbcs_enc(txt: str) -> bytes: def _not_actually_mbcs_enc(txt: str) -> bytes:
return txt return txt # type: ignore
def _not_actually_mbcs_dec(txt: bytes) -> str: def _not_actually_mbcs_dec(txt: bytes) -> str:
return txt return txt # type: ignore
fsenc = afsenc = sfsenc = _not_actually_mbcs_enc fsenc = afsenc = sfsenc = _not_actually_mbcs_enc
fsdec = _not_actually_mbcs_dec fsdec = _not_actually_mbcs_dec
@@ -2036,6 +2055,7 @@ def atomic_move(usrc: str, udst: str) -> None:
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]: def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
try: try:
# some fuses misbehave # some fuses misbehave
assert ctypes
if ANYWIN: if ANYWIN:
bfree = ctypes.c_ulonglong(0) bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
@@ -2438,6 +2458,7 @@ def getalive(pids: list[int], pgid: int) -> list[int]:
alive.append(pid) alive.append(pid)
else: else:
# windows doesn't have pgroups; assume # windows doesn't have pgroups; assume
assert psutil
psutil.Process(pid) psutil.Process(pid)
alive.append(pid) alive.append(pid)
except: except:
@@ -2455,6 +2476,7 @@ def killtree(root: int) -> None:
pgid = 0 pgid = 0
if HAVE_PSUTIL: if HAVE_PSUTIL:
assert psutil
pids = [root] pids = [root]
parent = psutil.Process(root) parent = psutil.Process(root)
for child in parent.children(recursive=True): for child in parent.children(recursive=True):
@@ -2494,9 +2516,34 @@ def killtree(root: int) -> None:
pass pass
def _find_nice() -> str:
if WINDOWS:
return "" # use creationflags
try:
zs = shutil.which("nice")
if zs:
return zs
except:
pass
# busted PATHs and/or py2
for zs in ("/bin", "/sbin", "/usr/bin", "/usr/sbin"):
zs += "/nice"
if os.path.exists(zs):
return zs
return ""
NICES = _find_nice()
NICEB = NICES.encode("utf-8")
def runcmd( def runcmd(
argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any argv: Union[list[bytes], list[str]], timeout: Optional[float] = None, **ka: Any
) -> tuple[int, str, str]: ) -> tuple[int, str, str]:
isbytes = isinstance(argv[0], (bytes, bytearray))
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both capture = ka.pop("capture", 3) # 0=none 1=stdout 2=stderr 3=both
@@ -2510,13 +2557,22 @@ def runcmd(
berr: bytes berr: bytes
if ANYWIN: if ANYWIN:
if isinstance(argv[0], (bytes, bytearray)): if isbytes:
if argv[0] in CMD_EXEB: if argv[0] in CMD_EXEB:
argv[0] += b".exe" argv[0] += b".exe"
else: else:
if argv[0] in CMD_EXES: if argv[0] in CMD_EXES:
argv[0] += ".exe" argv[0] += ".exe"
if ka.pop("nice", None):
if WINDOWS:
ka["creationflags"] = 0x4000
elif NICEB:
if isbytes:
argv = [NICEB] + argv
else:
argv = [NICES] + argv
p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka) p = sp.Popen(argv, stdout=cout, stderr=cerr, **ka)
if not timeout or PY2: if not timeout or PY2:
bout, berr = p.communicate(sin) bout, berr = p.communicate(sin)
@@ -2664,13 +2720,13 @@ def _parsehook(
sp_ka = { sp_ka = {
"env": env, "env": env,
"nice": True,
"timeout": tout, "timeout": tout,
"kill": kill, "kill": kill,
"capture": cap, "capture": cap,
} }
if cmd.startswith("~"): cmd = os.path.expandvars(os.path.expanduser(cmd))
cmd = os.path.expanduser(cmd)
return chk, fork, jtxt, wait, sp_ka, cmd return chk, fork, jtxt, wait, sp_ka, cmd
@@ -2809,9 +2865,7 @@ def loadpy(ap: str, hot: bool) -> Any:
depending on what other inconveniently named files happen depending on what other inconveniently named files happen
to be in the same folder to be in the same folder
""" """
if ap.startswith("~"): ap = os.path.expandvars(os.path.expanduser(ap))
ap = os.path.expanduser(ap)
mdir, mfile = os.path.split(absreal(ap)) mdir, mfile = os.path.split(absreal(ap))
mname = mfile.rsplit(".", 1)[0] mname = mfile.rsplit(".", 1)[0]
sys.path.insert(0, mdir) sys.path.insert(0, mdir)
@@ -2819,7 +2873,7 @@ def loadpy(ap: str, hot: bool) -> Any:
if PY2: if PY2:
mod = __import__(mname) mod = __import__(mname)
if hot: if hot:
reload(mod) reload(mod) # type: ignore
else: else:
import importlib import importlib
@@ -2962,6 +3016,7 @@ def termsize() -> tuple[int, int]:
def hidedir(dp) -> None: def hidedir(dp) -> None:
if ANYWIN: if ANYWIN:
try: try:
assert ctypes
k32 = ctypes.WinDLL("kernel32") k32 = ctypes.WinDLL("kernel32")
attrs = k32.GetFileAttributesW(dp) attrs = k32.GetFileAttributesW(dp)
if attrs >= 0: if attrs >= 0:
@@ -2971,9 +3026,12 @@ def hidedir(dp) -> None:
class Pebkac(Exception): class Pebkac(Exception):
def __init__(self, code: int, msg: Optional[str] = None) -> None: def __init__(
self, code: int, msg: Optional[str] = None, log: Optional[str] = None
) -> None:
super(Pebkac, self).__init__(msg or HTTPCODE[code]) super(Pebkac, self).__init__(msg or HTTPCODE[code])
self.code = code self.code = code
self.log = log
def __repr__(self) -> str: def __repr__(self) -> str:
return "Pebkac({}, {})".format(self.code, repr(self.args)) return "Pebkac({}, {})".format(self.code, repr(self.args))

View File

@@ -1891,6 +1891,10 @@ html.y #doc {
text-align: center; text-align: center;
padding: .5em; padding: .5em;
} }
#docul li.bn span {
font-weight: bold;
color: var(--fg-max);
}
#doc.prism { #doc.prism {
padding-left: 3em; padding-left: 3em;
} }

View File

@@ -251,7 +251,7 @@ var Ls = {
"mt_coth": "convert all others (not mp3) to opus\">oth", "mt_coth": "convert all others (not mp3) to opus\">oth",
"mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting", "mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting",
"mt_eq": "enables the equalizer and gain control;$N$Nboost &lt;code&gt;0&lt;/code&gt; = standard 100% volume (unmodified)$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = standard stereo (unmodified)$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% left-right crossfeed$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that", "mt_eq": "enables the equalizer and gain control;$N$Nboost &lt;code&gt;0&lt;/code&gt; = standard 100% volume (unmodified)$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = standard stereo (unmodified)$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% left-right crossfeed$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that",
"mt_drc": "enables the dynamic range compressor (volume flattener / brickwaller); will also enable EQ to balance the spaghetti, so set all EQ fields except for 'width' to 0 if you don't want it$N$Nlowers the volume of audio above THRESHOLD dB; for every RATIO dB past THRESHOLD there is 1 dB of output, so default values of tresh -24 and ratio 12 means it should never get louder than -22 dB and it is safe to increase the equalizer boost to 0.8, or even 1.8 with ATK 0 and a huge RLS like 90$N$Nplease see wikipedia instead, this is probably wrong", "mt_drc": "enables the dynamic range compressor (volume flattener / brickwaller); will also enable EQ to balance the spaghetti, so set all EQ fields except for 'width' to 0 if you don't want it$N$Nlowers the volume of audio above THRESHOLD dB; for every RATIO dB past THRESHOLD there is 1 dB of output, so default values of tresh -24 and ratio 12 means it should never get louder than -22 dB and it is safe to increase the equalizer boost to 0.8, or even 1.8 with ATK 0 and a huge RLS like 90 (only works in firefox; RLS is max 1 in other browsers)$N$N(see wikipedia, they explain it much better)",
"mb_play": "play", "mb_play": "play",
"mm_hashplay": "play this audio file?", "mm_hashplay": "play this audio file?",
@@ -327,8 +327,8 @@ var Ls = {
"tv_xe1": "could not load textfile:\n\nerror ", "tv_xe1": "could not load textfile:\n\nerror ",
"tv_xe2": "404, file not found", "tv_xe2": "404, file not found",
"tv_lst": "list of textfiles in", "tv_lst": "list of textfiles in",
"tvt_close": "return to folder view$NHotkey: M\">❌ close", "tvt_close": "return to folder view$NHotkey: M (or Esc)\">❌ close",
"tvt_dl": "download this file\">💾 download", "tvt_dl": "download this file$NHotkey: Y\">💾 download",
"tvt_prev": "show previous document$NHotkey: i\">⬆ prev", "tvt_prev": "show previous document$NHotkey: i\">⬆ prev",
"tvt_next": "show next document$NHotkey: K\">⬇ next", "tvt_next": "show next document$NHotkey: K\">⬇ next",
"tvt_sel": "select file &nbsp; ( for cut / delete / ... )$NHotkey: S\">sel", "tvt_sel": "select file &nbsp; ( for cut / delete / ... )$NHotkey: S\">sel",
@@ -559,8 +559,9 @@ var Ls = {
"dokumentviser", "dokumentviser",
["I/K", "forr./neste fil"], ["I/K", "forr./neste fil"],
["M", "lukk tekstdokument"], ["M", "lukk tekstdokument"],
["E", "rediger tekstdokument"] ["E", "rediger tekstdokument"],
["S", "velg fil (for F2/ctrl-x/...)"] ["S", "velg fil (for F2/ctrl-x/...)"],
["Y", "last ned tekstfil"],
] ]
], ],
@@ -730,7 +731,7 @@ var Ls = {
"mt_coth": "konverter alt annet (men ikke mp3) til opus\">andre", "mt_coth": "konverter alt annet (men ikke mp3) til opus\">andre",
"mt_tint": "nivå av bakgrunnsfarge på søkestripa (0-100),$Ngjør oppdateringer mindre distraherende", "mt_tint": "nivå av bakgrunnsfarge på søkestripa (0-100),$Ngjør oppdateringer mindre distraherende",
"mt_eq": "aktiver tonekontroll og forsterker;$N$Nboost &lt;code&gt;0&lt;/code&gt; = normal volumskala$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = normal stereo$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% blanding venstre-høyre$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = instrumental :^)$N$Nreduserer også dødtid imellom sangfiler", "mt_eq": "aktiver tonekontroll og forsterker;$N$Nboost &lt;code&gt;0&lt;/code&gt; = normal volumskala$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = normal stereo$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% blanding venstre-høyre$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = instrumental :^)$N$Nreduserer også dødtid imellom sangfiler",
"mt_drc": "aktiver volum-utjevning (dynamic range compressor); vil også aktivere tonejustering, så sett alle EQ-feltene bortsett fra 'width' til 0 hvis du ikke vil ha noe EQ$N$Nfilteret vil dempe volumet på alt som er høyere enn TRESH dB; for hver RATIO dB over grensen er det 1dB som treffer høyttalerne, så standardverdiene tresh -24 og ratio 12 skal bety at volumet ikke går høyere enn -22 dB, slik at man trygt kan øke boost-verdien i equalizer'n til rundt 0.8, eller 1.8 kombinert med ATK 0 og RLS 90$N$Ngodt mulig jeg har misforstått litt, så wikipedia forklarer nok bedre", "mt_drc": "aktiver volum-utjevning (dynamic range compressor); vil også aktivere tonejustering, så sett alle EQ-feltene bortsett fra 'width' til 0 hvis du ikke vil ha noe EQ$N$Nfilteret vil dempe volumet på alt som er høyere enn TRESH dB; for hver RATIO dB over grensen er det 1dB som treffer høyttalerne, så standardverdiene tresh -24 og ratio 12 skal bety at volumet ikke går høyere enn -22 dB, slik at man trygt kan øke boost-verdien i equalizer'n til rundt 0.8, eller 1.8 kombinert med ATK 0 og RLS 90 (bare mulig i firefox; andre nettlesere tar ikke høyere RLS enn 1)$N$Nwikipedia forklarer dette mye bedre forresten",
"mb_play": "lytt", "mb_play": "lytt",
"mm_hashplay": "spill denne sangen?", "mm_hashplay": "spill denne sangen?",
@@ -806,8 +807,8 @@ var Ls = {
"tv_xe1": "kunne ikke laste tekstfil:\n\nfeil ", "tv_xe1": "kunne ikke laste tekstfil:\n\nfeil ",
"tv_xe2": "404, Fil ikke funnet", "tv_xe2": "404, Fil ikke funnet",
"tv_lst": "tekstfiler i mappen", "tv_lst": "tekstfiler i mappen",
"tvt_close": "gå tilbake til mappen$NSnarvei: M\">❌ lukk", "tvt_close": "gå tilbake til mappen$NSnarvei: M (eller Esc)\">❌ lukk",
"tvt_dl": "last ned denne filen\">💾 last ned", "tvt_dl": "last ned denne filen$NSnarvei: Y\">💾 last ned",
"tvt_prev": "vis forrige dokument$NSnarvei: i\">⬆ forr.", "tvt_prev": "vis forrige dokument$NSnarvei: i\">⬆ forr.",
"tvt_next": "vis neste dokument$NSnarvei: K\">⬇ neste", "tvt_next": "vis neste dokument$NSnarvei: K\">⬇ neste",
"tvt_sel": "markér filen &nbsp; ( for utklipp / sletting / ... )$NSnarvei: S\">merk", "tvt_sel": "markér filen &nbsp; ( for utklipp / sletting / ... )$NSnarvei: S\">merk",
@@ -1505,7 +1506,6 @@ var mpl = (function () {
artist = (np.circle && np.circle != np.artist ? np.circle + ' // ' : '') + (np.artist || (fns.length > 1 ? fns[0] : '')), artist = (np.circle && np.circle != np.artist ? np.circle + ' // ' : '') + (np.artist || (fns.length > 1 ? fns[0] : '')),
title = np.title || fns.pop(), title = np.title || fns.pop(),
cover = '', cover = '',
pcover = '',
tags = { title: title }; tags = { title: title };
if (artist) if (artist)
@@ -1520,20 +1520,14 @@ var mpl = (function () {
for (var a = 0, aa = files.length; a < aa; a++) { for (var a = 0, aa = files.length; a < aa; a++) {
if (/^(cover|folder)\.(jpe?g|png|gif)$/i.test(files[a].textContent)) { if (/^(cover|folder)\.(jpe?g|png|gif)$/i.test(files[a].textContent)) {
cover = noq_href(files[a]); cover = files[a].getAttribute('href');
break; break;
} }
} }
if (cover) { if (cover) {
cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j'; cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j';
pcover = cover; tags.artwork = [{ "src": cover, type: "image/jpeg" }];
var pwd = get_pwd();
if (pwd)
pcover += '&pw=' + uricom_enc(pwd);
tags.artwork = [{ "src": pcover, type: "image/jpeg" }];
} }
} }
@@ -1545,7 +1539,7 @@ var mpl = (function () {
ebi('np_dur').textContent = np['.dur'] || ''; ebi('np_dur').textContent = np['.dur'] || '';
ebi('np_url').textContent = get_vpath() + np.file.split('?')[0]; ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
if (!MOBILE) if (!MOBILE)
ebi('np_img').setAttribute('src', cover || ''); // dont give last.fm the pwd ebi('np_img').setAttribute('src', cover || '');
navigator.mediaSession.metadata = new MediaMetadata(tags); navigator.mediaSession.metadata = new MediaMetadata(tags);
navigator.mediaSession.setActionHandler('play', mplay); navigator.mediaSession.setActionHandler('play', mplay);
@@ -1723,7 +1717,7 @@ function MPlayer() {
var t0 = Date.now(); var t0 = Date.now();
if (mpl.waves) if (mpl.waves)
fetch(url + '&th=p').then(function (x) { fetch(url.replace(/\bth=opus&/, '') + '&th=p').then(function (x) {
x.body.getReader().read(); x.body.getReader().read();
}); });
@@ -2243,7 +2237,7 @@ function song_skip(n, dirskip) {
return; return;
} }
if (tid) if (tid && !dirskip)
play(ofs + n); play(ofs + n);
else else
play(mp.order[n == -1 ? mp.order.length - 1 : 0]); play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
@@ -2276,6 +2270,21 @@ function next_song_cmn(e) {
mpl.traversals = 0; mpl.traversals = 0;
t_fchg = 0; t_fchg = 0;
} }
function last_song(e) {
ev(e);
if (mp.order.length) {
mpl.traversals = 0;
return song_skip(-1, true);
}
if (mpl.traversals++ < 5) {
treectl.ls_cb = last_song;
return tree_neigh(-1);
}
toast.inf(10, L.mm_nof);
console.log("mm_nof2");
mpl.traversals = 0;
t_fchg = 0;
}
function prev_song(e) { function prev_song(e) {
ev(e); ev(e);
@@ -2501,7 +2510,7 @@ var afilt = (function () {
"drcen": false, "drcen": false,
"bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000], "bands": [31.25, 62.5, 125, 250, 500, 1000, 2000, 4000, 8000, 16000],
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4], "gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
"drcv": [-24, 30, 12, 0.003, 0.25], "drcv": [-24, 30, 12, 0.01, 0.25],
"drch": ['tresh', 'knee', 'ratio', 'atk', 'rls'], "drch": ['tresh', 'knee', 'ratio', 'atk', 'rls'],
"drck": ['threshold', 'knee', 'ratio', 'attack', 'release'], "drck": ['threshold', 'knee', 'ratio', 'attack', 'release'],
"drcn": null, "drcn": null,
@@ -2576,6 +2585,8 @@ var afilt = (function () {
var gains = jread('au_eq_gain', r.gains); var gains = jread('au_eq_gain', r.gains);
if (r.gains.length == gains.length) if (r.gains.length == gains.length)
r.gains = gains; r.gains = gains;
r.drcv = jread('au_drcv', r.drcv);
} }
catch (ex) { } catch (ex) { }
@@ -2692,6 +2703,17 @@ var afilt = (function () {
for (var a = 0; a < r.drcv.length; a++) for (var a = 0; a < r.drcv.length; a++)
fi[r.drck[a]].value = r.drcv[a]; fi[r.drck[a]].value = r.drcv[a];
if (r.drcv[3] < 0.02) {
// avoid static at decode start
fi.attack.value = 0.02;
setTimeout(function () {
try {
fi.attack.value = r.drcv[3];
}
catch (ex) { }
}, 200);
}
r.filters.push(fi); r.filters.push(fi);
timer.add(showdrc); timer.add(showdrc);
} }
@@ -2783,7 +2805,7 @@ var afilt = (function () {
return; return;
r.drcv[n] = v; r.drcv[n] = v;
jwrite('au_drc', r.drcv); jwrite('au_drcv', r.drcv);
if (r.drcn) if (r.drcn)
r.drcn[r.drck[n]].value = v; r.drcn[r.drck[n]].value = v;
} }
@@ -2907,7 +2929,7 @@ function play(tid, is_ev, seek) {
tn = mp.order.length - 1; tn = mp.order.length - 1;
} }
else if (mpl.pb_mode == 'next') { else if (mpl.pb_mode == 'next') {
treectl.ls_cb = prev_song; treectl.ls_cb = last_song;
return tree_neigh(-1); return tree_neigh(-1);
} }
} }
@@ -2991,7 +3013,7 @@ function play(tid, is_ev, seek) {
pbar.unwave(); pbar.unwave();
if (mpl.waves) if (mpl.waves)
pbar.loadwaves(url + '&th=p'); pbar.loadwaves(url.replace(/\bth=opus&/, '') + '&th=p');
mpui.progress_updater(); mpui.progress_updater();
pbar.onresize(); pbar.onresize();
@@ -3784,7 +3806,7 @@ var fileman = (function () {
function rename_cb() { function rename_cb() {
if (this.status !== 201) { if (this.status !== 201) {
var msg = this.responseText; var msg = unpre(this.responseText);
toast.err(9, L.fr_efail + msg); toast.err(9, L.fr_efail + msg);
return; return;
} }
@@ -3833,7 +3855,7 @@ var fileman = (function () {
} }
function delete_cb() { function delete_cb() {
if (this.status !== 200) { if (this.status !== 200) {
var msg = this.responseText; var msg = unpre(this.responseText);
toast.err(9, L.fd_err + msg); toast.err(9, L.fd_err + msg);
return; return;
} }
@@ -3954,7 +3976,7 @@ var fileman = (function () {
} }
function paste_cb() { function paste_cb() {
if (this.status !== 201) { if (this.status !== 201) {
var msg = this.responseText; var msg = unpre(this.responseText);
toast.err(9, L.fp_err + msg); toast.err(9, L.fp_err + msg);
return; return;
} }
@@ -4105,7 +4127,10 @@ var showfile = (function () {
if (lang == 'md' && td.textContent != '-') if (lang == 'md' && td.textContent != '-')
continue; continue;
td.innerHTML = '<a href="#" class="doc bri" hl="' + link.id + '">-txt-</a>'; td.innerHTML = '<a href="#" id="t' +
link.id + '" class="doc bri" hl="' +
link.id + '">-txt-</a>';
td.getElementsByTagName('a')[0].setAttribute('href', '?doc=' + fn); td.getElementsByTagName('a')[0].setAttribute('href', '?doc=' + fn);
} }
r.mktree(); r.mktree();
@@ -4199,6 +4224,9 @@ var showfile = (function () {
el.textContent = txt; el.textContent = txt;
el.innerHTML = '<code>' + el.innerHTML + '</code>'; el.innerHTML = '<code>' + el.innerHTML + '</code>';
if (!window.no_prism) { if (!window.no_prism) {
if ((lang == 'conf' || lang == 'cfg') && ('\n' + txt).indexOf('\n# -*- mode: yaml -*-') + 1)
lang = 'yaml';
el.className = 'prism linkable-line-numbers line-numbers language-' + lang; el.className = 'prism linkable-line-numbers line-numbers language-' + lang;
if (!defer) if (!defer)
fun(el.firstChild); fun(el.firstChild);
@@ -4287,7 +4315,7 @@ var showfile = (function () {
}; };
r.mktree = function () { r.mktree = function () {
var html = ['<li class="bn">' + L.tv_lst + '<br />' + linksplit(get_vpath()).join('') + '</li>']; var html = ['<li class="bn">' + L.tv_lst + '<br />' + linksplit(get_vpath()).join('<span>/</span>') + '</li>'];
for (var a = 0; a < r.files.length; a++) { for (var a = 0; a < r.files.length; a++) {
var file = r.files[a]; var file = r.files[a];
html.push('<li><a href="?doc=' + html.push('<li><a href="?doc=' +
@@ -4489,29 +4517,39 @@ var thegrid = (function () {
function gclick(e, dbl) { function gclick(e, dbl) {
var oth = ebi(this.getAttribute('ref')), var oth = ebi(this.getAttribute('ref')),
href = noq_href(this), href = noq_href(this),
aplay = ebi('a' + oth.getAttribute('id')), fid = oth.getAttribute('id'),
aplay = ebi('a' + fid),
atext = ebi('t' + fid),
is_txt = atext && showfile.getlang(href),
is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href), is_img = /\.(a?png|avif|bmp|gif|heif|jpe?g|jfif|svg|webp|webm|mkv|mp4)(\?|$)/i.test(href),
is_dir = href.endsWith('/'), is_dir = href.endsWith('/'),
is_srch = !!ebi('unsearch'),
in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)), in_tree = is_dir && treectl.find(oth.textContent.slice(0, -1)),
have_sel = QS('#files tr.sel'), have_sel = QS('#files tr.sel'),
td = oth.closest('td').nextSibling, td = oth.closest('td').nextSibling,
tr = td.parentNode; tr = td.parentNode;
if ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e)))) { if (!is_srch && ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e))))) {
td.onclick.call(td, e); td.onclick.call(td, e);
if (e.shiftKey) if (e.shiftKey)
return r.loadsel(); return r.loadsel();
clmod(this, 'sel', clgot(tr, 'sel')); clmod(this, 'sel', clgot(tr, 'sel'));
} }
else if (widget.is_open && aplay)
aplay.click();
else if (in_tree && !have_sel) else if (in_tree && !have_sel)
in_tree.click(); in_tree.click();
else if (oth.hasAttribute('download'))
oth.click();
else if (widget.is_open && aplay)
aplay.click();
else if (is_dir && !have_sel) else if (is_dir && !have_sel)
treectl.reqls(href, true); treectl.reqls(href, true);
else if (is_txt && !has(['md', 'htm', 'html'], is_txt))
atext.click();
else if (!is_img && have_sel) else if (!is_img && have_sel)
window.open(href, '_blank'); window.open(href, '_blank');
@@ -4634,7 +4672,7 @@ var thegrid = (function () {
if (r.full) if (r.full)
ihref += 'f' ihref += 'f'
if (href == "#") if (href == "#")
ihref = SR + '/.cpr/ico/⏏️'; ihref = SR + '/.cpr/ico/' + (ref == 'moar' ? '++' : 'exit');
} }
else if (isdir) { else if (isdir) {
ihref = SR + '/.cpr/ico/folder'; ihref = SR + '/.cpr/ico/folder';
@@ -5037,6 +5075,13 @@ document.onkeydown = function (e) {
return QS('#twobytwo').click(); return QS('#twobytwo').click();
} }
if (showfile.active()) {
if (k == 'KeyS')
showfile.tglsel();
if (k == 'KeyE' && ebi('editdoc').style.display != 'none')
ebi('editdoc').click();
}
if (thegrid.en) { if (thegrid.en) {
if (k == 'KeyS') if (k == 'KeyS')
return ebi('gridsel').click(); return ebi('gridsel').click();
@@ -5047,13 +5092,6 @@ document.onkeydown = function (e) {
if (k == 'KeyD') if (k == 'KeyD')
return QSA('#ghead a[z]')[1].click(); return QSA('#ghead a[z]')[1].click();
} }
if (showfile.active()) {
if (k == 'KeyS')
showfile.tglsel();
if (k == 'KeyE' && ebi('editdoc').style.display != 'none')
ebi('editdoc').click();
}
}; };
@@ -5287,10 +5325,7 @@ document.onkeydown = function (e) {
function xhr_search_results() { function xhr_search_results() {
if (this.status !== 200) { if (this.status !== 200) {
var msg = this.responseText; var msg = unpre(this.responseText);
if (msg.indexOf('<pre>') === 0)
msg = msg.slice(5);
srch_msg(true, "http " + this.status + ": " + msg); srch_msg(true, "http " + this.status + ": " + msg);
search_in_progress = 0; search_in_progress = 0;
return; return;
@@ -5329,7 +5364,7 @@ document.onkeydown = function (e) {
if (ext.length > 8) if (ext.length > 8)
ext = '%'; ext = '%';
var links = linksplit(r.rp + '', id).join(''), var links = linksplit(r.rp + '', id).join('<span>/</span>'),
nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz]; nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
for (var b = 0; b < tagord.length; b++) { for (var b = 0; b < tagord.length; b++) {
@@ -6051,7 +6086,7 @@ var treectl = (function () {
} }
if (tn.lead == '-') if (tn.lead == '-')
tn.lead = '<a href="?doc=' + bhref + tn.lead = '<a href="?doc=' + bhref + '" id="t' + id +
'" class="doc' + (lang ? ' bri' : '') + '" class="doc' + (lang ? ' bri' : '') +
'" hl="' + id + '" name="' + hname + '">-txt-</a>'; '" hl="' + id + '" name="' + hname + '">-txt-</a>';
@@ -6086,6 +6121,7 @@ var treectl = (function () {
setTimeout(r.tscroll, 100); setTimeout(r.tscroll, 100);
} }
} }
else ebi('lazy').innerHTML = '';
function asdf() { function asdf() {
showfile.mktree(); showfile.mktree();
@@ -7155,16 +7191,17 @@ var msel = (function () {
form.onsubmit = function (e) { form.onsubmit = function (e) {
ev(e); ev(e);
clmod(sf, 'vis', 1); clmod(sf, 'vis', 1);
sf.textContent = 'creating "' + tb.value + '"...'; var dn = tb.value;
sf.textContent = 'creating "' + dn + '"...';
var fd = new FormData(); var fd = new FormData();
fd.append("act", "mkdir"); fd.append("act", "mkdir");
fd.append("name", tb.value); fd.append("name", dn);
var xhr = new XHR(); var xhr = new XHR();
xhr.vp = get_evpath(); xhr.vp = get_evpath();
xhr.dn = tb.value; xhr.dn = dn;
xhr.open('POST', xhr.vp, true); xhr.open('POST', dn.startsWith('/') ? (SR || '/') : xhr.vp, true);
xhr.onload = xhr.onerror = cb; xhr.onload = xhr.onerror = cb;
xhr.responseType = 'text'; xhr.responseType = 'text';
xhr.send(fd); xhr.send(fd);
@@ -7181,7 +7218,7 @@ var msel = (function () {
xhrchk(this, L.fd_xe1, L.fd_xe2); xhrchk(this, L.fd_xe1, L.fd_xe2);
if (this.status !== 201) { if (this.status !== 201) {
sf.textContent = 'error: ' + this.responseText; sf.textContent = 'error: ' + unpre(this.responseText);
return; return;
} }
@@ -7190,8 +7227,9 @@ var msel = (function () {
sf.textContent = ''; sf.textContent = '';
var dn = this.getResponseHeader('X-New-Dir'); var dn = this.getResponseHeader('X-New-Dir');
dn = dn || uricom_enc(this.dn); dn = dn ? '/' + dn + '/' : uricom_enc(this.dn);
treectl.goto(this.vp + dn + '/', true); treectl.goto(dn, true);
tree_scrollto();
} }
})(); })();
@@ -7228,7 +7266,7 @@ var msel = (function () {
xhrchk(this, L.fsm_xe1, L.fsm_xe2); xhrchk(this, L.fsm_xe1, L.fsm_xe2);
if (this.status < 200 || this.status > 201) { if (this.status < 200 || this.status > 201) {
sf.textContent = 'error: ' + this.responseText; sf.textContent = 'error: ' + unpre(this.responseText);
return; return;
} }
@@ -7573,7 +7611,7 @@ var unpost = (function () {
'<tr><td><a me="' + me + '" class="n' + a + '" href="#">' + L.un_del + '</a></td>' + '<tr><td><a me="' + me + '" class="n' + a + '" href="#">' + L.un_del + '</a></td>' +
'<td>' + unix2iso(res[a].at) + '</td>' + '<td>' + unix2iso(res[a].at) + '</td>' +
'<td>' + res[a].sz + '</td>' + '<td>' + res[a].sz + '</td>' +
'<td>' + linksplit(res[a].vp).join(' ') + '</td></tr>'); '<td>' + linksplit(res[a].vp).join('<span> / </span>') + '</td></tr>');
} }
html.push("</tbody></table>"); html.push("</tbody></table>");
@@ -7606,7 +7644,7 @@ var unpost = (function () {
function unpost_delete_cb() { function unpost_delete_cb() {
if (this.status !== 200) { if (this.status !== 200) {
var msg = this.responseText; var msg = unpre(this.responseText);
toast.err(9, L.un_derr + msg); toast.err(9, L.un_derr + msg);
return; return;
} }

View File

@@ -931,7 +931,13 @@ var set_lno = (function () {
// hotkeys / toolbar // hotkeys / toolbar
(function () { (function () {
var keydown = function (ev) { var keydown = function (ev) {
ev = ev || window.event; if (!ev && window.event) {
ev = window.event;
if (localStorage.dev_fbw == 1) {
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
console.error('using window.event');
}
}
var kc = ev.code || ev.keyCode || ev.which, var kc = ev.code || ev.keyCode || ev.which,
editing = document.activeElement == dom_src; editing = document.activeElement == dom_src;

View File

@@ -10,6 +10,7 @@
{{ html_head }} {{ html_head }}
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/splash.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}"> <link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
<style>ul{padding-left:1.3em}li{margin:.4em 0}</style>
</head> </head>
<body> <body>
@@ -48,9 +49,13 @@
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %} rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>W:</b>
</pre> </pre>
{% if s %} <ul>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p> {% if s %}
{% endif %} <li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code>&nbsp;</code> (space)</li>
</ul>
<p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p> <p>if you want to use the native WebDAV client in windows instead (slow and buggy), first run <a href="{{ r }}/.cpr/a/webdav-cfg.bat">webdav-cfg.bat</a> to remove the 47 MiB filesize limit (also fixes latency and password login), then connect:</p>
<pre> <pre>
@@ -73,10 +78,13 @@
rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %} rclone config create {{ aname }}-dav webdav url=http{{ s }}://{{ rip }}{{ hport }} vendor=owncloud pacer_min_sleep=0.01ms{% if accs %} user=k pass=<b>{{ pw }}</b>{% endif %}
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-dav:{{ rvp }} <b>mp</b>
</pre> </pre>
{% if s %} <ul>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>--no-check-certificate</code> to the mount command</em><br />---</p> {% if s %}
{% endif %} <li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code>&nbsp;</code> (space)</li>
</ul>
<p>or the emergency alternative (gnome/gui-only):</p> <p>or the emergency alternative (gnome/gui-only):</p>
<!-- gnome-bug: ignores vp --> <!-- gnome-bug: ignores vp -->
<pre> <pre>
@@ -123,8 +131,14 @@
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>W:</b>
</pre> </pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %} {% endif %}
<ul>
{% if args.ftps %}
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code>&nbsp;</code> (space)</li>
</ul>
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p> <p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
<pre> <pre>
explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }} explorer {{ "ftp" if args.ftp else "ftps" }}://{% if accs %}<b>{{ pw }}</b>:k@{% endif %}{{ host }}:{{ args.ftp or args.ftps }}/{{ rvp }}
@@ -145,8 +159,14 @@
rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true rclone config create {{ aname }}-ftps ftp host={{ rip }} port={{ args.ftps }} pass=k user={% if accs %}<b>{{ pw }}</b>{% else %}anonymous{% endif %} tls=false explicit_tls=true
rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b> rclone mount --vfs-cache-mode writes --dir-cache-time 5s {{ aname }}-ftps:{{ rvp }} <b>mp</b>
</pre> </pre>
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>no_check_certificate=true</code> to the config command</em><br />---</p>
{% endif %} {% endif %}
<ul>
{% if args.ftps %}
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code>&nbsp;</code> (space)</li>
</ul>
<p>emergency alternative (gnome/gui-only):</p> <p>emergency alternative (gnome/gui-only):</p>
<!-- gnome-bug: ignores vp --> <!-- gnome-bug: ignores vp -->
<pre> <pre>
@@ -178,7 +198,7 @@
partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b> partyfuse.py{% if accs %} -a <b>{{ pw }}</b>{% endif %} http{{ s }}://{{ ep }}/{{ rvp }} <b><span class="os win">W:</span><span class="os lin mac">mp</span></b>
</pre> </pre>
{% if s %} {% if s %}
<p><em>note: if you are on LAN (or just dont have valid certificates), add <code>-td</code></em></p> <ul><li>if you are on LAN (or just dont have valid certificates), add <code>-td</code></li></ul>
{% endif %} {% endif %}
<p> <p>
you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers) you can use <a href="{{ r }}/.cpr/a/u2c.py">u2c.py</a> to upload (sometimes faster than web-browsers)

View File

@@ -1,3 +1,18 @@
:root {
--fg: #ccc;
--fg-max: #fff;
--bg-u2: #2b2b2b;
--bg-u5: #444;
}
html.y {
--fg: #222;
--fg-max: #000;
--bg-u2: #f7f7f7;
--bg-u5: #ccc;
}
html.bz {
--bg-u2: #202231;
}
@font-face { @font-face {
font-family: 'scp'; font-family: 'scp';
font-display: swap; font-display: swap;
@@ -14,6 +29,7 @@ html {
max-width: min(34em, 90%); max-width: min(34em, 90%);
max-width: min(34em, calc(100% - 7em)); max-width: min(34em, calc(100% - 7em));
color: #ddd; color: #ddd;
color: var(--fg);
background: #333; background: #333;
background: var(--bg-u2); background: var(--bg-u2);
border: 0 solid #777; border: 0 solid #777;
@@ -171,24 +187,15 @@ html {
color: #f6a; color: #f6a;
} }
html.y #tt { html.y #tt {
color: #333;
background: #fff;
border-color: #888 #000 #777 #000; border-color: #888 #000 #777 #000;
} }
html.bz #tt { html.bz #tt {
background: #202231;
border-color: #3b3f58; border-color: #3b3f58;
} }
html.y #tt, html.y #tt,
html.y #toast { html.y #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4); box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
} }
html.y #tt code {
color: #fff;
color: var(--fg-max);
background: #060;
background: var(--bg-u5);
}
#modalc code { #modalc code {
color: #060; color: #060;
background: transparent; background: transparent;
@@ -326,6 +333,9 @@ html.y .btn:focus {
box-shadow: 0 .1em .2em #037 inset; box-shadow: 0 .1em .2em #037 inset;
outline: #037 solid .1em; outline: #037 solid .1em;
} }
input[type="submit"] {
cursor: pointer;
}
input[type="text"]:focus, input[type="text"]:focus,
input:not([type]):focus, input:not([type]):focus,
textarea:focus { textarea:focus {

View File

@@ -1043,7 +1043,7 @@ function up2k_init(subtle) {
clmod(ebi(v), 'hl', 1); clmod(ebi(v), 'hl', 1);
} }
function offdrag(e) { function offdrag(e) {
ev(e); noope(e);
var v = this.getAttribute('v'); var v = this.getAttribute('v');
if (v) if (v)
@@ -1407,7 +1407,7 @@ function up2k_init(subtle) {
pvis.addfile([ pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit( uc.fsearch ? esc(entry.name) : linksplit(
entry.purl + uricom_enc(entry.name)).join(' '), entry.purl + uricom_enc(entry.name)).join(' / '),
'📐 ' + L.u_hashing, '📐 ' + L.u_hashing,
'' ''
], entry.size, draw_each); ], entry.size, draw_each);
@@ -2284,7 +2284,7 @@ function up2k_init(subtle) {
cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b', cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
sdiff = '<span style="color:#' + cdiff + '">diff ' + diff; sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>'); msg.push(linksplit(hit.rp).join(' / ') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
} }
msg = msg.join('<br />\n'); msg = msg.join('<br />\n');
} }
@@ -2318,7 +2318,7 @@ function up2k_init(subtle) {
url += '?k=' + fk; url += '?k=' + fk;
} }
pvis.seth(t.n, 0, linksplit(url).join(' ')); pvis.seth(t.n, 0, linksplit(url).join(' / '));
} }
var chunksize = get_chunksize(t.size), var chunksize = get_chunksize(t.size),
@@ -2402,15 +2402,12 @@ function up2k_init(subtle) {
pvis.seth(t.n, 2, L.u_ehstmp, t); pvis.seth(t.n, 2, L.u_ehstmp, t);
var err = "", var err = "",
rsp = (xhr.responseText + ''), rsp = unpre(xhr.responseText),
ofs = rsp.lastIndexOf('\nURL: '); ofs = rsp.lastIndexOf('\nURL: ');
if (ofs !== -1) if (ofs !== -1)
rsp = rsp.slice(0, ofs); rsp = rsp.slice(0, ofs);
if (rsp.indexOf('<pre>') === 0)
rsp = rsp.slice(5);
if (rsp.indexOf('rate-limit ') !== -1) { if (rsp.indexOf('rate-limit ') !== -1) {
var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0]; var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
console.log("rate-limit: " + penalty); console.log("rate-limit: " + penalty);
@@ -2429,7 +2426,7 @@ function up2k_init(subtle) {
err = rsp; err = rsp;
ofs = err.indexOf('\n/'); ofs = err.indexOf('\n/');
if (ofs !== -1) { if (ofs !== -1) {
err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' '); err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' / ');
} }
if (!t.rechecks && (err_pend || err_srcb)) { if (!t.rechecks && (err_pend || err_srcb)) {
t.rechecks = 0; t.rechecks = 0;
@@ -2536,7 +2533,7 @@ function up2k_init(subtle) {
cdr = t.size; cdr = t.size;
var orz = function (xhr) { var orz = function (xhr) {
var txt = ((xhr.response && xhr.response.err) || xhr.responseText) + ''; var txt = unpre((xhr.response && xhr.response.err) || xhr.responseText);
if (txt.indexOf('upload blocked by x') + 1) { if (txt.indexOf('upload blocked by x') + 1) {
apop(st.busy.upload, upt); apop(st.busy.upload, upt);
apop(t.postlist, npart); apop(t.postlist, npart);

View File

@@ -277,7 +277,13 @@ function anymod(e, shift_ok) {
function ev(e) { function ev(e) {
e = e || window.event; if (!e && window.event) {
e = window.event;
if (localStorage.dev_fbw == 1) {
toast.warn(10, 'hello from fallback code ;_;\ncheck console trace');
console.error('using window.event');
}
}
if (!e) if (!e)
return; return;
@@ -296,7 +302,7 @@ function ev(e) {
function noope(e) { function noope(e) {
ev(e); try { ev(e); } catch (ex) { }
} }
@@ -622,9 +628,8 @@ function linksplit(rp, id) {
} }
var vlink = esc(uricom_dec(link)); var vlink = esc(uricom_dec(link));
if (link.indexOf('/') !== -1) { if (link.indexOf('/') !== -1)
vlink = vlink.slice(0, -1) + '<span>/</span>'; vlink = vlink.slice(0, -1);
}
if (!rp) { if (!rp) {
if (q) if (q)
@@ -756,17 +761,6 @@ function noq_href(el) {
} }
function get_pwd() {
var k = HTTPS ? 's=' : 'd=',
pwd = ('; ' + document.cookie).split('; cppw' + k);
if (pwd.length < 2)
return null;
return decodeURIComponent(pwd[1].split(';')[0]);
}
function unix2iso(ts) { function unix2iso(ts) {
return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5); return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
} }
@@ -1357,6 +1351,11 @@ function lf2br(txt) {
} }
function unpre(txt) {
return ('' + txt).replace(/^<pre>/, '');
}
var toast = (function () { var toast = (function () {
var r = {}, var r = {},
te = null, te = null,
@@ -1469,6 +1468,7 @@ var modal = (function () {
r.load(); r.load();
r.busy = false; r.busy = false;
r.nofocus = 0;
r.show = function (html) { r.show = function (html) {
o = mknod('div', 'modal'); o = mknod('div', 'modal');
@@ -1492,6 +1492,7 @@ var modal = (function () {
}, 0); }, 0);
document.addEventListener('focus', onfocus); document.addEventListener('focus', onfocus);
document.addEventListener('selectionchange', onselch);
timer.add(onfocus); timer.add(onfocus);
if (cb_up) if (cb_up)
setTimeout(cb_up, 1); setTimeout(cb_up, 1);
@@ -1499,6 +1500,7 @@ var modal = (function () {
r.hide = function () { r.hide = function () {
timer.rm(onfocus); timer.rm(onfocus);
document.removeEventListener('selectionchange', onselch);
document.removeEventListener('focus', onfocus); document.removeEventListener('focus', onfocus);
document.removeEventListener('keydown', onkey); document.removeEventListener('keydown', onkey);
o.parentNode.removeChild(o); o.parentNode.removeChild(o);
@@ -1520,12 +1522,19 @@ var modal = (function () {
cb_ng(null); cb_ng(null);
} }
var onselch = function (e) {
r.nofocus = 30;
};
var onfocus = function (e) { var onfocus = function (e) {
var ctr = ebi('modalc'); var ctr = ebi('modalc');
if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement)) if (!ctr || !ctr.contains || !document.activeElement || ctr.contains(document.activeElement))
return; return;
setTimeout(function () { setTimeout(function () {
if (--r.nofocus > 0)
return;
if (ctr = ebi('modal-ok')) if (ctr = ebi('modal-ok'))
ctr.focus(); ctr.focus();
}, 20); }, 20);
@@ -1543,16 +1552,16 @@ var modal = (function () {
if (k == 'Enter') { if (k == 'Enter') {
if (ae && ae == eng) if (ae && ae == eng)
return ng(); return ng(e);
return ok(); return ok(e);
} }
if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng)) if ((k == 'ArrowLeft' || k == 'ArrowRight') && eng && (ae == eok || ae == eng))
return (ae == eok ? eng : eok).focus() || ev(e); return (ae == eok ? eng : eok).focus() || ev(e);
if (k == 'Escape') if (k == 'Escape')
return ng(); return ng(e);
} }
var next = function () { var next = function () {

View File

@@ -1,3 +1,156 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1125-1417 `v1.9.21` in a bind
## new features
* #63 the grid-view will open textfiles in the textfile viewer
* [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) now accepts user/group names (in addition to IDs)
## bugfixes
* the `Y` hotkey (which turns all links into download links) didn't affect the grid-view
* on some servers with unusual filesystem layouts (especially ubuntu-zfs), [prisonparty](https://github.com/9001/copyparty/blob/hovudstraum/bin/prisonparty.sh) would make an unholy mess of recursive bind-mounts, quickly running out of inodes and requiring a server reboot
* added several safeguards to avoid anything like this in the future
* mutex around jail setup/teardown to prevent racing other instances
* verify jail status by inspecting /proc/mounts between each folder to bind
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1121-2325 `v1.9.20` nice
## new features
* expensive subprocesses (ffmpeg, parsers, hooks) will run with `nice` to reduce cpu priority
* ...so listening to flacs won't grind everything else to a halt
## bugfixes
* the "load more" search results button didn't disappear if you hit the serverside limit
* the "show all" button for huge folders didn't disappear when navigating into a smaller folder
* trying to play the previous track when you're already playing the first track in a folder would send you on a wild adventure
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1119-1229 `v1.9.19` shadow filter
## bugfixes
* #61 Mk.II: filter search results to also handle this issue in volumes where reindexing is disabled, or (spoiler warning:) a bug in the directory indexer prevents shadowed files from being forgotten
* filekeys didn't always get included in the up2k UI for world-readable folders
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1118-2106 `v1.9.18` cache invalidation
## bugfixes
* #61 search results could contain stale records from overlapping volumes:
* if volume `/foo` is indexed and then volume `/foo/bar` is later created, any files inside the `bar` subfolder would not become forgotten in `/foo`'s database until something in `/foo` changes, which could be never
* as a result, search results could show stale metadata from `/foo`'s database regarding files in `/foo/bar`
* fix this by dropping caches and reindexing if copyparty is started with a different list of volumes than last time
* #60 client error when ctrl-clicking search results
* icons for the close/more buttons in search results are now pillow-10.x compatible
## other changes
* `u2c.exe`: upgraded certifi to version `2023.11.17`
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1111-1738 `v1.9.17` 11-11
## new features
* `u2c.py` / `u2c.exe` (the commandline uploader):
* `-x` is now case-insensitive
* if a file fails to upload after 30 attempts, give up (bitflips)
* add 5 sec delay before reattempts (configurable with `--cd`)
## bugfixes
* clients could crash the file indexer by uploading and then instantly deleting files (as some webdav clients tend to do)
* and fix some upload errorhandling which broke during a refactoring in v1.9.16
## other changes
* upgraded pyftpdlib to v1.5.9
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1104-2158 `v1.9.16` windedup
## breaking changes
* two of the prometheus metrics have changed slightly; see the [breaking changes readme section](https://github.com/9001/copyparty#breaking-changes)
* (i'm not familiar with prometheus so i'm not sure if this is a big deal)
## new features
* #58 versioned docker images! no longer just `latest`
* browser: the mkdir feature now accepts `foo/bar/qux` and `../foo` and `/bar`
* add 14 more prometheus metrics; see [readme](https://github.com/9001/copyparty#prometheus) for details
* connections, requests, malicious requests, volume state, file hashing/analyzation queues
* catch some more malicious requests in the autoban filters
* some malicious requests are now answered with HTTP 422, so that they count against `--ban-422`
## bugfixes
* windows: fix symlink-based upload deduplication
* MS decided to make symlinks relative to working-directory rather than destination-path...
* `--stats` would produce invalid metrics if a volume was offline
* minor improvements to password hashing ux:
* properly warn if `--ah-cli` or `--ah-gen` is used without `--ah-alg`
* support `^D` during `--ah-cli`
* browser-ux / cosmetics:
* fix toast/tooltip colors on splashpage
* easier to do partial text selection inside links (search results, breadcrumbs, uploads)
* more rclone-related hints on the connect-page
## other changes
* malformed http headers from clients are no longer included in the client error-message
* just in case there are deployments with a reverse-proxy inserting interesting stuff on the way in
* the serverlog still contains all the necessary info to debug your own clients
* updated [example nginx config](https://github.com/9001/copyparty/blob/hovudstraum/contrib/nginx/copyparty.conf) to recover faster from brief server outages
* the default value of `fail_timeout` (10sec) makes nginx cache the outage for longer than necessary
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1024-1643 `v1.9.15` expand placeholder
[made it just in time!](https://a.ocv.me/pub/g/nerd-stuff/PXL_20231024_170348367.jpg) (EDIT: nevermind, three of the containers didn't finish uploading to ghcr before takeoff ;_; all up now)
## new features
* #56 placeholder variables in markdown documents and prologue/epilogue html files
* default-disabled; must be enabled globally with `--exp` or per-volume with volflag `exp`
* `{{self.ip}}` becomes the client IP; see [/srv/expand/README.md](https://github.com/9001/copyparty/blob/hovudstraum/srv/expand/README.md) for more examples
* dynamic-range-compressor: reduced volume jumps between songs when enabled
## bugfixes
* v1.9.14 broke the `scan` volflag, causing volume rescans to happen every 10sec if enabled
* its global counterpart `--re-maxage` was not affected
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1021-1443 `v1.9.14` uptime
## new features
* search for files by upload time
* option to display upload time in directory listings
* enable globally with `-e2d -mte +.up_at` or per-volume with volflags `e2d,mte=+.up_at`
* has a ~17% performance impact on directory listings
* [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression) in the audioplayer settings
* `--ban-404` is now default-enabled
* the turbo-uploader will now un-turbo when necessary to avoid banning itself
* this only affects accounts with permissions `g`, `G`, or `h`
* accounts with read-access (which are able to see directory listings anyways) and accounts with write-only access are no longer affected by `--ban-404` or `--ban-url`
## bugfixes
* #55 clients could hit the `--url-ban` filter when uploading over webdav
* fixed by limiting `--ban-404` and `--ban-url` to accounts with permission `g`, `G`, or `h`
* fixed 20% performance drop in python 3.12 due to utcfromtimestamp deprecation
* but 3.12.0 is still 5% slower than 3.11.6 for some reason
* volume listing on startup would display some redundant info
## other changes
* timeout for unfinished uploads increased from 6 to 24 hours
* and is now configurable with `--snap-drop`
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀ ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-1015-2006 `v1.9.12` more buttons # 2023-1015-2006 `v1.9.12` more buttons

View File

@@ -0,0 +1,33 @@
# not actually YAML but lets pretend:
# -*- mode: yaml -*-
# vim: ft=yaml:
[global]
e2dsa # enable file indexing and filesystem scanning
e2ts # enable multimedia indexing
ansi # enable colors in log messages
# q, lo: /cfg/log/%Y-%m%d.log # log to file instead of docker
# ftp: 3921 # enable ftp server on port 3921
# p: 3939 # listen on another port
# ipa: 10.89. # only allow connections from 10.89.*
# df: 16 # stop accepting uploads if less than 16 GB free disk space
# ver # show copyparty version in the controlpanel
# grid # show thumbnails/grid-view by default
# theme: 2 # monokai
# name: datasaver # change the server-name that's displayed in the browser
# stats, nos-dup # enable the prometheus endpoint, but disable the dupes counter (too slow)
# no-robots, force-js # make it harder for search engines to read your server
[accounts]
ed: wark # username: password
[/] # create a volume at "/" (the webroot), which will
/w # share /w (the docker data volume)
accs:
rw: * # everyone gets read-write access, but
rwmda: ed # the user "ed" gets read-write-move-delete-admin

View File

@@ -0,0 +1,20 @@
version: '3'
services:
copyparty:
image: copyparty/ac:latest
container_name: copyparty
user: "1000:1000"
ports:
- 3923:3923
volumes:
- ./:/cfg:z
- /path/to/your/fileshare/top/folder:/w:z
stop_grace_period: 15s # thumbnailer is allowed to continue finishing up for 10s after the shutdown signal
healthcheck:
test: ["CMD-SHELL", "wget --spider -q 127.0.0.1:3923/?reset"]
interval: 1m
timeout: 2s
retries: 5
start_period: 15s

View File

@@ -28,10 +28,6 @@ https://github.com/nayuki/QR-Code-generator/
C: Project Nayuki C: Project Nayuki
L: MIT L: MIT
https://github.com/python/cpython/blob/3.10/Lib/asyncore.py
C: 1996 Sam Rushing
L: ISC
https://github.com/ahupp/python-magic/ https://github.com/ahupp/python-magic/
C: 2001-2014 Adam Hupp C: 2001-2014 Adam Hupp
L: MIT L: MIT

View File

@@ -100,6 +100,10 @@ include_trailing_comma = true
[tool.bandit] [tool.bandit]
skips = ["B104", "B110", "B112"] skips = ["B104", "B110", "B112"]
[tool.ruff]
line-length = 120
ignore = ["E402", "E722"]
# ===================================================================== # =====================================================================
[tool.pylint.MAIN] [tool.pylint.MAIN]

View File

@@ -17,11 +17,15 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
* if you are using rootless podman, remove `-u 1000` * if you are using rootless podman, remove `-u 1000`
* if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`) * if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`)
this example is also available as a podman-compatible [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose); example usage: `docker-compose up` (you may need to `systemctl enable --now podman.socket` or similar)
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏 i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
## configuration ## configuration
> this section basically explains how the [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose) works, so you may look there instead
the container has the same default config as the sfx and the pypi module, meaning it will listen on port 3923 and share the "current folder" (`/w` inside the container) as read-write for anyone the container has the same default config as the sfx and the pypi module, meaning it will listen on port 3923 and share the "current folder" (`/w` inside the container) as read-write for anyone
the recommended way to configure copyparty inside a container is to mount a folder which has one or more [config files](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) inside; `-v /your/config/folder:/cfg` the recommended way to configure copyparty inside a container is to mount a folder which has one or more [config files](https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf) inside; `-v /your/config/folder:/cfg`

View File

@@ -141,12 +141,25 @@ filt=
} }
[ $push ] && { [ $push ] && {
ver=$(
python3 ../../dist/copyparty-sfx.py --version 2>/dev/null |
awk '/^copyparty v/{sub(/-.*/,"");sub(/v/,"");print$2;exit}'
)
echo $ver | grep -E '[0-9]\.[0-9]' || {
echo no ver
exit 1
}
for i in $dhub_order; do for i in $dhub_order; do
printf '\ndockerhub %s\n' $i
podman manifest push --all copyparty-$i copyparty/$i:$ver
podman manifest push --all copyparty-$i copyparty/$i:latest podman manifest push --all copyparty-$i copyparty/$i:latest
done done &
for i in $ghcr_order; do for i in $ghcr_order; do
printf '\nghcr %s\n' $i
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:$ver
podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest podman manifest push --all copyparty-$i ghcr.io/9001/copyparty-$i:latest
done done &
wait
} }
echo ok echo ok

View File

@@ -205,26 +205,22 @@ necho() {
mv {markupsafe,jinja2} j2/ mv {markupsafe,jinja2} j2/
necho collecting pyftpdlib necho collecting pyftpdlib
f="../build/pyftpdlib-1.5.8.tar.gz" f="../build/pyftpdlib-1.5.9.tar.gz"
[ -e "$f" ] || [ -e "$f" ] ||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.8.tar.gz; (url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.9.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f) wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f tar -zxf $f
mv pyftpdlib-release-*/pyftpdlib . mv pyftpdlib-release-*/pyftpdlib .
rm -rf pyftpdlib-release-* pyftpdlib/test rm -rf pyftpdlib-release-* pyftpdlib/test
for f in pyftpdlib/_async{hat,ore}.py; do
[ -e "$f" ] || continue;
iawk 'NR<4||NR>27||!/^#/;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' $f
done
mkdir ftp/ mkdir ftp/
mv pyftpdlib ftp/ mv pyftpdlib ftp/
necho collecting asyncore, asynchat
for n in asyncore.py asynchat.py; do
f=../build/$n
[ -e "$f" ] ||
(url=https://raw.githubusercontent.com/python/cpython/c4d45ee670c09d4f6da709df072ec80cb7dfad22/Lib/$n;
wget -O$f "$url" || curl -L "$url" >$f)
done
necho collecting python-magic necho collecting python-magic
v=0.4.27 v=0.4.27
f="../build/python-magic-$v.tar.gz" f="../build/python-magic-$v.tar.gz"
@@ -293,12 +289,6 @@ necho() {
(cd "${x%/*}"; cp -p "../$(cat "${x##*/}")" ${x##*/}) (cd "${x%/*}"; cp -p "../$(cat "${x##*/}")" ${x##*/})
done done
# insert asynchat
mkdir copyparty/vend
for n in asyncore.py asynchat.py; do
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
done
rm -f copyparty/stolen/*/README.md rm -f copyparty/stolen/*/README.md
# remove type hints before build instead # remove type hints before build instead
@@ -419,7 +409,7 @@ iawk '/^ {0,4}[^ ]/{s=0}/^ {4}def (serve_forever|_loop)/{s=1}!s' ftp/pyftpdlib/s
rm -f ftp/pyftpdlib/{__main__,prefork}.py rm -f ftp/pyftpdlib/{__main__,prefork}.py
[ $no_ftp ] && [ $no_ftp ] &&
rm -rf copyparty/ftpd.py ftp asyncore.py asynchat.py && rm -rf copyparty/ftpd.py ftp &&
sed -ri '/\.ftp/d' copyparty/svchub.py sed -ri '/\.ftp/d' copyparty/svchub.py
[ $no_smb ] && [ $no_smb ] &&
@@ -576,8 +566,8 @@ nf=$(ls -1 "$zdir"/arc.* 2>/dev/null | wc -l)
cat ../$bdir/COPYING.txt) >> copyparty/res/COPYING.txt || cat ../$bdir/COPYING.txt) >> copyparty/res/COPYING.txt ||
echo "copying.txt 404 pls rebuild" echo "copying.txt 404 pls rebuild"
mv ftp/* j2/* copyparty/vend/* . mv ftp/* j2/* .
rm -rf ftp j2 py2 py37 copyparty/vend rm -rf ftp j2 py2 py37
(cd copyparty; tar -cvf z.tar $t; rm -rf $t) (cd copyparty; tar -cvf z.tar $t; rm -rf $t)
cd .. cd ..
pyoxidizer build --release --target-triple $tgt pyoxidizer build --release --target-triple $tgt

View File

@@ -9,7 +9,7 @@ f23615c522ed58b9a05978ba4c69c06224590f3a6adbd8e89b31838b181a57160739ceff1fc2ba6f
3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl 3c5adf0a36516d284a2ede363051edc1bcc9df925c5a8a9fa2e03cab579dd8d847fdad42f7fd5ba35992e08234c97d2dbfec40a9d12eec61c8dc03758f2bd88e typing_extensions-4.4.0-py3-none-any.whl
8d16a967a0a7872a7575b1005cf66915deacda6ee8611fbb52f42fc3e3beb2f901a5140c942a5d146bd412b92bfa9cbadd82beeba83df6d70930c6dc26608a5b upx-4.1.0-win32.zip 8d16a967a0a7872a7575b1005cf66915deacda6ee8611fbb52f42fc3e3beb2f901a5140c942a5d146bd412b92bfa9cbadd82beeba83df6d70930c6dc26608a5b upx-4.1.0-win32.zip
# u2c (win7) # u2c (win7)
4562b1065c6bce7084eb575b654985c990e26034bfcd8db54629312f43ac737e264db7a2b4d8b797e09919a485cbc6af3fd0931690b7ed79b62bcc0736aec9fc certifi-2023.7.22-py3-none-any.whl f3390290b896019b2fa169932390e4930d1c03c014e1f6db2405ca2eb1f51f5f5213f725885853805b742997b0edb369787e5c0069d217bc4e8b957f847f58b6 certifi-2023.11.17-py3-none-any.whl
904eb57b13bea80aea861de86987e618665d37fa9ea0856e0125a9ba767a53e5064de0b9c4735435a2ddf4f16f7f7d2c75a682e1de83d9f57922bdca8e29988c charset_normalizer-3.3.0-cp37-cp37m-win32.whl 904eb57b13bea80aea861de86987e618665d37fa9ea0856e0125a9ba767a53e5064de0b9c4735435a2ddf4f16f7f7d2c75a682e1de83d9f57922bdca8e29988c charset_normalizer-3.3.0-cp37-cp37m-win32.whl
ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl ffdd45326f4e91c02714f7a944cbcc2fdd09299f709cfa8aec0892053eef0134fb80d9ba3790afd319538a86feb619037cbf533e2f5939cb56b35bb17f56c858 idna-3.4-py3-none-any.whl
b795abb26ba2f04f1afcfb196f21f638014b26c8186f8f488f1c2d91e8e0220962fbd259dbc9c3875222eb47fc95c73fc0606aaa6602b9ebc524809c9ba3501f requests-2.31.0-py3-none-any.whl b795abb26ba2f04f1afcfb196f21f638014b26c8186f8f488f1c2d91e8e0220962fbd259dbc9c3875222eb47fc95c73fc0606aaa6602b9ebc524809c9ba3501f requests-2.31.0-py3-none-any.whl

View File

@@ -106,20 +106,19 @@ def meichk():
if filt not in sys.executable: if filt not in sys.executable:
filt = os.path.basename(sys.executable) filt = os.path.basename(sys.executable)
pids = [] hits = []
ptn = re.compile(r"^([^\s]+)\s+([0-9]+)")
try: try:
procs = sp.check_output("tasklist").decode("utf-8", "replace") cmd = "tasklist /fo csv".split(" ")
procs = sp.check_output(cmd).decode("utf-8", "replace")
except: except:
procs = "" # winpe procs = "" # winpe
for ln in procs.splitlines(): for ln in procs.split("\n"):
m = ptn.match(ln) if filt in ln.split('"')[:2][-1]:
if m and filt in m.group(1).lower(): hits.append(ln)
pids.append(int(m.group(2)))
mod = os.path.dirname(os.path.realpath(__file__)) mod = os.path.dirname(os.path.realpath(__file__))
if os.path.basename(mod).startswith("_MEI") and len(pids) == 2: if os.path.basename(mod).startswith("_MEI") and len(hits) == 2:
meicln(mod) meicln(mod)

View File

@@ -33,7 +33,7 @@ fns=(
) )
[ $w7 ] && fns+=( [ $w7 ] && fns+=(
pyinstaller-5.13.2-py3-none-win32.whl pyinstaller-5.13.2-py3-none-win32.whl
certifi-2022.12.7-py3-none-any.whl certifi-2023.11.17-py3-none-any.whl
chardet-5.1.0-py3-none-any.whl chardet-5.1.0-py3-none-any.whl
idna-3.4-py3-none-any.whl idna-3.4-py3-none-any.whl
requests-2.28.2-py3-none-any.whl requests-2.28.2-py3-none-any.whl

View File

@@ -59,9 +59,6 @@ copyparty/th_srv.py,
copyparty/u2idx.py, copyparty/u2idx.py,
copyparty/up2k.py, copyparty/up2k.py,
copyparty/util.py, copyparty/util.py,
copyparty/vend,
copyparty/vend/asynchat.py,
copyparty/vend/asyncore.py,
copyparty/web, copyparty/web,
copyparty/web/a, copyparty/web/a,
copyparty/web/a/__init__.py, copyparty/web/a/__init__.py,

View File

@@ -16,16 +16,11 @@ def uncomment(fpath):
orig = f.read().decode("utf-8") orig = f.read().decode("utf-8")
out = "" out = ""
for ln in orig.split("\n"):
if not ln.startswith("#"):
break
out += ln + "\n"
io_obj = io.StringIO(orig) io_obj = io.StringIO(orig)
prev_toktype = tokenize.INDENT prev_toktype = tokenize.INDENT
last_lineno = -1 last_lineno = -1
last_col = 0 last_col = 0
code = False
for tok in tokenize.generate_tokens(io_obj.readline): for tok in tokenize.generate_tokens(io_obj.readline):
# print(repr(tok)) # print(repr(tok))
token_type = tok[0] token_type = tok[0]
@@ -53,7 +48,11 @@ def uncomment(fpath):
out += token_string out += token_string
else: else:
out += '"a"' out += '"a"'
elif token_type != tokenize.COMMENT or is_legalese: elif token_type != tokenize.COMMENT:
out += token_string
if not code and token_string.strip():
code = True
elif is_legalese or (not start_col and not code):
out += token_string out += token_string
else: else:
if out.rstrip(" ").endswith("\n"): if out.rstrip(" ").endswith("\n"):

26
srv/expand/README.md Normal file
View File

@@ -0,0 +1,26 @@
## text expansion
enable expansion of placeholder variables in `README.md` and prologue/epilogue files with `--exp` and customize the list of allowed placeholders to expand using `--exp-md` and `--exp-lg`
| explanation | placeholder |
| -------------------- | -------------------- |
| your ip address | {{self.ip}} |
| your user-agent | {{self.ua}} |
| your username | {{self.uname}} |
| the `Host` you see | {{self.host}} |
| server unix time | {{srv.itime}} |
| server datetime | {{srv.htime}} |
| server name | {{cfg.name}} |
| logout after | {{cfg.logout}} hours |
| vol reindex interval | {{vf.scan}} |
| thumbnail size | {{vf.thsize}} |
| your country | {{hdr.cf_ipcountry}} |
placeholders starting with...
* `self.` are grabbed from copyparty's internal state; anything in `httpcli.py` is fair game
* `cfg.` are the global server settings
* `vf.` are the volflags of the current volume
* `hdr.` are grabbed from the client headers; any header is supported, just add it (in lowercase) to the allowlist
* `srv.` are processed inside the `_expand` function in httpcli
for example (bad example), `hdr_cf_ipcountry` maps to the header `CF-IPCountry` (which is generated by cloudflare before the request is passed on to your server / copyparty)

View File

@@ -1,16 +1,16 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import itertools
import re import re
import sys import sys
import time import time
import itertools
from . import util as tu
from .util import Cfg
from copyparty.authsrv import AuthSrv from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli from copyparty.httpcli import HttpCli
from . import util as tu
from .util import Cfg
atlas = ["%", "25", "2e", "2f", ".", "/"] atlas = ["%", "25", "2e", "2f", ".", "/"]

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import sys
import runpy import runpy
import sys
host = sys.argv[1] host = sys.argv[1]
sys.argv = sys.argv[:1] + sys.argv[2:] sys.argv = sys.argv[:1] + sys.argv[2:]

View File

@@ -4,9 +4,9 @@ from __future__ import print_function, unicode_literals
import re import re
import unittest import unittest
from xml.etree import ElementTree as ET from xml.etree import ElementTree as ET
from copyparty.dxml import parse_xml, BadXML, mkenod, mktnod
from copyparty.dxml import BadXML, mkenod, mktnod, parse_xml
ET.register_namespace("D", "DAV:") ET.register_namespace("D", "DAV:")

View File

@@ -4,18 +4,17 @@ from __future__ import print_function, unicode_literals
import io import io
import os import os
import time
import shutil
import pprint import pprint
import shutil
import tarfile import tarfile
import tempfile import tempfile
import time
import unittest import unittest
from tests import util as tu
from tests.util import Cfg, eprint
from copyparty.authsrv import AuthSrv from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli from copyparty.httpcli import HttpCli
from tests import util as tu
from tests.util import Cfg, eprint
def hdr(query): def hdr(query):

View File

@@ -2,19 +2,18 @@
# coding: utf-8 # coding: utf-8
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os
import json import json
import os
import shutil import shutil
import tempfile import tempfile
import unittest import unittest
from textwrap import dedent from textwrap import dedent
from copyparty import util
from copyparty.authsrv import VFS, AuthSrv
from tests import util as tu from tests import util as tu
from tests.util import Cfg from tests.util import Cfg
from copyparty.authsrv import AuthSrv, VFS
from copyparty import util
class TestVFS(unittest.TestCase): class TestVFS(unittest.TestCase):
def setUp(self): def setUp(self):

View File

@@ -3,23 +3,23 @@
from __future__ import print_function, unicode_literals from __future__ import print_function, unicode_literals
import os import os
import re
import sys
import time
import shutil
import jinja2
import threading
import tempfile
import platform import platform
import re
import shutil
import subprocess as sp import subprocess as sp
import sys
import tempfile
import threading
import time
from argparse import Namespace from argparse import Namespace
import jinja2
WINDOWS = platform.system() == "Windows" WINDOWS = platform.system() == "Windows"
ANYWIN = WINDOWS or sys.platform in ["msys"] ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin" MACOS = platform.system() == "Darwin"
J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader) J2_ENV = jinja2.Environment(loader=jinja2.BaseLoader) # type: ignore
J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}\nJ2EOT") J2_FILES = J2_ENV.from_string("{{ files|join('\n') }}\nJ2EOT")
@@ -43,7 +43,7 @@ if MACOS:
from copyparty.__init__ import E from copyparty.__init__ import E
from copyparty.__main__ import init_E from copyparty.__main__ import init_E
from copyparty.util import Unrecv, FHC, Garda from copyparty.util import FHC, Garda, Unrecv
init_E(E) init_E(E)
@@ -83,8 +83,8 @@ def get_ramdisk():
for _ in range(10): for _ in range(10):
try: try:
_, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname]) _, _ = chkcmd(["diskutil", "eraseVolume", "HFS+", "cptd", devname])
with open("/Volumes/cptd/.metadata_never_index", "w") as f: with open("/Volumes/cptd/.metadata_never_index", "wb") as f:
f.write("orz") f.write(b"orz")
try: try:
shutil.rmtree("/Volumes/cptd/.fseventsd") shutil.rmtree("/Volumes/cptd/.fseventsd")
@@ -99,23 +99,23 @@ def get_ramdisk():
raise Exception("ramdisk creation failed") raise Exception("ramdisk creation failed")
ret = os.path.join(tempfile.gettempdir(), "copyparty-test") ret = os.path.join(tempfile.gettempdir(), "copyparty-test")
try: if not os.path.isdir(ret):
os.mkdir(ret) os.mkdir(ret)
finally:
return subdir(ret) return subdir(ret)
class Cfg(Namespace): class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None): def __init__(self, a=None, v=None, c=None):
ka = {} ka = {}
ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw rand smb th_no_crop vague_403 vc ver xdev xlink xvol" ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp exp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw rand smb th_no_crop vague_403 vc ver xdev xlink xvol"
ka.update(**{k: False for k in ex.split()}) ka.update(**{k: False for k in ex.split()})
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip" ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
ka.update(**{k: True for k in ex.split()}) ka.update(**{k: True for k in ex.split()})
ex = "css_browser hist js_browser no_forget no_hash no_idx nonsus_urls" ex = "ah_cli ah_gen css_browser hist js_browser no_forget no_hash no_idx nonsus_urls"
ka.update(**{k: None for k in ex.split()}) ka.update(**{k: None for k in ex.split()})
ex = "s_thead s_tbody th_convt" ex = "s_thead s_tbody th_convt"
@@ -130,6 +130,9 @@ class Cfg(Namespace):
ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm" ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
ka.update(**{k: [] for k in ex.split()}) ka.update(**{k: [] for k in ex.split()})
ex = "exp_lg exp_md"
ka.update(**{k: {} for k in ex.split()})
super(Cfg, self).__init__( super(Cfg, self).__init__(
a=a or [], a=a or [],
v=v or [], v=v or [],
@@ -153,10 +156,10 @@ class Cfg(Namespace):
class NullBroker(object): class NullBroker(object):
def say(*args): def say(self, *args):
pass pass
def ask(*args): def ask(self, *args):
pass pass
@@ -187,6 +190,7 @@ class VHttpSrv(object):
self.broker = NullBroker() self.broker = NullBroker()
self.prism = None self.prism = None
self.bans = {} self.bans = {}
self.nreq = 0
aliases = ["splash", "browser", "browser2", "msg", "md", "mde"] aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
self.j2 = {x: J2_FILES for x in aliases} self.j2 = {x: J2_FILES for x in aliases}
@@ -205,7 +209,7 @@ class VHttpSrv(object):
class VHttpConn(object): class VHttpConn(object):
def __init__(self, args, asrv, log, buf): def __init__(self, args, asrv, log, buf):
self.s = VSock(buf) self.s = VSock(buf)
self.sr = Unrecv(self.s, None) self.sr = Unrecv(self.s, None) # type: ignore
self.addr = ("127.0.0.1", "42069") self.addr = ("127.0.0.1", "42069")
self.args = args self.args = args
self.asrv = asrv self.asrv = asrv