Compare commits

...

39 Commits

Author SHA1 Message Date
ed
cafe53c055 v1.9.0 2023-08-20 22:02:40 +00:00
ed
7673beef72 actually impl --mc-hop (and improve --zm-spam) 2023-08-20 21:27:28 +00:00
ed
b28bfe64c0 explain apple bullshit 2023-08-20 22:09:00 +02:00
ed
135ece3fbd immediately allow uploading an interrupted and
deleted incomplete upload to another location
2023-08-20 19:16:35 +00:00
ed
bd3640d256 change to openmetrics 2023-08-20 18:50:14 +00:00
ed
fc0405c8f3 add prometheus metrics; closes #49 2023-08-20 17:58:06 +00:00
ed
7df890d964 wget: only allow http/https/ftp/ftps (#50):
these are all the protocols that are currently supported by wget,
so this has no practical effect aside from making sure we won't
suddenly get file:// support or something (which would be bad)
2023-08-20 09:47:50 +00:00
ed
8341041857 mdns: option to ignore spec to avoid issues on
networks where clients have multiple IPs of which some are subnets that
the copyparty server is not
2023-08-19 21:45:26 +00:00
ed
1b7634932d tar/zip-download: add opus transcoding filter 2023-08-19 19:40:46 +00:00
ed
48a3898aa6 suggest enabling the database on startup 2023-08-16 19:57:19 +00:00
ed
5d13ebb4ac avoid firefox-android quirk(?):
when repeatedly tapping the next-folder button, occasionally it will
reload the entire page instead of ajax'ing the directory contents.

Navigation happens by simulating a click in the directory sidebar,
so the incorrect behavior matches what would happen if the link to the
folder didn't have its onclick-handler attached, so should probably
double-check if there's some way for that to happen

Issue observed fairly easily in firefox on android, regardless if
copyparty is running locally or on a server in a different country.
Unable to reproduce with android-chrome or desktop-firefox

Could also be due to an addon (dark-reader, noscript, ublock-origin)

anyways, avoiding this by doing the navigation more explicitly
2023-08-16 19:56:47 +00:00
ed
015b87ee99 performance / cosmetic:
* js: use .call instead of .bind when possible
* when running without e2d, the message on startup regarding
  unfinished uploads didn't show the correct filesystem path
2023-08-16 19:32:43 +00:00
ed
0a48acf6be limit each column of the files table to screen width 2023-08-16 03:55:53 +00:00
ed
2b6a3afd38 fix iOS randomly increasing fontsize of some things:
* links which are wider than the display width
* probably input fields too
2023-08-16 03:47:19 +00:00
ed
18aa82fb2f make browser resizing smoother / less expensive 2023-08-15 16:55:19 +00:00
ed
f5407b2997 docker: persist autogenerated seeds, disable certgen, and
mention how to run the containers with selinux enabled
* assumes that a /cfg docker volume is provided
2023-08-15 15:07:33 +00:00
ed
474d5a155b android's got hella strict filename rules 2023-08-15 06:46:57 +02:00
ed
afcd98b794 mention some gotchas (thx noktuas) 2023-08-15 03:38:51 +02:00
ed
4f80e44ff7 option to exactly specify browser title prefix 2023-08-15 03:17:01 +02:00
ed
406e413594 hint at additional context in exceptions 2023-08-15 01:42:13 +02:00
ed
033b50ae1b u2c: exclude files by regex 2023-08-15 00:45:12 +02:00
ed
bee26e853b show server hostname in html titles:
* --doctitle defines most titles, prefixed with "--name: " by default
* the file browser is only prefixed with the --name itself
* --nth ("no-title-hostname") removes it
* also removed by --nih ("no-info-hostname")
2023-08-14 23:50:13 +02:00
ed
04a1f7040e adjustable timestamp resolution in log messages 2023-08-14 17:22:22 +02:00
ed
f9d5bb3b29 support upload by dragdrop from other browser windows,
hello from LO484 https://ocv.me/stuff/aircode.jpg
2023-07-28 21:43:40 +02:00
ed
ca0cd04085 update pkgs to 1.8.8 2023-07-25 16:25:27 +00:00
ed
999ee2e7bc v1.8.8 2023-07-25 15:50:48 +00:00
ed
1ff7f968e8 fix tls-cert regeneration on windows 2023-07-25 15:27:27 +00:00
ed
3966266207 remember ?edit and trailing-slash during login redirect 2023-07-25 15:14:47 +00:00
ed
d03e96a392 html5 strips the first leading LF in textareas; stop it 2023-07-25 14:16:54 +00:00
ed
4c843c6df9 fix md-editor lastmod cmp when browsercache is belligerent 2023-07-25 14:06:53 +00:00
ed
0896c5295c range-select fixes:
* dont crash when shiftclicking between folders
* remember origin when lazyloading more files
2023-07-25 14:06:31 +02:00
ed
cc0c9839eb update pkgs to 1.8.7 2023-07-23 16:16:49 +00:00
ed
d0aa20e17c v1.8.7 2023-07-23 15:43:38 +00:00
ed
1a658dedb7 fix infinite playback spin on servers with one single file 2023-07-23 14:52:42 +00:00
ed
8d376b854c this is the wrong way around 2023-07-23 14:10:23 +00:00
ed
490c16b01d be even stricter with ?hc 2023-07-23 13:23:52 +00:00
ed
2437a4e864 the CVE-2023-37474 fix was overly strict; loosen 2023-07-23 11:31:11 +00:00
ed
007d948cb9 fix GHSA-f54q-j679-p9hh: reflected-XSS in cookie-setters;
it was possible to set cookie values which contained newlines,
thus terminating the http header and bleeding into the body.

We now disallow control-characters in queries,
but still allow them in paths, as copyparty supports
filenames containing newlines and other mojibake.

The changes in `set_k304` are not necessary in fixing the vulnerability,
but makes the behavior more correct.
2023-07-23 10:55:08 +00:00
ed
335fcc8535 update pkgs to 1.8.6 2023-07-21 01:12:55 +00:00
45 changed files with 1002 additions and 194 deletions

View File

@@ -71,6 +71,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [themes](#themes)
* [complete examples](#complete-examples)
* [reverse-proxy](#reverse-proxy) - running copyparty next to other websites
* [prometheus](#prometheus) - metrics/stats can be enabled
* [packages](#packages) - the party might be closer than you think
* [arch package](#arch-package) - now [available on aur](https://aur.archlinux.org/packages/copyparty) maintained by [@icxes](https://github.com/icxes)
* [fedora package](#fedora-package) - now [available on copr-pypi](https://copr.fedorainfracloud.org/coprs/g/copr/PyPI/)
@@ -84,7 +85,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [iOS shortcuts](#iOS-shortcuts) - there is no iPhone app, but
* [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
* [client-side](#client-side) - when uploading files
* [security](#security) - some notes on hardening
* [security](#security) - there is a [discord server](https://discord.gg/25J8CdTT6G)
* [gotchas](#gotchas) - behavior that might be unexpected
* [cors](#cors) - cross-site request config
* [password hashing](#password-hashing) - you can hash passwords
@@ -109,7 +110,7 @@ just run **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/
* or install through pypi: `python3 -m pip install --user -U copyparty`
* or if you cannot install python, you can use [copyparty.exe](#copypartyexe) instead
* or install [on arch](#arch-package) [on NixOS](#nixos-module) [through nix](#nix-package)
* or install [on arch](#arch-package) [on fedora](#fedora-package) [on NixOS](#nixos-module) [through nix](#nix-package)
* or if you are on android, [install copyparty in termux](#install-on-android)
* or if you prefer to [use docker](./scripts/docker/) 🐋 you can do that too
* docker has all deps built-in, so skip this step:
@@ -295,6 +296,7 @@ server notes:
* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
* also happens on mergerfs, so put the db elsewhere
* Ubuntu: dragging files from certain folders into firefox or chrome is impossible
* due to snap security policies -- see `snap connections firefox` for the allowlist, `removable-media` permits all of `/mnt` and `/media` apparently
@@ -518,6 +520,10 @@ you can also zip a selection of files or folders by clicking them in the browser
![copyparty-zipsel-fs8](https://user-images.githubusercontent.com/241032/129635374-e5136e01-470a-49b1-a762-848e8a4c9cdc.png)
cool trick: download a folder by appending url-params `?tar&opus` to transcode all audio files (except aac|m4a|mp3|ogg|opus|wma) to opus before they're added to the archive
* super useful if you're 5 minutes away from takeoff and realize you don't have any music on your phone but your server only has flac files and downloading those will burn through all your data + there wouldn't be enough time anyways
* and url-params `&j` / `&w` produce jpeg/webm thumbnails/spectrograms instead of the original audio/video/images
## uploading
@@ -700,7 +706,7 @@ open the `[🎺]` media-player-settings tab to configure it,
* `[loop]` keeps looping the folder
* `[next]` plays into the next folder
* transcode:
* `[flac]` convers `flac` and `wav` files into opus
* `[flac]` converts `flac` and `wav` files into opus
* `[aac]` converts `aac` and `m4a` files into opus
* `[oth]` converts all other known formats into opus
* `aac|ac3|aif|aiff|alac|alaw|amr|ape|au|dfpwm|dts|flac|gsm|it|m4a|mo3|mod|mp2|mp3|mpc|mptm|mt2|mulaw|ogg|okt|opus|ra|s3m|tak|tta|ulaw|wav|wma|wv|xm|xpk`
@@ -1003,6 +1009,9 @@ you can also set transaction limits which apply per-IP and per-volume, but these
* `:c,maxn=250,3600` allows 250 files over 1 hour from each IP (tracked per-volume)
* `:c,maxb=1g,300` allows 1 GiB total over 5 minutes from each IP (tracked per-volume)
notes:
* `vmaxb` and `vmaxn` require either the `e2ds` volflag or `-e2dsa` global-option
## compress uploads
@@ -1225,6 +1234,7 @@ you can either:
* if copyparty says `incorrect --rp-loc or webserver config; expected vpath starting with [...]` it's likely because the webserver is stripping away the proxy location from the request URLs -- see the `ProxyPass` in the apache example below
some reverse proxies (such as [Caddy](https://caddyserver.com/)) can automatically obtain a valid https/tls certificate for you, and some support HTTP/2 and QUIC which could be a nice speed boost
* **warning:** nginx-QUIC is still experimental and can make uploads much slower, so HTTP/2 is recommended for now
example webserver configs:
@@ -1232,6 +1242,51 @@ example webserver configs:
* [apache2 config](contrib/apache/copyparty.conf) -- location-based
## prometheus
metrics/stats can be enabled at URL `/.cpr/metrics` for grafana / prometheus / etc (openmetrics 1.0.0)
must be enabled with `--stats` since it reduces startup time a tiny bit, and you probably want `-e2dsa` too
the endpoint is only accessible by `admin` accounts, meaning the `a` in `rwmda` in the following example commandline: `python3 -m copyparty -a ed:wark -v /mnt/nas::rwmda,ed --stats -e2dsa`
follow a guide for setting up `node_exporter` except have it read from copyparty instead; example `/etc/prometheus/prometheus.yml` below
```yaml
scrape_configs:
- job_name: copyparty
metrics_path: /.cpr/metrics
basic_auth:
password: wark
static_configs:
- targets: ['192.168.123.1:3923']
```
currently the following metrics are available,
* `cpp_uptime_seconds`
* `cpp_bans` number of banned IPs
and these are available per-volume only:
* `cpp_disk_size_bytes` total HDD size
* `cpp_disk_free_bytes` free HDD space
and these are per-volume and `total`:
* `cpp_vol_bytes` size of all files in volume
* `cpp_vol_files` number of files
* `cpp_dupe_bytes` disk space presumably saved by deduplication
* `cpp_dupe_files` number of dupe files
* `cpp_unf_bytes` currently unfinished / incoming uploads
some of the metrics have additional requirements to function correctly,
* `cpp_vol_*` requires either the `e2ds` volflag or `-e2dsa` global-option
the following options are available to disable some of the metrics:
* `--nos-hdd` disables `cpp_disk_*` which can prevent spinning up HDDs
* `--nos-vol` disables `cpp_vol_*` which reduces server startup time
* `--nos-dup` disables `cpp_dupe_*` which reduces the server load caused by prometheus queries
* `--nos-unf` disables `cpp_unf_*` for no particular purpose
# packages
the party might be closer than you think
@@ -1537,6 +1592,8 @@ when uploading files,
# security
there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all important updates (at the lack of better ideas)
some notes on hardening
* set `--rproxy 0` if your copyparty is directly facing the internet (not through a reverse-proxy)

View File

@@ -37,6 +37,10 @@ def main():
if "://" not in url:
url = "https://" + url
proto = url.split("://")[0].lower()
if proto not in ("http", "https", "ftp", "ftps"):
raise Exception("bad proto {}".format(proto))
os.chdir(inf["ap"])
name = url.split("?")[0].split("/")[-1]

View File

@@ -65,6 +65,10 @@ def main():
if "://" not in url:
url = "https://" + url
proto = url.split("://")[0].lower()
if proto not in ("http", "https", "ftp", "ftps"):
raise Exception("bad proto {}".format(proto))
os.chdir(fdir)
name = url.split("?")[0].split("/")[-1]

View File

@@ -1,8 +1,8 @@
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals
S_VERSION = "1.9"
S_BUILD_DT = "2023-05-07"
S_VERSION = "1.10"
S_BUILD_DT = "2023-08-15"
"""
u2c.py: upload to copyparty
@@ -14,6 +14,7 @@ https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py
- if something breaks just try again and it'll autoresume
"""
import re
import os
import sys
import stat
@@ -411,10 +412,11 @@ def walkdir(err, top, seen):
err.append((ap, str(ex)))
def walkdirs(err, tops):
def walkdirs(err, tops, excl):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
if not VT100:
excl = excl.replace("/", r"\\")
za = []
for td in tops:
try:
@@ -431,6 +433,8 @@ def walkdirs(err, tops):
za = [x.replace(b"/", b"\\") for x in za]
tops = za
ptn = re.compile(excl.encode("utf-8") or b"\n")
for top in tops:
isdir = os.path.isdir(top)
if top[-1:] == sep:
@@ -443,6 +447,8 @@ def walkdirs(err, tops):
if isdir:
for ap, inf in walkdir(err, top, []):
if ptn.match(ap):
continue
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
@@ -654,7 +660,7 @@ class Ctl(object):
nfiles = 0
nbytes = 0
err = []
for _, _, inf in walkdirs(err, ar.files):
for _, _, inf in walkdirs(err, ar.files, ar.x):
if stat.S_ISDIR(inf.st_mode):
continue
@@ -696,7 +702,7 @@ class Ctl(object):
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs([], ar.files)
self.filegen = walkdirs([], ar.files, ar.x)
self.recheck = [] # type: list[File]
if ar.safe:
@@ -1097,6 +1103,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-v", action="store_true", help="verbose")
ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("-x", type=unicode, metavar="REGEX", default="", help="skip file if filesystem-abspath matches REGEX, example: '.*/\.hist/.*'")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap.add_argument("--version", action="store_true", help="show version and exit")
@@ -1113,7 +1120,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)")
ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles and macos)")
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

View File

@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.8.4"
pkgver="1.8.8"
pkgrel=1
pkgdesc="Portable file sharing hub"
arch=("any")
@@ -20,7 +20,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("730455edb9e80571c7e01a9e306463c02dd8dc8b0b5bc1b6da6a0c1f458abec1")
sha256sums=("e4ee5198ecf335b49c973be2110afd519b4bccb6f2e3297c23d11536752171b9")
build() {
cd "${srcdir}/${pkgname}-${pkgver}"

View File

@@ -1,5 +1,5 @@
{
"url": "https://github.com/9001/copyparty/releases/download/v1.8.4/copyparty-sfx.py",
"version": "1.8.4",
"hash": "sha256-FTsQyheZNbWCn1kbN2CfgCTVZ8ceyNXZO8OhaxACUwg="
"url": "https://github.com/9001/copyparty/releases/download/v1.8.8/copyparty-sfx.py",
"version": "1.8.8",
"hash": "sha256-6tdhWti4w8s3MUg6/Ccpn9fooFsjq84uyhZFeRGV/Yg="
}

View File

@@ -716,6 +716,40 @@ def get_sects():
"""
),
],
[
"zm",
"mDNS debugging",
dedent(
"""
the mDNS protocol is multicast-based, which means there are thousands
of fun and interesting ways for it to break unexpectedly
things to check if it does not work at all:
* is there a firewall blocking port 5353 on either the server or client?
(for example, clients may be able to send queries to copyparty,
but the replies could get lost)
* is multicast accidentally disabled on either the server or client?
(look for mDNS log messages saying "new client on [...]")
* the router/switch must be multicast and igmp capable
things to check if it works for a while but then it doesn't:
* is there a firewall blocking port 5353 on either the server or client?
(copyparty may be unable to see the queries from the clients, but the
clients may still be able to see the initial unsolicited announce,
so it works for about 2 minutes after startup until TTL expires)
* does the client have multiple IPs on its interface, and some of the
IPs are in subnets which the copyparty server is not a member of?
for both of the above intermittent issues, try --zm-spam 30
(not spec-compliant but nothing will mind)
"""
),
],
]
@@ -846,7 +880,7 @@ def add_zeroconf(ap):
def add_zc_mdns(ap):
ap2 = ap.add_argument_group("Zeroconf-mDNS options")
ap2 = ap.add_argument_group("Zeroconf-mDNS options; also see --help-zm")
ap2.add_argument("--zm", action="store_true", help="announce the enabled protocols over mDNS (multicast DNS-SD) -- compatible with KDE, gnome, macOS, ...")
ap2.add_argument("--zm-on", metavar="NETS", type=u, default="", help="enable zeroconf ONLY on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zm-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
@@ -860,8 +894,9 @@ def add_zc_mdns(ap):
ap2.add_argument("--zm-lf", metavar="PATH", type=u, default="", help="link a specific folder for ftp shares")
ap2.add_argument("--zm-ls", metavar="PATH", type=u, default="", help="link a specific folder for smb shares")
ap2.add_argument("--zm-mnic", action="store_true", help="merge NICs which share subnets; assume that same subnet means same network")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working")
ap2.add_argument("--zm-msub", action="store_true", help="merge subnets on each NIC -- always enabled for ipv6 -- reduces network load, but gnome-gvfs clients may stop working, and clients cannot be in subnets that the server is not")
ap2.add_argument("--zm-noneg", action="store_true", help="disable NSEC replies -- try this if some clients don't see copyparty")
ap2.add_argument("--zm-spam", metavar="SEC", type=float, default=0, help="send unsolicited announce every SEC; useful if clients have IPs in a subnet which doesn't overlap with the server")
def add_zc_ssdp(ap):
@@ -927,6 +962,15 @@ def add_hooks(ap):
ap2.add_argument("--xban", metavar="CMD", type=u, action="append", help="execute CMD if someone gets banned (pw/404)")
def add_stats(ap):
ap2 = ap.add_argument_group('grafana/prometheus metrics endpoint')
ap2.add_argument("--stats", action="store_true", help="enable openmetrics at /.cpr/metrics for admin accounts")
ap2.add_argument("--nos-hdd", action="store_true", help="disable disk-space metrics (used/free space)")
ap2.add_argument("--nos-vol", action="store_true", help="disable volume size metrics (num files, total bytes, vmaxb/vmaxn)")
ap2.add_argument("--nos-dup", action="store_true", help="disable dupe-files metrics (good idea; very slow)")
ap2.add_argument("--nos-unf", action="store_true", help="disable unfinished-uploads metrics")
def add_yolo(ap):
ap2 = ap.add_argument_group('yolo options')
ap2.add_argument("--allow-csrf", action="store_true", help="disable csrf protections; let other domains/sites impersonate you through cross-site requests")
@@ -940,6 +984,7 @@ def add_optouts(ap):
ap2.add_argument("--no-dav", action="store_true", help="disable webdav support")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nth", action="store_true", help="no title hostname; don't show --name in <title>")
ap2.add_argument("-nih", action="store_true", help="no info hostname -- don't show in UI")
ap2.add_argument("-nid", action="store_true", help="no info disk-usage -- don't show in UI")
ap2.add_argument("-nb", action="store_true", help="no powered-by-copyparty branding in UI")
@@ -995,6 +1040,7 @@ def add_logging(ap):
ap2.add_argument("--no-ansi", action="store_true", default=not VT100, help="disable colors; same as environment-variable NO_COLOR")
ap2.add_argument("--ansi", action="store_true", help="force colors; overrides environment-variable NO_COLOR")
ap2.add_argument("--no-voldump", action="store_true", help="do not list volumes and permissions on startup")
ap2.add_argument("--log-tdec", type=int, default=3, help="timestamp resolution / number of timestamp decimals")
ap2.add_argument("--log-conn", action="store_true", help="debug: print tcp-server msgs")
ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
@@ -1039,6 +1085,7 @@ def add_thumbnail(ap):
def add_transcoding(ap):
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
ap2.add_argument("--no-bacode", action="store_true", help="disable batch audio transcoding by folder download (zip/tar)")
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after SEC seconds")
@@ -1100,7 +1147,8 @@ def add_ui(ap, retry):
ap2.add_argument("--ih", action="store_true", help="if a folder contains index.html, show that instead of the directory listing by default (can be changed in the client settings UI)")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--txt-max", metavar="KiB", type=int, default=64, help="max size of embedded textfiles on ?doc= (anything bigger will be lazy-loaded by JS)")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty @ --name", help="title / service-name to show in html documents")
ap2.add_argument("--bname", metavar="TXT", type=u, default="--name", help="server name (displayed in filebrowser document title)")
ap2.add_argument("--pb-url", metavar="URL", type=u, default="https://github.com/9001/copyparty", help="powered-by link; disable with -np")
ap2.add_argument("--ver", action="store_true", help="show version on the control panel (incompatible by -np)")
ap2.add_argument("--md-sbf", metavar="FLAGS", type=u, default="downloads forms popups scripts top-navigation-by-user-activation", help="list of capabilities to ALLOW for README.md docs (volflag=md_sbf); see https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox")
@@ -1172,6 +1220,7 @@ def run_argparse(
add_yolo(ap)
add_handlers(ap)
add_hooks(ap)
add_stats(ap)
add_ui(ap, retry)
add_admin(ap)
add_logging(ap)

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 8, 6)
CODENAME = "argon"
BUILD_DT = (2023, 7, 21)
VERSION = (1, 9, 0)
CODENAME = "prometheable"
BUILD_DT = (2023, 8, 20)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -52,6 +52,11 @@ if TYPE_CHECKING:
LEELOO_DALLAS = "leeloo_dallas"
SEE_LOG = "see log for details"
SSEELOG = " ({})".format(SEE_LOG)
BAD_CFG = "invalid config; {}".format(SEE_LOG)
SBADCFG = " ({})".format(BAD_CFG)
class AXS(object):
def __init__(
@@ -795,7 +800,7 @@ class AuthSrv(object):
if dst in mount:
t = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
self.log(t.format(dst, mount[dst], src), c=1)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if src in mount.values():
t = "filesystem-path [{}] mounted in multiple locations:"
@@ -804,7 +809,7 @@ class AuthSrv(object):
t += "\n /{}".format(v)
self.log(t, c=3)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if not bos.path.isdir(src):
self.log("warning: filesystem-path does not exist: {}".format(src), 3)
@@ -903,7 +908,7 @@ class AuthSrv(object):
t = "volume-specific config (anything from --help-flags)"
self._l(ln, 6, t)
else:
raise Exception("invalid section header")
raise Exception("invalid section header" + SBADCFG)
self.indent = " " if subsection else " "
continue
@@ -926,7 +931,7 @@ class AuthSrv(object):
acct[u] = p
except:
t = 'lines inside the [accounts] section must be "username: password"'
raise Exception(t)
raise Exception(t + SBADCFG)
continue
if vp is not None and ap is None:
@@ -954,7 +959,7 @@ class AuthSrv(object):
continue
except:
err += "accs entries must be 'rwmdgGa: user1, user2, ...'"
raise Exception(err)
raise Exception(err + SBADCFG)
if cat == catf:
err = ""
@@ -967,7 +972,7 @@ class AuthSrv(object):
if bad:
err = "bad characters [{}] in volflag name [{}]; "
err = err.format(bad, sk)
raise Exception(err)
raise Exception(err + SBADCFG)
if sv is True:
fstr += "," + sk
else:
@@ -979,9 +984,9 @@ class AuthSrv(object):
continue
except:
err += "flags entries (volflags) must be one of the following:\n 'flag1, flag2, ...'\n 'key: value'\n 'flag1, flag2, key: value'"
raise Exception(err)
raise Exception(err + SBADCFG)
raise Exception("unprocessable line in config")
raise Exception("unprocessable line in config" + SBADCFG)
self._e()
self.line_ctr = 0
@@ -1218,7 +1223,7 @@ class AuthSrv(object):
+ ", ".join(k for k in sorted(missing_users)),
c=1,
)
raise Exception("invalid config")
raise Exception(BAD_CFG)
if LEELOO_DALLAS in all_users:
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
@@ -1228,7 +1233,7 @@ class AuthSrv(object):
if pwd in seenpwds:
t = "accounts [{}] and [{}] have the same password; this is not supported"
self.log(t.format(seenpwds[pwd], usr), 1)
raise Exception("invalid config")
raise Exception(BAD_CFG)
seenpwds[pwd] = usr
promote = []
@@ -1612,6 +1617,7 @@ class AuthSrv(object):
vfs.bubble_flags()
have_e2d = False
have_e2t = False
t = "volumes and permissions:\n"
for zv in vfs.all_vols.values():
if not self.warn_anonwrite:
@@ -1635,6 +1641,9 @@ class AuthSrv(object):
if "e2d" in zv.flags:
have_e2d = True
if "e2t" in zv.flags:
have_e2t = True
t += "\n"
if self.warn_anonwrite:
@@ -1646,6 +1655,13 @@ class AuthSrv(object):
if t:
self.log("\n\033[{}\033[0m\n".format(t))
if not have_e2t:
t = "hint: argument -e2ts enables multimedia indexing (artist/title/...)"
self.log(t, 6)
else:
t = "hint: argument -e2dsa enables searching, upload-undo, and better deduplication"
self.log(t, 6)
zv, _ = vfs.get("/", "*", False, False)
zs = zv.realpath.lower()
if zs in ("/", "c:\\") or zs.startswith(r"c:\windows"):
@@ -2015,13 +2031,19 @@ def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
if os.path.isdir(fp):
names = os.listdir(fp)
ret.append("#\033[36m cfg files in {} => {}\033[0m".format(fp, names))
crumb = "#\033[36m cfg files in {} => {}\033[0m".format(fp, names)
ret.append(crumb)
for fn in sorted(names):
fp2 = os.path.join(fp, fn)
if not fp2.endswith(".conf") or fp2 in ipath:
continue
expand_config_file(ret, fp2, ipath)
if ret[-1] == crumb:
# no config files below; remove breadcrumb
ret.pop()
return
ipath += " -> " + fp

View File

@@ -181,6 +181,10 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
raise Exception("failed to translate cert: {}, {}".format(rc, se))
bname = os.path.join(args.crt_dir, "srv")
try:
os.unlink(bname + ".key")
except:
pass
os.rename(bname + "-key.pem", bname + ".key")
os.unlink(bname + ".csr")
@@ -216,7 +220,7 @@ def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
HAVE_CFSSL = False
log("cert", "could not create TLS certificates: {}".format(ex), 3)
if getattr(ex, "errno", 0) == errno.ENOENT:
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest (cfssl, cfssljson, cfssl-certinfo)"
log("cert", t, 6)
ensure_cert(log, args)

View File

@@ -33,7 +33,7 @@ from .__version__ import S_VERSION
from .authsrv import VFS # typechk
from .bos import bos
from .star import StreamTar
from .sutil import StreamArc # typechk
from .sutil import StreamArc, gfilter
from .szip import StreamZip
from .util import (
HTTPCODE,
@@ -41,8 +41,8 @@ from .util import (
MultipartParser,
Pebkac,
UnrecvEOF,
alltrace,
absreal,
alltrace,
atomic_move,
exclude_dotfiles,
fsenc,
@@ -141,6 +141,7 @@ class HttpCli(object):
self.vn = self.asrv.vfs
self.rem = " "
self.vpath = " "
self.vpaths = " "
self.uname = " "
self.pw = " "
self.rvol = [" "]
@@ -210,7 +211,8 @@ class HttpCli(object):
ka["ts"] = self.conn.hsrv.cachebuster()
ka["lang"] = self.args.lang
ka["favico"] = self.args.favico
ka["svcname"] = self.args.doctitle
ka["s_name"] = self.args.bname
ka["s_doctitle"] = self.args.doctitle
ka["html_head"] = self.html_head
return tpl.render(**ka) # type: ignore
@@ -337,6 +339,15 @@ class HttpCli(object):
vpath, arglist = self.req.split("?", 1)
self.trailing_slash = vpath.endswith("/")
vpath = undot(vpath)
zs = unquotep(arglist)
m = self.conn.hsrv.ptn_cc.search(zs)
if m:
hit = zs[m.span()[0] :]
t = "malicious user; Cc in query [{}] => [{!r}]"
self.log(t.format(self.req, hit), 1)
return False
for k in arglist.split("&"):
if "=" in k:
k, zs = k.split("=", 1)
@@ -375,6 +386,9 @@ class HttpCli(object):
self.uparam = uparam
self.cookies = cookies
self.vpath = unquotep(vpath) # not query, so + means +
self.vpaths = (
self.vpath + "/" if self.trailing_slash and self.vpath else self.vpath
)
ok = "\x00" not in self.vpath
if ANYWIN:
@@ -488,6 +502,9 @@ class HttpCli(object):
pex: Pebkac = ex # type: ignore
try:
if pex.code == 999:
return False
post = self.mode in ["POST", "PUT"] or "content-length" in self.headers
if not self._check_nonfatal(pex, post):
self.keepalive = False
@@ -586,6 +603,14 @@ class HttpCli(object):
for k, zs in list(self.out_headers.items()) + self.out_headerlist:
response.append("%s: %s" % (k, zs))
for zs in response:
m = self.conn.hsrv.ptn_cc.search(zs)
if m:
hit = zs[m.span()[0] :]
t = "malicious user; Cc in out-hdr {!r} => [{!r}]"
self.log(t.format(zs, hit), 1)
raise Pebkac(999)
try:
# best practice to separate headers and body into different packets
self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
@@ -667,6 +692,21 @@ class HttpCli(object):
r = ["%s=%s" % (k, quotep(zs)) if zs else k for k, zs in kv.items()]
return "?" + "&amp;".join(r)
def ourlq(self) -> str:
    """Rebuild the request's query-string from self.ouparam.

    Auth/navigation params ("pw", "h", "k") are dropped; spaces become
    "+" and trailing "=" on valueless params is trimmed.  Returns ""
    when nothing remains, otherwise a "?"-prefixed query-string.
    """
    omit = ("pw", "h", "k")
    parts = [
        "{}={}".format(quotep(k), quotep(v)).replace(" ", "+").rstrip("=")
        for k, v in self.ouparam.items()
        if k not in omit
    ]
    return "?" + "&".join(parts) if parts else ""
def redirect(
self,
vpath: str,
@@ -782,16 +822,21 @@ class HttpCli(object):
self.reply(b"", 301, headers=h)
return True
if self.vpath == ".cpr/metrics":
return self.conn.hsrv.metrics.tx(self)
path_base = os.path.join(self.E.mod, "web")
static_path = absreal(os.path.join(path_base, self.vpath[5:]))
if static_path in self.conn.hsrv.statics:
return self.tx_file(static_path)
if not static_path.startswith(path_base):
t = "attempted path traversal [{}] => [{}]"
t = "malicious user; attempted path traversal [{}] => [{}]"
self.log(t.format(self.vpath, static_path), 1)
self.tx_404()
return False
return self.tx_file(static_path)
if "cf_challenge" in self.uparam:
self.reply(self.j2s("cf").encode("utf-8", "replace"))
return True
@@ -2003,7 +2048,9 @@ class HttpCli(object):
dst = self.args.SRS
if self.vpath:
dst += quotep(self.vpath)
dst += quotep(self.vpaths)
dst += self.ourlq()
msg = self.get_pwd_cookie(pwd)
html = self.j2s("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
@@ -2612,7 +2659,7 @@ class HttpCli(object):
#
# if request is for foo.js, check if we have foo.js.{gz,br}
file_ts = 0
file_ts = 0.0
editions: dict[str, tuple[str, int]] = {}
for ext in ["", ".gz", ".br"]:
try:
@@ -2630,7 +2677,7 @@ class HttpCli(object):
else:
sz = st.st_size
file_ts = max(file_ts, int(st.st_mtime))
file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = (fs_path, sz)
except:
pass
@@ -2643,11 +2690,14 @@ class HttpCli(object):
#
# if-modified
file_lastmod, do_send = self._chk_lastmod(file_ts)
file_lastmod, do_send = self._chk_lastmod(int(file_ts))
self.out_headers["Last-Modified"] = file_lastmod
if not do_send:
status = 304
if self.can_write:
self.out_headers["X-Lastmod3"] = str(int(file_ts * 1000))
#
# Accept-Encoding and UA decides which edition to send
@@ -2849,6 +2899,16 @@ class HttpCli(object):
vpath, rem, set(items), self.uname, dots, False, not self.args.no_scandir
)
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
cfmt = ""
if self.thumbcli and not self.args.no_bacode:
for zs in ("opus", "w", "j"):
if zs in self.ouparam or uarg == zs:
cfmt = zs
if cfmt:
self.log("transcoding to [{}]".format(cfmt))
fgen = gfilter(fgen, self.thumbcli, self.uname, vpath, cfmt)
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
bsent = 0
for buf in bgen.gen():
@@ -2860,6 +2920,7 @@ class HttpCli(object):
bsent += len(buf)
except:
logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
bgen.stop()
break
spd = self._spd(bsent)
@@ -2914,7 +2975,12 @@ class HttpCli(object):
ts_html = st.st_mtime
sz_md = 0
lead = b""
for buf in yieldfile(fs_path):
if not sz_md and b"\n" in buf[:2]:
lead = buf[: buf.find(b"\n") + 1]
sz_md += len(lead)
sz_md += len(buf)
for c, v in [(b"&", 4), (b"<", 3), (b">", 3)]:
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
@@ -2933,7 +2999,6 @@ class HttpCli(object):
targs = {
"r": self.args.SR if self.is_vproxied else "",
"ts": self.conn.hsrv.cachebuster(),
"svcname": self.args.doctitle,
"html_head": self.html_head,
"edit": "edit" in self.uparam,
"title": html_escape(self.vpath, crlf=True),
@@ -2960,7 +3025,7 @@ class HttpCli(object):
return True
try:
self.s.sendall(html[0])
self.s.sendall(html[0] + lead)
for buf in yieldfile(fs_path):
self.s.sendall(html_bescape(buf))
@@ -2976,7 +3041,7 @@ class HttpCli(object):
return True
def tx_svcs(self) -> bool:
aname = re.sub("[^0-9a-zA-Z]+", "", self.args.name) or "a"
aname = re.sub("[^0-9a-zA-Z]+", "", self.args.vname) or "a"
ep = self.host
host = ep.split(":")[0]
hport = ep[ep.find(":") :] if ":" in ep else ""
@@ -2986,8 +3051,10 @@ class HttpCli(object):
else self.conn.hsrv.nm.map(self.ip) or host
)
# safer than html_escape/quotep since this avoids both XSS and shell-stuff
pw = re.sub(r"[<>&$?`]", "_", self.pw or "pw")
vp = re.sub(r"[<>&$?`]", "_", self.uparam["hc"] or "").lstrip("/")
pw = re.sub(r"[<>&$?`\"']", "_", self.pw or "pw")
vp = re.sub(r"[<>&$?`\"']", "_", self.uparam["hc"] or "").lstrip("/")
pw = pw.replace(" ", "%20")
vp = vp.replace(" ", "%20")
html = self.j2s(
"svcs",
args=self.args,
@@ -3058,7 +3125,7 @@ class HttpCli(object):
html = self.j2s(
"splash",
this=self,
qvpath=quotep(self.vpath),
qvpath=quotep(self.vpaths) + self.ourlq(),
rvol=rvol,
wvol=wvol,
avol=avol,
@@ -3077,7 +3144,14 @@ class HttpCli(object):
return True
def set_k304(self) -> bool:
ck = gencookie("k304", self.uparam["k304"], self.args.R, False, 86400 * 299)
v = self.uparam["k304"].lower()
if v == "y":
dur = 86400 * 299
else:
dur = None
v = "x"
ck = gencookie("k304", v, self.args.R, False, dur)
self.out_headerlist.append(("Set-Cookie", ck))
self.redirect("", "?h#cc")
return True
@@ -3109,7 +3183,8 @@ class HttpCli(object):
t = '<h1 id="n">404 not found &nbsp;┐( ´ -`)┌</h1><p><a id="r" href="{}/?h">go home</a></p>'
t = t.format(self.args.SR)
html = self.j2s("splash", this=self, qvpath=quotep(self.vpath), msg=t)
qv = quotep(self.vpaths) + self.ourlq()
html = self.j2s("splash", this=self, qvpath=qv, msg=t)
self.reply(html.encode("utf-8"), status=rc)
return True
@@ -3657,7 +3732,7 @@ class HttpCli(object):
"url_suf": url_suf,
"logues": logues,
"readme": readme,
"title": html_escape(self.vpath, crlf=True) or "💾🎉",
"title": html_escape("%s %s" % (self.args.bname, self.vpath), crlf=True),
"srv_info": srv_infot,
"dgrid": "grid" in vf,
"unlist": unlist,
@@ -3880,7 +3955,6 @@ class HttpCli(object):
doc = self.uparam.get("doc") if self.can_read else None
if doc:
doc = unquotep(doc.replace("+", " ").split("?")[0])
j2a["docname"] = doc
doctxt = None
if next((x for x in files if x["name"] == doc), None):

View File

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import base64
import math
import os
import re
import socket
import sys
import threading
@@ -54,8 +55,8 @@ except SyntaxError:
)
sys.exit(1)
from .bos import bos
from .httpconn import HttpConn
from .metrics import Metrics
from .u2idx import U2idx
from .util import (
E_SCK,
@@ -65,6 +66,7 @@ from .util import (
Magician,
Netdev,
NetMap,
absreal,
ipnorm,
min_ex,
shut_socket,
@@ -98,6 +100,7 @@ class HttpSrv(object):
# redefine in case of multiprocessing
socket.setdefaulttimeout(120)
self.t0 = time.time()
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
self.magician = Magician()
self.nm = NetMap([], {})
@@ -121,6 +124,7 @@ class HttpSrv(object):
self.t_periodic: Optional[threading.Thread] = None
self.u2fh = FHC()
self.metrics = Metrics(self)
self.srvs: list[socket.socket] = []
self.ncli = 0 # exact
self.clients: set[HttpConn] = set() # laggy
@@ -138,6 +142,11 @@ class HttpSrv(object):
zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz")
self.prism = os.path.exists(zs)
self.statics: set[str] = set()
self._build_statics()
self.ptn_cc = re.compile(r"[\x00-\x1f]")
self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split()
if not self.args.no_dav:
zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE"
@@ -168,6 +177,14 @@ class HttpSrv(object):
except:
pass
def _build_statics(self) -> None:
    """Collect the absolute paths of all bundled web assets into self.statics."""
    webroot = os.path.join(self.E.mod, "web")
    for dirpath, _, filenames in os.walk(webroot):
        for name in filenames:
            abspath = absreal(os.path.join(dirpath, name))
            self.statics.add(abspath)
            # a precompressed asset also satisfies requests for its plain name
            if abspath.endswith((".gz", ".br")):
                self.statics.add(abspath[:-3])
def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
ips = set()
for ip, _ in self.bound:

View File

@@ -295,7 +295,9 @@ class MDNS(MCast):
while self.running:
timeout = (
0.02 + random.random() * 0.07
if self.probing or self.q or self.defend or self.unsolicited
if self.probing or self.q or self.defend
else max(0.05, self.unsolicited[0] - time.time())
if self.unsolicited
else (last_hop + ihop if ihop else 180)
)
rdy = select.select(self.srv, [], [], timeout)
@@ -513,6 +515,10 @@ class MDNS(MCast):
for srv in self.srv.values():
tx.add(srv)
if not self.unsolicited and self.args.zm_spam:
zf = time.time() + self.args.zm_spam + random.random() * 0.07
self.unsolicited.append(zf)
for srv, deadline in list(self.defend.items()):
if now < deadline:
continue

165
copyparty/metrics.py Normal file
View File

@@ -0,0 +1,165 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import json
import time
from .__init__ import TYPE_CHECKING
from .util import Pebkac, get_df, unhumanize
if TYPE_CHECKING:
from .httpcli import HttpCli
from .httpsrv import HttpSrv
class Metrics(object):
    """Renders server statistics as OpenMetrics text for the /.cpr/metrics endpoint."""

    def __init__(self, hsrv: "HttpSrv") -> None:
        # the owning http server; source of uptime (t0) and the broker
        self.hsrv = hsrv

    def tx(self, cli: "HttpCli") -> bool:
        """Build and send the metrics reply to an admin client.

        Raises Pebkac(403) unless the user is admin somewhere (cli.avol)
        and --stats is enabled; returns True after replying.
        """
        if not cli.avol:
            raise Pebkac(403, "not allowed for user " + cli.uname)

        args = cli.args
        if not args.stats:
            raise Pebkac(403, "the stats feature is not enabled in server config")

        conn = cli.conn
        vfs = conn.asrv.vfs
        allvols = list(sorted(vfs.all_vols.items()))

        idx = conn.get_u2idx()
        if not idx or not hasattr(idx, "p_end"):
            # no usable file index; skip the db-dependent metrics below
            idx = None

        ret: list[str] = []

        def addc(k: str, unit: str, v: str, desc: str) -> None:
            # emit a counter metric (_created + _total samples)
            if unit:
                k += "_" + unit
                zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
            else:
                zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))

        def addh(k: str, typ: str, desc: str) -> None:
            # emit a metric header; samples are added separately via addv
            zs = "# TYPE %s %s\n# HELP %s %s"
            ret.append(zs % (k, typ, k, desc))

        def addbh(k: str, desc: str) -> None:
            # header for a byte-valued gauge
            zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
            ret.append(zs % (k, k, k, desc))

        def addv(k: str, v: str) -> None:
            # a single sample line
            ret.append("%s %s" % (k, v))

        v = "{:.3f}".format(time.time() - self.hsrv.t0)
        addc("cpp_uptime", "seconds", v, "time since last server restart")

        v = str(len(conn.bans or []))
        addc("cpp_bans", "", v, "number of banned IPs")

        if not args.nos_hdd:
            addbh("cpp_disk_size_bytes", "total HDD size of volume")
            addbh("cpp_disk_free_bytes", "free HDD space in volume")
            for vpath, vol in allvols:
                free, total = get_df(vol.realpath)
                addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
                addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))

        if idx and not args.nos_vol:
            addbh("cpp_vol_bytes", "num bytes of data in volume")
            addh("cpp_vol_files", "gauge", "num files in volume")
            addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
            addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
            tnbytes = 0
            tnfiles = 0
            volsizes = []
            try:
                # volume sizes live in the up2k process; ask via broker
                ptops = [x.realpath for _, x in allvols]
                x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
                volsizes = x.get()
            except Exception as ex:
                cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)

            for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))
                # only report free space where a quota (vmaxb/vmaxn) is set
                if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
                    zi = unhumanize(vol.flags.get("vmaxb") or "0")
                    if zi:
                        v = str(zi - nbytes)
                        addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)
                    zi = unhumanize(vol.flags.get("vmaxn") or "0")
                    if zi:
                        v = str(zi - nfiles)
                        addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)

            if volsizes:
                addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
                addv('cpp_vol_files{vol="total"}', str(tnfiles))

        if idx and not args.nos_dup:
            addbh("cpp_dupe_bytes", "num dupe bytes in volume")
            addh("cpp_dupe_files", "gauge", "num dupe files in volume")
            tnbytes = 0
            tnfiles = 0
            for vpath, vol in allvols:
                cur = idx.get_cur(vol.realpath)
                if not cur:
                    continue

                nbytes = 0
                nfiles = 0
                # count the extra copies of each content-hash (w) in the db
                q = "select sz, count(*)-1 c from up group by w having c"
                for sz, c in cur.execute(q):
                    nbytes += sz * c
                    nfiles += c

                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))

            addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
            addv('cpp_dupe_files{vol="total"}', str(tnfiles))

        if not args.nos_unf:
            addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
            addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
            tnbytes = 0
            tnfiles = 0
            try:
                x = self.hsrv.broker.ask("up2k.get_unfinished")
                xs = x.get()
                xj = json.loads(xs)
                for ptop, (nbytes, nfiles) in xj.items():
                    tnbytes += nbytes
                    tnfiles += nfiles
                    # map the filesystem root back to its volume
                    vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
                    if not vol:
                        t = "tx_stats get_unfinished: could not map {}"
                        cli.log(t.format(ptop), 3)
                        continue

                    addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
                    addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))

                addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
                addv('cpp_unf_files{vol="total"}', str(tnfiles))
            except Exception as ex:
                cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)

        # openmetrics requires a terminating EOF marker
        ret.append("# EOF")

        mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
        cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
        return True

View File

@@ -15,7 +15,7 @@ from ipaddress import (
)
from .__init__ import MACOS, TYPE_CHECKING
from .util import Netdev, find_prefix, min_ex, spack
from .util import Daemon, Netdev, find_prefix, min_ex, spack
if TYPE_CHECKING:
from .svchub import SvcHub
@@ -228,6 +228,7 @@ class MCast(object):
for srv in self.srv.values():
assert srv.ip in self.sips
Daemon(self.hopper, "mc-hop")
return bound
def setup_socket(self, srv: MC_Sck) -> None:
@@ -299,34 +300,58 @@ class MCast(object):
t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
self.log(t, 3)
self.hop(srv)
if self.hop(srv, False):
self.log("igmp was already joined?? chilling for a sec", 3)
time.sleep(1.2)
self.hop(srv, True)
self.b4.sort(reverse=True)
self.b6.sort(reverse=True)
def hop(self, srv: MC_Sck) -> None:
def hop(self, srv: MC_Sck, on: bool) -> bool:
"""rejoin to keepalive on routers/switches without igmp-snooping"""
sck = srv.sck
req = srv.mreq
if ":" in srv.ip:
if not on:
try:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
# linux does leaves/joins twice with 0.2~1.05s spacing
time.sleep(1.2)
return True
except:
pass
return False
else:
sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
else:
if not on:
try:
sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
time.sleep(1.2)
return True
except:
pass
return False
else:
# t = "joining {} from ip {} idx {} with mreq {}"
# self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)
return True
def hopper(self) -> None:
    """Background thread: periodically leave and rejoin all multicast groups.

    Keeps memberships alive on routers/switches without igmp-snooping;
    repeats every --mc-hop seconds for as long as the server is running.
    """
    while self.args.mc_hop and self.running:
        time.sleep(self.args.mc_hop)
        if not self.running:
            return

        for srv in self.srv.values():
            self.hop(srv, False)

        # linux does leaves/joins twice with 0.2~1.05s spacing,
        # so wait out that window before rejoining
        time.sleep(1.2)
        if not self.running:
            return

        for srv in self.srv.values():
            self.hop(srv, True)
def map_client(self, cip: str) -> Optional[MC_Sck]:
try:
return self.cscache[cip]

View File

@@ -81,7 +81,7 @@ class SSDPr(object):
ubase = "{}://{}:{}".format(proto, sip, sport)
zsl = self.args.zsl
url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
name = "{} @ {}".format(self.args.doctitle, self.args.name)
name = self.args.doctitle
zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
hc.reply(zs.encode("utf-8", "replace"))
return False # close connectino

View File

@@ -61,6 +61,7 @@ class StreamTar(StreamArc):
Daemon(self._gen, "star-gen")
def gen(self) -> Generator[Optional[bytes], None, None]:
buf = b""
try:
while True:
buf = self.qfile.q.get()
@@ -72,6 +73,12 @@ class StreamTar(StreamArc):
yield None
finally:
while buf:
try:
buf = self.qfile.q.get()
except:
pass
if self.errf:
bos.unlink(self.errf["ap"])
@@ -101,6 +108,9 @@ class StreamTar(StreamArc):
errors.append((f["vp"], f["err"]))
continue
if self.stopped:
break
try:
self.ser(f)
except:

View File

@@ -1,10 +1,14 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import tempfile
from datetime import datetime
from .__init__ import CORES
from .bos import bos
from .th_cli import ThumbCli
from .util import vjoin
if True: # pylint: disable=using-constant-test
from typing import Any, Generator, Optional
@@ -21,10 +25,78 @@ class StreamArc(object):
):
self.log = log
self.fgen = fgen
self.stopped = False
def gen(self) -> Generator[Optional[bytes], None, None]:
raise Exception("override me")
def stop(self) -> None:
self.stopped = True
def gfilter(
    fgen: Generator[dict[str, Any], None, None],
    thumbcli: ThumbCli,
    uname: str,
    vtop: str,
    fmt: str,
) -> Generator[dict[str, Any], None, None]:
    """Transcode archive entries on the fly while streaming a tar/zip.

    Wraps the file generator; each entry is handed to enthumb() on a
    threadpool so transcoding (fmt: "opus"/"w"/"j") overlaps with the
    download.  Entries that enthumb() raises on are yielded unmodified.
    Output order matches input order.
    """
    from concurrent.futures import ThreadPoolExecutor

    pend = []
    with ThreadPoolExecutor(max_workers=CORES) as tp:
        try:
            for f in fgen:
                task = tp.submit(enthumb, thumbcli, uname, vtop, f, fmt)
                pend.append((task, f))
                # yield the oldest entry once it is done, or whenever the
                # backlog grows too deep (keeps memory bounded and order intact)
                if pend[0][0].done() or len(pend) > CORES * 4:
                    task, f = pend.pop(0)
                    try:
                        f = task.result(600)
                    except:
                        # transcode refused/failed; ship the original file
                        pass
                    yield f

            # input exhausted; drain remaining tasks in submission order
            for task, f in pend:
                try:
                    f = task.result(600)
                except:
                    pass
                yield f
        except Exception as ex:
            # NOTE(review): presumably reached when fgen or the consumer
            # fails mid-stream -- confirm; wait out in-flight tasks so the
            # executor can shut down cleanly
            thumbcli.log("gfilter flushing ({})".format(ex))
            for task, f in pend:
                try:
                    task.result(600)
                except:
                    pass

            thumbcli.log("gfilter flushed")
def enthumb(
    thumbcli: ThumbCli, uname: str, vtop: str, f: dict[str, Any], fmt: str
) -> dict[str, Any]:
    """Swap an archive entry for its transcoded version.

    Mutates and returns f: "ap" points at the generated transcode,
    "vp" gets the matching extension, and "st" is replaced with a
    synthesized stat carrying the transcode's size.  Raises when the
    file should be shipped as-is (already a compressed audio format
    for fmt=opus, or no transcode could be produced).
    """
    rem = f["vp"]
    ext = rem.rsplit(".", 1)[-1].lower()
    if fmt == "opus" and ext in "aac|m4a|mp3|ogg|opus|wma".split("|"):
        # already lossy-compressed audio; skip transcoding
        raise Exception()

    # first path component of vp is the archive root; strip it before
    # resolving against the vfs
    vp = vjoin(vtop, rem.split("/", 1)[1])
    vn, rem = thumbcli.asrv.vfs.get(vp, uname, True, False)
    dbv, vrem = vn.get_dbv(rem)
    thp = thumbcli.get(dbv, vrem, f["st"].st_mtime, fmt)
    if not thp:
        raise Exception()

    ext = "jpg" if fmt == "j" else "webp" if fmt == "w" else fmt
    sz = bos.path.getsize(thp)
    st: os.stat_result = f["st"]
    ts = st.st_mtime
    f["ap"] = thp
    f["vp"] = f["vp"].rsplit(".", 1)[0] + "." + ext
    # keep the original file's mode and timestamps, but the new size
    f["st"] = os.stat_result((st.st_mode, -1, -1, 1, 1000, 1000, sz, ts, ts, ts))
    return f
def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]:
report = ["copyparty failed to add the following files to the archive:", ""]

View File

@@ -29,7 +29,7 @@ if True: # pylint: disable=using-constant-test
from typing import Any, Optional, Union
from .__init__ import ANYWIN, EXE, MACOS, TYPE_CHECKING, EnvParams, unicode
from .authsrv import AuthSrv
from .authsrv import BAD_CFG, AuthSrv
from .cert import ensure_cert
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
@@ -131,8 +131,11 @@ class SvcHub(object):
args.force_js = True
if not self._process_config():
raise Exception("bad config")
raise Exception(BAD_CFG)
self.log_div = 10 ** (6 - args.log_tdec)
self.log_efmt = "%02d:%02d:%02d.%0{}d".format(args.log_tdec)
self.log_dfmt = "%04d-%04d-%06d.%0{}d".format(args.log_tdec)
self.log = self._log_disabled if args.q else self._log_enabled
if args.lo:
self._setup_logfile(printed)
@@ -162,6 +165,14 @@ class SvcHub(object):
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
if args.nih:
args.vname = ""
args.doctitle = args.doctitle.replace(" @ --name", "")
else:
args.vname = args.name
args.doctitle = args.doctitle.replace("--name", args.vname)
args.bname = args.bname.replace("--name", args.vname) or args.vname
if args.log_fk:
args.log_fk = re.compile(args.log_fk)
@@ -673,11 +684,11 @@ class SvcHub(object):
with self.log_mutex:
zd = datetime.utcnow()
ts = "%04d-%04d-%06d.%03d" % (
ts = self.log_dfmt % (
zd.year,
zd.month * 100 + zd.day,
(zd.hour * 100 + zd.minute) * 100 + zd.second,
zd.microsecond // 1000,
zd.microsecond // self.log_div,
)
self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))
@@ -729,11 +740,11 @@ class SvcHub(object):
msg = "%s%s\033[0m" % (c, msg)
zd = datetime.utcfromtimestamp(now)
ts = "%02d:%02d:%02d.%03d" % (
ts = self.log_efmt % (
zd.hour,
zd.minute,
zd.second,
zd.microsecond // 1000,
zd.microsecond // self.log_div,
)
msg = fmt % (ts, src, msg)
try:

View File

@@ -108,6 +108,7 @@ class ThumbCli(object):
if st.st_size:
ret = tpath = tp
fmt = ret.rsplit(".")[1]
break
else:
abort = True
except:

View File

@@ -22,7 +22,7 @@ from copy import deepcopy
from queue import Queue
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS
from .authsrv import LEELOO_DALLAS, VFS, AuthSrv
from .authsrv import LEELOO_DALLAS, SSEELOG, VFS, AuthSrv
from .bos import bos
from .cfg import vf_bmap, vf_vmap
from .fsutil import Fstab
@@ -267,11 +267,49 @@ class Up2k(object):
}
return json.dumps(ret, indent=4)
def get_unfinished(self) -> str:
    """Return a JSON map of {ptop: (nbytes_left, nfiles)} for unfinished uploads.

    Returns "{}" on py2 (no lock timeout support) or when the registry
    lock cannot be grabbed within 0.5s (server busy; skip the stats
    rather than stall the metrics endpoint).
    """
    if PY2 or not self.mutex.acquire(timeout=0.5):
        return "{}"

    ret: dict[str, tuple[int, int]] = {}
    try:
        for ptop, tab2 in self.registry.items():
            nbytes = 0
            nfiles = 0
            drp = self.droppable.get(ptop, {})
            for wark, job in tab2.items():
                if wark in drp:
                    continue

                nfiles += 1
                try:
                    # chunks are uniformly sized, so remaining-bytes is
                    # close enough on average
                    nbytes += len(job["need"]) * job["size"] // len(job["hash"])
                except Exception:
                    # malformed/partial job entry (missing keys or an
                    # empty hash list); count the file only
                    pass

            ret[ptop] = (nbytes, nfiles)
    finally:
        self.mutex.release()

    return json.dumps(ret, indent=4)
def get_volsize(self, ptop: str) -> tuple[int, int]:
    """Return (nbytes, nfiles) for one volume root, holding the registry lock."""
    with self.mutex:
        return self._get_volsize(ptop)
def get_volsizes(self, ptops: list[str]) -> list[tuple[int, int]]:
    """Batch lookup: one (nbytes, nfiles) tuple per ptop, in the same order.

    Grabs the mutex once for the whole batch instead of per volume.
    """
    with self.mutex:
        return [self._get_volsize(ptop) for ptop in ptops]
def _get_volsize(self, ptop: str) -> tuple[int, int]:
if "e2ds" not in self.flags.get(ptop, {}):
return (0, 0)
cur = self.cur[ptop]
nbytes = self.volsize[cur]
nfiles = self.volnfiles[cur]
@@ -791,9 +829,9 @@ class Up2k(object):
reg = {}
drp = None
path = os.path.join(histpath, "up2k.snap")
if bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
snap = os.path.join(histpath, "up2k.snap")
if bos.path.exists(snap):
with gzip.GzipFile(snap, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
@@ -804,20 +842,20 @@ class Up2k(object):
pass
for k, job in reg2.items():
path = djoin(job["ptop"], job["prel"], job["name"])
if bos.path.exists(path):
fp = djoin(job["ptop"], job["prel"], job["name"])
if bos.path.exists(fp):
reg[k] = job
job["poke"] = time.time()
job["busy"] = {}
else:
self.log("ign deleted file in snap: [{}]".format(path))
self.log("ign deleted file in snap: [{}]".format(fp))
if drp is None:
drp = [k for k, v in reg.items() if not v.get("need", [])]
else:
drp = [x for x in drp if x in reg]
t = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
t = "loaded snap {} |{}| ({})".format(snap, len(reg.keys()), len(drp or []))
ta = [t] + self._vis_reg_progress(reg)
self.log("\n".join(ta))
@@ -946,7 +984,11 @@ class Up2k(object):
db.c.connection.commit()
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
if (
vol.flags.get("vmaxb")
or vol.flags.get("vmaxn")
or (self.args.stats and not self.args.nos_vol)
):
zs = "select count(sz), sum(sz) from up"
vn, vb = db.c.execute(zs).fetchone()
vb = vb or 0
@@ -955,7 +997,7 @@ class Up2k(object):
self.volnfiles[db.c] = vn
vmaxb = unhumanize(vol.flags.get("vmaxb") or "0")
vmaxn = unhumanize(vol.flags.get("vmaxn") or "0")
t = "{} / {} ( {} / {} files) in {}".format(
t = "{:>5} / {:>5} ( {:>5} / {:>5} files) in {}".format(
humansize(vb, True),
humansize(vmaxb, True),
humansize(vn, True).rstrip("B"),
@@ -2358,27 +2400,31 @@ class Up2k(object):
cur = jcur
ptop = None # use cj or job as appropriate
if job or wark in reg:
job = job or reg[wark]
if (
job["ptop"] == cj["ptop"]
and job["prel"] == cj["prel"]
and job["name"] == cj["name"]
):
if not job and wark in reg:
# ensure the files haven't been deleted manually
names = [job[x] for x in ["name", "tnam"] if x in job]
rj = reg[wark]
names = [rj[x] for x in ["name", "tnam"] if x in rj]
for fn in names:
path = djoin(job["ptop"], job["prel"], fn)
path = djoin(rj["ptop"], rj["prel"], fn)
try:
if bos.path.getsize(path) > 0:
if bos.path.getsize(path) > 0 or not rj["need"]:
# upload completed or both present
break
except:
# missing; restart
if not self.args.nw and not n4g:
job = None
t = "forgetting deleted partial upload at {}"
self.log(t.format(path))
del reg[wark]
break
else:
if job or wark in reg:
job = job or reg[wark]
if (
job["ptop"] != cj["ptop"]
or job["prel"] != cj["prel"]
or job["name"] != cj["name"]
):
# file contents match, but not the path
src = djoin(job["ptop"], job["prel"], job["name"])
dst = djoin(cj["ptop"], cj["prel"], cj["name"])
@@ -2660,7 +2706,7 @@ class Up2k(object):
if not job:
known = " ".join([x for x in self.registry[ptop].keys()])
self.log("unknown wark [{}], known: {}".format(wark, known))
raise Pebkac(400, "unknown wark")
raise Pebkac(400, "unknown wark" + SSEELOG)
if chash not in job["need"]:
msg = "chash = {} , need:\n".format(chash)

View File

@@ -171,6 +171,7 @@ HTTPCODE = {
500: "Internal Server Error",
501: "Not Implemented",
503: "Service Unavailable",
999: "MissingNo",
}
@@ -1227,12 +1228,15 @@ def ren_open(
except OSError as ex_:
ex = ex_
if ex.errno == errno.EINVAL and not asciified:
# EPERM: android13
if ex.errno in (errno.EINVAL, errno.EPERM) and not asciified:
asciified = True
bname, fname = [
zs.encode("ascii", "replace").decode("ascii").replace("?", "_")
for zs in [bname, fname]
]
zsl = []
for zs in (bname, fname):
zs = zs.encode("ascii", "replace").decode("ascii")
zs = re.sub(r"[^][a-zA-Z0-9(){}.,+=!-]", "_", zs)
zsl.append(zs)
bname, fname = zsl
continue
# ENOTSUP: zfs on ubuntu 20.04

View File

@@ -310,7 +310,7 @@ window.baguetteBox = (function () {
options = {};
setOptions(o);
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function setVmode() {
@@ -356,7 +356,7 @@ window.baguetteBox = (function () {
setVmode();
if (tt.en)
tt.show.bind(this)();
tt.show.call(this);
}
function findfile() {
@@ -376,7 +376,12 @@ window.baguetteBox = (function () {
else
(vid() || ebi('bbox-overlay')).requestFullscreen();
}
catch (ex) { alert(ex); }
catch (ex) {
if (IPHONE)
alert('sorry, apple decided to make this impossible on iphones (should work on ipad tho)');
else
alert(ex);
}
}
function tglsel() {
@@ -968,7 +973,7 @@ window.baguetteBox = (function () {
clmod(btnPrev, 'off', 't');
clmod(btnNext, 'off', 't');
if (Date.now() - ctime <= 500)
if (Date.now() - ctime <= 500 && !IPHONE)
tglfull();
ctime = Date.now();

View File

@@ -731,14 +731,13 @@ html.y #files thead th {
margin: 0;
padding: .3em .5em;
background: var(--bg);
max-width: var(--file-td-w);
word-wrap: break-word;
overflow: hidden;
}
#files tr:nth-child(2n) td {
background: var(--row-alt);
}
#files td+td+td {
max-width: 30em;
overflow: hidden;
}
#files td+td {
box-shadow: 1px 0 0 0 rgba(128,128,128,var(--f-sh1)) inset, 0 1px 0 rgba(255,255,255,var(--f-sh2)) inset, 0 -1px 0 rgba(255,255,255,var(--f-sh2)) inset;
}

View File

@@ -142,6 +142,7 @@
themes = {{ themes }},
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
s_name = "{{ s_name }}",
lang = "{{ lang }}",
dfavico = "{{ favico }}",
def_hcols = {{ def_hcols|tojson }},

View File

@@ -403,6 +403,7 @@ var Ls = {
"u_https3": "for better performance",
"u_ancient": 'your browser is impressively ancient -- maybe you should <a href="#" onclick="goto(\'bup\')">use bup instead</a>',
"u_nowork": "need firefox 53+ or chrome 57+ or iOS 11+",
"u_uri": "to dragdrop images from other browser windows,\nplease drop it onto the big upload button",
"u_enpot": 'switch to <a href="#">potato UI</a> (may improve upload speed)',
"u_depot": 'switch to <a href="#">fancy UI</a> (may reduce upload speed)',
"u_gotpot": 'switching to the potato UI for improved upload speed,\n\nfeel free to disagree and switch back!',
@@ -865,6 +866,7 @@ var Ls = {
"u_https3": "for høyere hastighet",
"u_ancient": 'nettleseren din er prehistorisk -- mulig du burde <a href="#" onclick="goto(\'bup\')">bruke bup istedenfor</a>',
"u_nowork": "krever firefox 53+, chrome 57+, eller iOS 11+",
"u_uri": "for å laste opp bilder ifra andre nettleservinduer,\nslipp bildet rett på den store last-opp-knappen",
"u_enpot": 'bytt til <a href="#">enkelt UI</a> (gir sannsynlig raskere opplastning)',
"u_depot": 'bytt til <a href="#">snæsent UI</a> (gir sannsynlig tregere opplastning)',
"u_gotpot": 'byttet til et enklere UI for å laste opp raskere,\n\ndu kan gjerne bytte tilbake altså!',
@@ -1136,16 +1138,16 @@ ebi('op_cfg').innerHTML = (
'<div>\n' +
' <h3>' + L.cl_favico + ' <span id="ico1">🎉</span></h3>\n' +
' <div>\n' +
' <input type="text" id="icot" style="width:1.3em" value="" tt="' + L.cft_text + '" />' +
' <input type="text" id="icof" style="width:2em" value="" tt="' + L.cft_fg + '" />' +
' <input type="text" id="icob" style="width:2em" value="" tt="' + L.cft_bg + '" />' +
' <input type="text" id="icot" value="" ' + NOAC + ' style="width:1.3em" tt="' + L.cft_text + '" />' +
' <input type="text" id="icof" value="" ' + NOAC + ' style="width:2em" tt="' + L.cft_fg + '" />' +
' <input type="text" id="icob" value="" ' + NOAC + ' style="width:2em" tt="' + L.cft_bg + '" />' +
' </td>\n' +
' </div>\n' +
'</div>\n' +
'<div>\n' +
' <h3>' + L.cl_bigdir + '</h3>\n' +
' <div>\n' +
' <input type="text" id="bd_lim" value="250" style="width:4em" tt="' + L.cdt_lim + '" />' +
' <input type="text" id="bd_lim" value="250" ' + NOAC + ' style="width:4em" tt="' + L.cdt_lim + '" />' +
' <a id="bd_ask" class="tgl btn" href="#" tt="' + L.cdt_ask + '">ask</a>\n' +
' </td>\n' +
' </div>\n' +
@@ -1333,7 +1335,7 @@ var mpl = (function () {
) : '') +
'<div><h3>' + L.ml_tint + '</h3><div>' +
'<input type="text" id="pb_tint" style="width:2.4em" value="0" tt="' + L.mt_tint + '" />' +
'<input type="text" id="pb_tint" value="0" ' + NOAC + ' style="width:2.4em" tt="' + L.mt_tint + '" />' +
'</div></div>' +
'<div><h3>' + L.ml_eq + '</h3><div id="audio_eq"></div></div>');
@@ -2036,8 +2038,7 @@ var pbar = (function () {
t_redraw = setTimeout(r.drawpos, sm > 50 ? 20 : 50);
};
window.addEventListener('resize', r.onresize);
r.onresize();
onresize100.add(r.onresize, true);
return r;
})();
@@ -2099,8 +2100,7 @@ var vbar = (function () {
clearTimeout(untext);
untext = setTimeout(r.draw, 1000);
};
window.addEventListener('resize', r.onresize);
r.onresize();
onresize100.add(r.onresize, true);
var rect;
function mousedown(e) {
@@ -2125,6 +2125,11 @@ var vbar = (function () {
lastv = Date.now();
mp.setvol(mul);
r.draw();
setTimeout(function () {
if (IPHONE && mp.au && mul < 0.9 && mp.au.volume == 1)
toast.inf(6, 'volume doesnt work because <a href="https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11" target="_blank">apple says no</a>');
}, 1);
}
can.onmousedown = function (e) {
if (e.button !== 0)
@@ -2173,13 +2178,18 @@ function seek_au_sec(seek) {
}
function song_skip(n) {
var tid = null;
if (mp.au)
tid = mp.au.tid;
function song_skip(n, dirskip) {
var tid = mp.au ? mp.au.tid : null,
ofs = tid ? mp.order.indexOf(tid) : -1;
if (tid !== null)
play(mp.order.indexOf(tid) + n);
if (dirskip && ofs + 1 && ofs > mp.order.length - 2) {
toast.inf(10, L.mm_nof);
mpl.traversals = 0;
return;
}
if (tid)
play(ofs + n);
else
play(mp.order[n == -1 ? mp.order.length - 1 : 0]);
}
@@ -2194,8 +2204,9 @@ function next_song(e) {
function next_song_cmn(e) {
ev(e);
if (mp.order.length) {
var dirskip = mpl.traversals;
mpl.traversals = 0;
return song_skip(1);
return song_skip(1, dirskip);
}
if (mpl.traversals++ < 5) {
if (MOBILE && t_fchg && Date.now() - t_fchg > 30 * 1000)
@@ -2707,7 +2718,7 @@ var afilt = (function () {
html.push('<td><a href="#" class="eq_step" step="0.5" band="' + b + '">+</a></td>');
h2.push('<td>' + vs[a][1] + '</td>');
h4.push('<td><a href="#" class="eq_step" step="-0.5" band="' + b + '">&ndash;</a></td>');
h3.push('<td><input type="text" class="eq_gain" band="' + b + '" value="' + vs[a][2] + '" /></td>');
h3.push('<td><input type="text" class="eq_gain" ' + NOAC + ' band="' + b + '" value="' + vs[a][2] + '" /></td>');
}
html = html.join('\n') + '</tr><tr>';
html += h2.join('\n') + '</tr><tr>';
@@ -3417,8 +3428,8 @@ var fileman = (function () {
'<a id="rn_case" class="tgl btn" href="#" tt="' + L.fr_case + '</a>',
'</div>',
'<div id="rn_vadv"><table>',
'<tr><td>regex</td><td><input type="text" id="rn_re" tt="regex search pattern to apply to original filenames; capturing groups can be referenced in the format field below like &lt;code&gt;(1)&lt;/code&gt; and &lt;code&gt;(2)&lt;/code&gt; and so on" placeholder="^[0-9]+[\\. ]+(.*) - (.*)" /></td></tr>',
'<tr><td>format</td><td><input type="text" id="rn_fmt" tt="inspired by foobar2000:$N&lt;code&gt;(title)&lt;/code&gt; is replaced by song title,$N&lt;code&gt;[(artist) - ](title)&lt;/code&gt; skips the first part if artist is blank$N&lt;code&gt;$lpad((tn),2,0)&lt;/code&gt; pads tracknumber to 2 digits" placeholder="[(artist) - ](title).(ext)" /></td></tr>',
'<tr><td>regex</td><td><input type="text" id="rn_re" ' + NOAC + ' tt="regex search pattern to apply to original filenames; capturing groups can be referenced in the format field below like &lt;code&gt;(1)&lt;/code&gt; and &lt;code&gt;(2)&lt;/code&gt; and so on" placeholder="^[0-9]+[\\. ]+(.*) - (.*)" /></td></tr>',
'<tr><td>format</td><td><input type="text" id="rn_fmt" ' + NOAC + ' tt="inspired by foobar2000:$N&lt;code&gt;(title)&lt;/code&gt; is replaced by song title,$N&lt;code&gt;[(artist) - ](title)&lt;/code&gt; skips the first part if artist is blank$N&lt;code&gt;$lpad((tn),2,0)&lt;/code&gt; pads tracknumber to 2 digits" placeholder="[(artist) - ](title).(ext)" /></td></tr>',
'<tr><td>preset</td><td><select id="rn_pre"></select>',
'<button id="rn_pdel">❌ ' + L.fr_pdel + '</button>',
'<button id="rn_pnew">💾 ' + L.fr_pnew + '</button>',
@@ -3930,7 +3941,7 @@ var showfile = (function () {
if (!lang)
continue;
r.files.push({ 'id': link.id, 'name': fn });
r.files.push({ 'id': link.id, 'name': uricom_dec(fn) });
var td = ebi(link.id).closest('tr').getElementsByTagName('td')[0];
@@ -4120,8 +4131,9 @@ var showfile = (function () {
var html = ['<li class="bn">' + L.tv_lst + '<br />' + linksplit(get_vpath()).join('') + '</li>'];
for (var a = 0; a < r.files.length; a++) {
var file = r.files[a];
html.push('<li><a href="?doc=' + file.name + '" hl="' + file.id +
'">' + esc(uricom_dec(file.name)) + '</a>');
html.push('<li><a href="?doc=' +
uricom_enc(file.name) + '" hl="' + file.id +
'">' + esc(file.name) + '</a>');
}
ebi('docul').innerHTML = html.join('\n');
};
@@ -4303,14 +4315,14 @@ var thegrid = (function () {
if (ctrl(e) && !treectl.csel && !r.sel)
return true;
return gclick.bind(this)(e, false);
return gclick.call(this, e, false);
}
function gclick2(e) {
if (ctrl(e) || !r.sel)
return true;
return gclick.bind(this)(e, true);
return gclick.call(this, e, true);
}
function gclick(e, dbl) {
@@ -4325,7 +4337,7 @@ var thegrid = (function () {
tr = td.parentNode;
if ((r.sel && !dbl && !ctrl(e)) || (treectl.csel && (e.shiftKey || ctrl(e)))) {
td.onclick.bind(td)(e);
td.onclick.call(td, e);
if (e.shiftKey)
return r.loadsel();
clmod(this, 'sel', clgot(tr, 'sel'));
@@ -4611,8 +4623,7 @@ function tree_neigh(n) {
if (act >= links.length)
act = 0;
treectl.dir_cb = tree_scrollto;
links[act].click();
treectl.treego.call(links[act]);
links[act].focus();
}
@@ -4631,7 +4642,7 @@ function tree_up(justgo) {
return;
}
if (act.previousSibling.textContent == '-') {
act.previousSibling.click();
treectl.treego.call(act.previousSibling);
if (!justgo)
return;
}
@@ -4906,7 +4917,7 @@ document.onkeydown = function (e) {
for (var a = 0; a < trs.length; a += 2) {
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
}
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" placeholder="( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )" /></td></tr></table>');
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" ' + NOAC + ' placeholder="( tags like *nhato* or tags like *taishi* ) and ( not tags like *nhato* or not tags like *taishi* )" /></td></tr></table>');
ebi('srch_form').innerHTML = html.join('\n');
var o = QSA('#op_search input');
@@ -5174,6 +5185,7 @@ document.onkeydown = function (e) {
}
})();
function aligngriditems() {
if (!treectl)
return;
@@ -5196,7 +5208,25 @@ function aligngriditems() {
ebi('ggrid').style.justifyContent = treectl.hidden ? 'center' : 'space-between';
}
}
window.addEventListener('resize', aligngriditems);
onresize100.add(aligngriditems);
// keeps the CSS variable --file-td-w in sync with the viewport:
// the filename column gets (viewport width in em) - 2em, and the
// style write is skipped whenever the rounded value is unchanged
var filecolwidth = (function () {
	var prev = -1; // last width written; -1 forces the first update through
	return function () {
		var em = parseFloat(getComputedStyle(document.body)['font-size']),
			cols = Math.floor(window.innerWidth / em - 2);
		if (cols == prev)
			return; // no visible change; avoid a redundant style recalc
		prev = cols;
		document.documentElement.style.setProperty('--file-td-w', cols + 'em');
	}
})();
onresize100.add(filecolwidth, true);
var treectl = (function () {
var r = {
@@ -5543,7 +5573,7 @@ var treectl = (function () {
}
links[a].className = cl;
links[a].onclick = treego;
links[a].onclick = r.treego;
links[a].onmouseenter = nowrap ? menter : null;
links[a].onmouseleave = nowrap ? mleave : null;
}
@@ -5605,7 +5635,7 @@ var treectl = (function () {
return els[a].click();
}
function treego(e) {
r.treego = function (e) {
if (ctrl(e))
return true;
@@ -5841,6 +5871,8 @@ var treectl = (function () {
}
if (sel.length)
msel.loadsel(sel);
else
msel.origin_id(null);
setTimeout(eval_hash, 1);
}
@@ -6711,7 +6743,9 @@ var msel = (function () {
};
r.loadsel = function (sel) {
if (!sel || !r.so || !ebi(r.so))
r.so = r.pr = null;
r.sel = [];
r.load();
@@ -7401,8 +7435,16 @@ function goto_unpost(e) {
}
function wintitle(txt) {
document.title = (txt ? txt : '') + get_vpath().slice(1, -1).split('/').pop();
// sets the browser tab title: optional prefix `txt`, then the current
// folder name; the server name (s_name) is prepended unless noname is set
function wintitle(txt, noname) {
	var t = txt === undefined ? '' : txt;
	if (s_name && !noname)
		t = s_name + ' ' + t;
	// get_vpath() is '/'-wrapped; strip the wrapping, keep the last segment
	document.title = t + get_vpath().slice(1, -1).split('/').pop();
}

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
</head>

View File

@@ -278,6 +278,7 @@ function Modpoll() {
return;
var new_md = this.responseText,
new_mt = this.getResponseHeader('X-Lastmod3') || r.lastmod,
server_ref = server_md.replace(/\r/g, ''),
server_now = new_md.replace(/\r/g, '');
@@ -285,6 +286,7 @@ function Modpoll() {
if (r.initial && server_ref != server_now)
return modal.confirm('Your browser decided to show an outdated copy of the document!\n\nDo you want to load the latest version from the server instead?', function () {
dom_src.value = server_md = new_md;
last_modified = new_mt;
draw_md();
}, null);

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ svcname }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<title>{{ args.doctitle }} @ {{ args.name }}</title>
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#333">

View File

@@ -4,6 +4,8 @@
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(deps/scp.woff2) format('woff2');
}
html {
text-size-adjust: 100%;
-webkit-text-size-adjust: 100%;
touch-action: manipulation;
}
#tt, #toast {

View File

@@ -723,7 +723,7 @@ function Donut(uc, st) {
function strobe() {
var txt = strobes.pop();
wintitle(txt);
wintitle(txt, false);
if (!txt)
clearInterval(tstrober);
}
@@ -971,7 +971,7 @@ function up2k_init(subtle) {
if (++nenters <= 0)
nenters = 1;
if (onover.bind(this)(e))
if (onover.call(this, e))
return true;
var mup, up = QS('#up_zd');
@@ -995,16 +995,29 @@ function up2k_init(subtle) {
function onoverb(e) {
// zones are alive; disable cuo2duo branch
document.body.ondragover = document.body.ondrop = null;
return onover.bind(this)(e);
return onover.call(this, e);
}
// dragover anywhere on the page; btn=false keeps the uri-list warning active
function onover(e) {
return onovercmn(this, e, false);
}
// dragover on the upload button itself; btn=true accepts text/uri-list drops
function onoverbtn(e) {
return onovercmn(this, e, true);
}
function onovercmn(self, e, btn) {
try {
var ok = false, dt = e.dataTransfer.types;
for (var a = 0; a < dt.length; a++)
if (dt[a] == 'Files')
ok = true;
else if (dt[a] == 'text/uri-list')
return true;
else if (dt[a] == 'text/uri-list') {
if (btn) {
ok = true;
if (toast.txt == L.u_uri)
toast.hide();
}
else
return toast.inf(10, L.u_uri) || true;
}
if (!ok)
return true;
@@ -1020,8 +1033,11 @@ function up2k_init(subtle) {
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
return modal.alert('your browser does not support drag-and-drop uploading');
}
if (btn)
return;
clmod(ebi('drops'), 'vis', 1);
var v = this.getAttribute('v');
var v = self.getAttribute('v');
if (v)
clmod(ebi(v), 'hl', 1);
}
@@ -1045,6 +1061,8 @@ function up2k_init(subtle) {
document.body.ondragleave = offdrag;
document.body.ondragover = onover;
document.body.ondrop = gotfile;
ebi('u2btn').ondrop = gotfile;
ebi('u2btn').ondragover = onoverbtn;
var drops = [ebi('up_dz'), ebi('srch_dz')];
for (var a = 0; a < 2; a++) {
@@ -1088,7 +1106,7 @@ function up2k_init(subtle) {
function gotfile(e) {
ev(e);
nenters = 0;
offdrag.bind(this)();
offdrag.call(this);
var dz = this && this.getAttribute('id');
if (!dz && e && e.clientY)
// cuo2duo fallback
@@ -1132,7 +1150,7 @@ function up2k_init(subtle) {
dst = good_files;
if (is_itemlist) {
if (fobj.kind !== 'file')
if (fobj.kind !== 'file' && fobj.type !== 'text/uri-list')
continue;
try {
@@ -1144,6 +1162,8 @@ function up2k_init(subtle) {
}
catch (ex) { }
fobj = fobj.getAsFile();
if (!fobj)
continue;
}
try {
if (fobj.size < 1)
@@ -2610,8 +2630,7 @@ function up2k_init(subtle) {
}
}
}
window.addEventListener('resize', onresize);
onresize();
onresize100.add(onresize, true);
if (MOBILE) {
// android-chrome wobbles for a bit; firefox / iOS-safari are OK

View File

@@ -7,6 +7,7 @@ if (!window.console || !console.log)
var wah = '',
NOAC = 'autocorrect="off" autocapitalize="off"',
L, tt, treectl, thegrid, up2k, asmCrypto, hashwasm, vbar, marked,
CB = '?_=' + Date.now(),
R = SR.slice(1),
@@ -1051,6 +1052,67 @@ function cliptxt(txt, ok) {
}
// Debounce(delay): run a set of subscriber callbacks at most once per
// `delay` ms while hit() keeps firing; `delay` doubles as a deadline so
// a continuous stream of hits cannot postpone the run forever
function Debounce(delay) {
var r = this;
r.delay = delay;
r.timer = 0;
r.t_hit = 0; // timestamp of the last hit() that (re)armed the timer
r.t_run = 0; // timestamp of the last run()
r.q = []; // subscribed callbacks
// subscribe `fun` (replacing any previous registration); run=true also invokes it now
r.add = function (fun, run) {
r.rm(fun);
r.q.push(fun);
if (run)
fun();
};
// unsubscribe `fun` (no-op if it was never added)
r.rm = function (fun) {
apop(r.q, fun);
};
// invoke every subscriber immediately; suppressed after a page crash
r.run = function () {
if (crashed)
return;
r.t_run = Date.now();
var q = r.q.slice(0); // snapshot so subscribers may add/remove during the run
for (var a = 0; a < q.length; a++)
q[a]();
};
// event callback: schedule run() after `delay` ms of quiet, or fire
// synchronously when the deadline since the previous run has passed
r.hit = function () {
if (crashed)
return;
var now = Date.now(),
td_hit = now - r.t_hit,
td_run = now - r.t_run;
if (td_run >= r.delay * 2)
r.t_run = now; // long idle; restart the deadline window
if (td_run >= r.delay && td_run <= r.delay * 2) {
// r.delay is also deadline
clearTimeout(r.timer);
return r.run();
}
if (td_hit < r.delay / 5)
return; // hits arriving this fast don't reschedule; the pending timer stands
clearTimeout(r.timer);
r.timer = setTimeout(r.run, r.delay);
r.t_hit = now;
};
};
// global 100ms-debounced resize dispatcher; modules register handlers via onresize100.add()
var onresize100 = new Debounce(100);
window.addEventListener('resize', onresize100.hit);
var timer = (function () {
var r = {};
r.q = [];
@@ -1104,7 +1166,7 @@ var tt = (function () {
var prev = null;
r.cshow = function () {
if (this !== prev)
r.show.bind(this)();
r.show.call(this);
prev = this;
};
@@ -1116,7 +1178,7 @@ var tt = (function () {
return;
if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
return r.show.call(this);
tev = setTimeout(r.show.bind(this), 800);
if (TOUCH)
@@ -1274,6 +1336,9 @@ var toast = (function () {
r.visible = false;
r.txt = null;
r.tag = obj; // filler value (null is scary)
r.p_txt = '';
r.p_sec = 0;
r.p_t = 0;
function scrollchk() {
if (scrolling)
@@ -1306,10 +1371,23 @@ var toast = (function () {
};
r.show = function (cl, sec, txt, tag) {
var same = r.visible && txt == r.p_txt && r.p_sec == sec,
delta = Date.now() - r.p_t;
if (same && delta < 100)
return;
r.p_txt = txt;
r.p_sec = sec;
r.p_t = Date.now();
clearTimeout(te);
if (sec)
te = setTimeout(r.hide, sec * 1000);
if (same && delta < 1000)
return;
if (txt.indexOf('<body>') + 1)
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
@@ -1488,7 +1566,7 @@ var modal = (function () {
cb_ok = cok;
cb_ng = cng === undefined ? cok : null;
cb_up = fun;
html += '<input id="modali" type="text" /><div id="modalb">' + ok_cancel + '</div>';
html += '<input id="modali" type="text" ' + NOAC + ' /><div id="modalb">' + ok_cancel + '</div>';
r.show(html);
ebi('modali').value = v || '';

View File

@@ -1,3 +1,62 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0725-1550 `v1.8.8` just boring bugfixes
final release until late august unless something bad happens and i end up building this thing on a shinkansen
## recent security / vulnerability fixes
* there is a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` in case of future important updates
* [v1.8.7](https://github.com/9001/copyparty/releases/tag/v1.8.7) (2023-07-23) - [CVE-2023-38501](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-38501) - reflected XSS
* [v1.8.2](https://github.com/9001/copyparty/releases/tag/v1.8.2) (2023-07-14) - [CVE-2023-37474](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-37474) - path traversal (first CVE)
* all serverlogs reviewed so far (5 public servers) showed no signs of exploitation
## bugfixes
* range-select with shiftclick:
* don't crash when entering another folder and shift-clicking some more
* remember selection origin when lazy-loading more stuff into the viewport
* markdown editor:
* fix confusing warnings when the browser cache decides it *really* wants to cache
* and when a document starts with a newline
* remember intended actions such as `?edit` on login prompts
* Windows: TLS-cert generation (triggered by network changes) could occasionally fail
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0723-1543 `v1.8.7` XSS for days
for lack of better ideas, there is now a [discord server](https://discord.gg/25J8CdTT6G) with an `@everyone` for all future important updates such as this one
## bugfixes
* reflected XSS through `/?k304` and `/?setck`
* if someone tricked you into clicking a URL containing a chain of `%0d` and `%0a` they could potentially have moved/deleted existing files on the server, or uploaded new files, using your account
* if you use a reverse proxy, you can check if you have been exploited like so:
* nginx: grep your logs for URLs containing `%0d%0a%0d%0a`, for example using the following command:
```bash
(gzip -dc access.log*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -iE '%0[da]%0[da]%0[da]%0[da]'
```
* if you find any traces of exploitation (or just want to be on the safe side) it's recommended to change the passwords of your copyparty accounts
* huge thanks *again* to @TheHackyDog !
* the original fix for CVE-2023-37474 broke the download links for u2c.py and partyfuse.py
* fix mediaplayer spinlock if the server only has a single audio file
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0721-0036 `v1.8.6` fix reflected XSS
## bugfixes
* reflected XSS through `/?hc` (the optional subfolder parameter to the [connect](https://a.ocv.me/?hc) page)
* if someone tricked you into clicking `http://127.0.0.1:3923/?hc=<script>alert(1)</script>` they could potentially have moved/deleted existing files on the server, or uploaded new files, using your account
* if you use a reverse proxy, you can check if you have been exploited like so:
* nginx: grep your logs for URLs containing `?hc=` with `<` somewhere in its value, for example using the following command:
```bash
(gzip -dc access.log*.gz; cat access.log) | sed -r 's/" [0-9]+ .*//' | grep -E '[?&](hc|pw)=.*[<>]'
```
* if you find any traces of exploitation (or just want to be on the safe side) it's recommended to change the passwords of your copyparty accounts
* thanks again to @TheHackyDog !
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2023-0718-0746 `v1.8.4` range-select v2

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-ac" \
org.opencontainers.image.description="copyparty with Pillow and FFmpeg (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
wget \
@@ -19,4 +20,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-dj" \
org.opencontainers.image.description="copyparty with all optional dependencies, including musical key / bpm detection"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
COPY i/bin/mtag/install-deps.sh ./
COPY i/bin/mtag/audio-bpm.py /mtag/
@@ -35,4 +36,4 @@ RUN apk add -U !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-im" \
org.opencontainers.image.description="copyparty with Pillow and Mutagen (image thumbnails, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
wget \
@@ -18,4 +19,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-iv" \
org.opencontainers.image.description="copyparty with Pillow, FFmpeg, libvips (image/audio/video thumbnails, audio transcoding, media tags)"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk add -U !pyc \
wget \
@@ -25,4 +26,4 @@ RUN apk add -U !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min" \
org.opencontainers.image.description="just copyparty, no thumbnails / media tags / audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add !pyc \
python3 \
@@ -17,4 +18,4 @@ RUN apk --no-cache add !pyc \
COPY i/dist/copyparty-sfx.py ./
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "/z/copyparty-sfx.py", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]

View File

@@ -5,7 +5,8 @@ LABEL org.opencontainers.image.url="https://github.com/9001/copyparty" \
org.opencontainers.image.licenses="MIT" \
org.opencontainers.image.title="copyparty-min-pip" \
org.opencontainers.image.description="just copyparty, no thumbnails, no media tags, no audio transcoding"
ENV PYTHONPYCACHEPREFIX=/tmp/pyc
ENV PYTHONPYCACHEPREFIX=/tmp/pyc \
XDG_CONFIG_HOME=/cfg
RUN apk --no-cache add python3 py3-pip !pyc \
&& python3 -m pip install copyparty \
@@ -17,4 +18,4 @@ RUN apk --no-cache add python3 py3-pip !pyc \
WORKDIR /w
EXPOSE 3923
ENTRYPOINT ["python3", "-m", "copyparty", "-c", "/z/initcfg"]
ENTRYPOINT ["python3", "-m", "copyparty", "--no-crt", "--no-thumb", "-c", "/z/initcfg"]

View File

@@ -15,6 +15,7 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy
* `copyparty/ac` is the recommended [image edition](#editions)
* you can download the image from github instead by replacing `copyparty/ac` with `ghcr.io/9001/copyparty-ac`
* if you are using rootless podman, remove `-u 1000`
* if you have selinux, append `:z` to all `-v` args (for example `-v /mnt/nas:/w:z`)
i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏

View File

@@ -21,6 +21,7 @@ copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mdns.py,
copyparty/metrics.py,
copyparty/mtag.py,
copyparty/multicast.py,
copyparty/pwhash.py,

View File

@@ -12,7 +12,7 @@ import tempfile
import unittest
from tests import util as tu
from tests.util import Cfg
from tests.util import Cfg, eprint
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
@@ -93,7 +93,7 @@ class TestHttpCli(unittest.TestCase):
res = "ok " + fp in ret
print("[{}] {} {} = {}".format(fp, rok, wok, res))
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, furl))
eprint("\033[33m{}\n# {}\033[0m".format(ret, furl))
self.fail()
# file browser: html
@@ -101,7 +101,7 @@ class TestHttpCli(unittest.TestCase):
res = "'{}'".format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, durl))
eprint("\033[33m{}\n# {}\033[0m".format(ret, durl))
self.fail()
# file browser: json
@@ -110,7 +110,7 @@ class TestHttpCli(unittest.TestCase):
res = '"{}"'.format(self.fn) in ret
print(res)
if rok != res:
print("\033[33m{}\n# {}\033[0m".format(ret, url))
eprint("\033[33m{}\n# {}\033[0m".format(ret, url))
self.fail()
# tar
@@ -132,7 +132,9 @@ class TestHttpCli(unittest.TestCase):
if durl.split("/")[-1] in self.can_read:
ref = [x for x in vfiles if self.in_dive(top + "/" + durl, x)]
for f in ref:
print("{}: {}".format("ok" if f in tar_ok else "NG", f))
ok = f in tar_ok
pr = print if ok else eprint
pr("{}: {}".format("ok" if ok else "NG", f))
ref.sort()
tar_ok.sort()
self.assertEqual(ref, tar_ok)

View File

@@ -1,4 +1,9 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import re
import sys
import time
import shutil
@@ -22,6 +27,12 @@ def nah(*a, **ka):
return False
def eprint(*a, **ka):
    """print() to stderr, flushing immediately so output is not lost in buffers."""
    kw = dict(ka, file=sys.stderr)
    print(*a, **kw)
    sys.stderr.flush()
if MACOS:
import posixpath
@@ -113,7 +124,7 @@ class Cfg(Namespace):
ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
ka.update(**{k: 0 for k in ex.split()})
ex = "ah_alg doctitle favico html_head lg_sbf log_fk md_sbf mth name textfiles unlist R RS SR"
ex = "ah_alg bname doctitle favico html_head lg_sbf log_fk md_sbf mth name textfiles unlist vname R RS SR"
ka.update(**{k: "" for k in ex.split()})
ex = "on403 on404 xad xar xau xban xbd xbr xbu xiu xm"
@@ -179,6 +190,8 @@ class VHttpSrv(object):
self.gpwd = Garda("")
self.g404 = Garda("")
self.ptn_cc = re.compile(r"[\x00-\x1f]")
# test stub: returns a fixed cache-buster token so URLs are stable across
# test runs (presumably the real server derives a changing value — verify)
def cachebuster(self):
return "a"