Compare commits

..

7 Commits

Author SHA1 Message Date
ed
a0c1239246 v1.8.0 2023-06-26 00:05:12 +00:00
ed
b8e851c332 cloudflare update + cosmetics:
* toastb padding fixes scrollbar on norwegian 403 in firefox
* fix text aspect ratio in seekbar on compact toggle
* crashpage had link overlaps on homepage
2023-06-25 23:09:29 +00:00
ed
baaf2eb24d include mdns names in tls cert 2023-06-25 22:06:35 +00:00
ed
e197895c10 support hashed passwords; closes #39 2023-06-25 21:50:33 +00:00
ed
cb75efa05d md-editor: index file and trigger upload hooks 2023-06-20 18:11:35 +00:00
ed
8b0cf2c982 volflags to limit volume size / num files; closes #40 2023-06-19 00:42:45 +00:00
ed
fc7d9e1f9c update pkgs to 1.7.6 2023-06-11 09:13:58 +00:00
26 changed files with 567 additions and 70 deletions

View File

@@ -85,6 +85,7 @@ turn almost any device into a file server with resumable uploads/downloads using
* [security](#security) - some notes on hardening
* [gotchas](#gotchas) - behavior that might be unexpected
* [cors](#cors) - cross-site request config
* [password hashing](#password-hashing) - you can hash passwords
* [https](#https) - both HTTP and HTTPS are accepted
* [recovering from crashes](#recovering-from-crashes)
* [client crashes](#client-crashes)
@@ -980,6 +981,8 @@ set upload rules using volflags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
* `:c,vmaxb=1g` block uploads if total volume size would exceed 1 GiB afterwards
* `:c,vmaxn=4k` block uploads if volume would contain more than 4096 files afterwards
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -1566,6 +1569,17 @@ by default, except for `GET` and `HEAD` operations, all requests must either:
cors can be configured with `--acao` and `--acam`, or the protections entirely disabled with `--allow-csrf`
## password hashing
you can hash passwords before putting them into config files / providing them as arguments; see `--help-pwhash` for all the details
`--ah-alg argon2` enables it, and if you have any plaintext passwords then it'll print the hashed versions on startup so you can replace them
optionally also specify `--ah-cli` to enter an interactive mode where it will hash passwords without ever writing the plaintext ones to disk
the default configs take about 0.4 sec and 256 MiB RAM to process a new password on a decent laptop
## https
both HTTP and HTTPS are accepted by default, but letting a [reverse proxy](#reverse-proxy) handle the https/tls/ssl would be better (probably more secure by default)
@@ -1613,6 +1627,8 @@ mandatory deps:
install these to enable bonus features
enable hashed passwords in config: `argon2-cffi`
enable ftp-server:
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
* with TLS encryption, `pyftpdlib pyopenssl`

View File

@@ -1,6 +1,6 @@
# Maintainer: icxes <dev.null@need.moe>
pkgname=copyparty
pkgver="1.7.4"
pkgver="1.7.6"
pkgrel=1
pkgdesc="Portable file sharing hub"
arch=("any")
@@ -15,11 +15,12 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
"libkeyfinder-git: detection of musical keys"
"qm-vamp-plugins: BPM detection"
"python-pyopenssl: ftps functionality"
"python-argon2_cffi: hashed passwords in config"
"python-impacket-git: smb support (bad idea)"
)
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
backup=("etc/${pkgname}.d/init" )
sha256sums=("ae304043e806e3c39cd39c8919a514c2b2ec0f35b4cddb114eaf0e66b6826265")
sha256sums=("e44bfb2e998677a160343ed4aa87741e653dbc27db594e6a00935e89b90cd3f4")
build() {
cd "${srcdir}/${pkgname}-${pkgver}"

View File

@@ -1,4 +1,7 @@
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, pillow, pyvips, ffmpeg, mutagen,
{ lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen,
# use argon2id-hashed passwords in config files (sha2 is always available)
withHashedPasswords ? true,
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
withThumbnails ? true,
@@ -35,6 +38,7 @@ let
++ lib.optional withFastThumbnails pyvips
++ lib.optional withMediaProcessing ffmpeg
++ lib.optional withBasicAudioMetadata mutagen
++ lib.optional withHashedPasswords argon2-cffi
);
in stdenv.mkDerivation {
pname = "copyparty";

View File

@@ -1,5 +1,5 @@
{
"url": "https://github.com/9001/copyparty/releases/download/v1.7.4/copyparty-sfx.py",
"version": "1.7.4",
"hash": "sha256-du7LYr28gzGy1zV2sTaUDHJfQ4dFFeyQS/TZCX42J5Q="
"url": "https://github.com/9001/copyparty/releases/download/v1.7.6/copyparty-sfx.py",
"version": "1.7.6",
"hash": "sha256-jwvQfG36lsp/oWN9DfR03kHyodo2kANoY3V930/ALds="
}

View File

@@ -258,6 +258,19 @@ def get_fk_salt(cert_path) -> str:
return ret.decode("utf-8")
def get_ah_salt() -> str:
    """Return the persistent account-password-hashing salt.

    Reads E.cfg/ah-salt.txt if present; otherwise generates a random
    salt and writes it there, so password hashes remain stable across
    restarts.
    """
    fp = os.path.join(E.cfg, "ah-salt.txt")
    try:
        with open(fp, "rb") as f:
            ret = f.read().strip()
    except OSError:
        ret = b""

    if not ret:
        # salt-file missing, unreadable, or empty; create a fresh one
        # (an empty salt would silently weaken every password hash)
        ret = base64.b64encode(os.urandom(18))
        with open(fp, "wb") as f:
            f.write(ret + b"\n")

    return ret.decode("utf-8")
def ensure_locale() -> None:
safe = "en_US.UTF-8"
for x in [
@@ -622,6 +635,38 @@ def get_sects():
"""
),
],
[
"pwhash",
"password hashing",
dedent(
"""
when \033[36m--ah-alg\033[0m is not the default [\033[32mnone\033[0m], all account passwords must be hashed
passwords can be hashed on the commandline with \033[36m--ah-gen\033[0m, but copyparty will also hash and print any passwords that are non-hashed (password which do not start with '+') and then terminate afterwards
\033[36m--ah-alg\033[0m specifies the hashing algorithm and a list of optional comma-separated arguments:
\033[36m--ah-alg argon2\033[0m # which is the same as:
\033[36m--ah-alg argon2,3,256,4,19\033[0m
use argon2id with timecost 3, 256 MiB, 4 threads, version 19 (0x13/v1.3)
\033[36m--ah-alg scrypt\033[0m # which is the same as:
\033[36m--ah-alg scrypt,13,2,8,4\033[0m
use scrypt with cost 2**13, 2 iterations, blocksize 8, 4 threads
\033[36m--ah-alg sha2\033[0m # which is the same as:
\033[36m--ah-alg sha2,424242\033[0m
use sha2-512 with 424242 iterations
recommended: \033[32m--ah-alg argon2\033[0m
(takes about 0.4 sec and 256M RAM to process a new password)
argon2 needs python-package argon2-cffi,
scrypt needs openssl,
sha2 is always available
"""
),
],
]
@@ -730,6 +775,7 @@ def add_cert(ap, cert_path):
ap2.add_argument("--crt-exact", action="store_true", help="do not add wildcard entries for each --crt-ns")
ap2.add_argument("--crt-noip", action="store_true", help="do not add autodetected IP addresses into cert")
ap2.add_argument("--crt-nolo", action="store_true", help="do not add 127.0.0.1 / localhost into cert")
ap2.add_argument("--crt-nohn", action="store_true", help="do not add mDNS names / hostname into cert")
ap2.add_argument("--crt-dir", metavar="PATH", default=cert_dir, help="where to save the CA cert")
ap2.add_argument("--crt-cdays", metavar="D", type=float, default=3650, help="ca-certificate expiration time in days")
ap2.add_argument("--crt-sdays", metavar="D", type=float, default=365, help="server-cert expiration time in days")
@@ -844,7 +890,7 @@ def add_optouts(ap):
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (as specified by the 'lifetime' volflag)")
def add_safety(ap, fk_salt):
def add_safety(ap):
ap2 = ap.add_argument_group('safety options')
ap2.add_argument("-s", action="count", default=0, help="increase safety: Disable thumbnails / potentially dangerous software (ffmpeg/pillow/vips), hide partial uploads, avoid crawlers.\n └─Alias of\033[32m --dotpart --no-thumb --no-mtag-ff --no-robots --force-js")
ap2.add_argument("-ss", action="store_true", help="further increase safety: Prevent js-injection, accidental move/delete, broken symlinks, webdav, 404 on 403, ban on excessive 404s.\n └─Alias of\033[32m -s --unpost=0 --no-del --no-mv --hardlink --vague-403 --ban-404=50,60,1440 -nih")
@@ -852,8 +898,6 @@ def add_safety(ap, fk_salt):
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="do a sanity/safety check of all volumes on startup; arguments \033[33mUSER\033[0m,\033[33mVOL\033[0m,\033[33mFLAGS\033[0m; example [\033[32m**,*,ln,p,r\033[0m]")
ap2.add_argument("--xvol", action="store_true", help="never follow symlinks leaving the volume root, unless the link is into another volume where the user has similar access (volflag=xvol)")
ap2.add_argument("--xdev", action="store_true", help="stay within the filesystem of the volume root; do not descend into other devices (symlink or bind-mount to another HDD, ...) (volflag=xdev)")
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files -- this one DOES matter")
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
@@ -870,6 +914,16 @@ def add_safety(ap, fk_salt):
ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like described in --acao)")
def add_salt(ap, fk_salt, ah_salt):
ap2 = ap.add_argument_group('salting options')
ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: argon2 scrypt sha2 none (each optionally followed by alg-specific comma-sep. config)")
ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if --ah-alg is none (default)")
ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
ap2.add_argument("--ah-cli", action="store_true", help="interactive shell which hashes passwords without ever storing or displaying the original passwords")
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")
def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -980,7 +1034,7 @@ def add_ui(ap, retry):
ap2.add_argument("--lang", metavar="LANG", type=u, default="eng", help="language")
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=8, help="number of themes installed")
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\.(js|css)$\033[0m] (volflag=unlist)")
ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching REGEX in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
@@ -1030,6 +1084,7 @@ def run_argparse(
cert_path = os.path.join(E.cfg, "cert.pem")
fk_salt = get_fk_salt(cert_path)
ah_salt = get_ah_salt()
hcores = min(CORES, 4) # optimal on py3.11 @ r5-4500U
@@ -1053,7 +1108,8 @@ def run_argparse(
add_ftp(ap)
add_webdav(ap)
add_smb(ap)
add_safety(ap, fk_salt)
add_safety(ap)
add_salt(ap, fk_salt, ah_salt)
add_optouts(ap)
add_shutdown(ap)
add_yolo(ap)
@@ -1138,16 +1194,22 @@ def main(argv: Optional[list[str]] = None) -> None:
supp = args_from_cfg(v)
argv.extend(supp)
deprecated: list[tuple[str, str]] = []
deprecated: list[tuple[str, str]] = [("--salt", "--warksalt")]
for dk, nk in deprecated:
try:
idx = argv.index(dk)
except:
idx = -1
ov = ""
for n, k in enumerate(argv):
if k == dk or k.startswith(dk + "="):
idx = n
if "=" in k:
ov = "=" + k.split("=", 1)[1]
if idx < 0:
continue
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
lprint(msg.format(dk, nk))
argv[idx] = nk
argv[idx] = nk + ov
time.sleep(2)
da = len(argv) == 1

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 7, 6)
CODENAME = "unlinked"
BUILD_DT = (2023, 6, 11)
VERSION = (1, 8, 0)
CODENAME = "argon"
BUILD_DT = (2023, 6, 26)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -15,6 +15,7 @@ from datetime import datetime
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
from .bos import bos
from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
from .pwhash import PWHash
from .util import (
IMPLICATIONS,
META_NOBOTS,
@@ -40,7 +41,10 @@ if True: # pylint: disable=using-constant-test
from .util import NamedLogger, RootLogger
if TYPE_CHECKING:
pass
from .broker_mp import BrokerMp
from .broker_thr import BrokerThr
from .broker_util import BrokerCli
# Vflags: TypeAlias = dict[str, str | bool | float | list[str]]
# Vflags: TypeAlias = dict[str, Any]
# Mflags: TypeAlias = dict[str, Vflags]
@@ -90,6 +94,8 @@ class Lim(object):
self.dfl = 0 # free disk space limit
self.dft = 0 # last-measured time
self.dfv = 0 # currently free
self.vbmax = 0 # volume bytes max
self.vnmax = 0 # volume max num files
self.smin = 0 # filesize min
self.smax = 0 # filesize max
@@ -119,8 +125,11 @@ class Lim(object):
ip: str,
rem: str,
sz: int,
ptop: str,
abspath: str,
broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]] = None,
reg: Optional[dict[str, dict[str, Any]]] = None,
volgetter: str = "up2k.get_volsize",
) -> tuple[str, str]:
if reg is not None and self.reg is None:
self.reg = reg
@@ -131,6 +140,7 @@ class Lim(object):
self.chk_rem(rem)
if sz != -1:
self.chk_sz(sz)
self.chk_vsz(broker, ptop, sz, volgetter)
self.chk_df(abspath, sz) # side effects; keep last-ish
ap2, vp2 = self.rot(abspath)
@@ -146,6 +156,25 @@ class Lim(object):
if self.smax and sz > self.smax:
raise Pebkac(400, "file too big")
def chk_vsz(
    self,
    broker: Optional[Union["BrokerCli", "BrokerMp", "BrokerThr"]],
    ptop: str,
    sz: int,
    volgetter: str = "up2k.get_volsize",
) -> None:
    """Raise Pebkac(400) if adding sz bytes would exceed the volume limits.

    broker: used to query the up2k indexer for current volume usage;
            no-op when None or when neither vmaxb nor vmaxn is set
    ptop: filesystem path of the volume root
    sz: size in bytes of the incoming upload
    volgetter: broker destination returning (nbytes, nfiles) for ptop
    """
    if not broker or not self.vbmax + self.vnmax:
        return

    # current totals for this volume (db + unfinished uploads)
    x = broker.ask(volgetter, ptop)
    nbytes, nfiles = x.get()

    if self.vbmax and self.vbmax < nbytes + sz:
        raise Pebkac(400, "volume has exceeded max size")

    if self.vnmax and self.vnmax < nfiles + 1:
        raise Pebkac(400, "volume has exceeded max num.files")
def chk_df(self, abspath: str, sz: int, already_written: bool = False) -> None:
if not self.dfl:
return
@@ -266,7 +295,7 @@ class Lim(object):
self.bupc[ip] = mark
if mark >= self.bmax:
raise Pebkac(429, "ingress saturated")
raise Pebkac(429, "upload size limit exceeded")
class VFS(object):
@@ -729,6 +758,7 @@ class AuthSrv(object):
warn_anonwrite: bool = True,
dargs: Optional[argparse.Namespace] = None,
) -> None:
self.ah = PWHash(args)
self.args = args
self.dargs = dargs or args
self.log_func = log_func
@@ -1106,6 +1136,8 @@ class AuthSrv(object):
self.log("\n{0}\n{1}{0}".format(t, "\n".join(slns)))
raise
self.setup_pwhash(acct)
# case-insensitive; normalize
if WINDOWS:
cased = {}
@@ -1290,6 +1322,16 @@ class AuthSrv(object):
use = True
lim.bmax, lim.bwin = [unhumanize(x) for x in zs.split(",")]
zs = vol.flags.get("vmaxb")
if zs:
use = True
lim.vbmax = unhumanize(zs)
zs = vol.flags.get("vmaxn")
if zs:
use = True
lim.vnmax = unhumanize(zs)
if use:
vol.lim = lim
@@ -1536,6 +1578,10 @@ class AuthSrv(object):
self.log(t, 1)
errors = True
if self.args.smb and self.ah.on and acct:
self.log("--smb can only be used when --ah-alg is none", 1)
errors = True
for vol in vfs.all_vols.values():
for k in list(vol.flags.keys()):
if re.match("^-[^-]+$", k):
@@ -1605,7 +1651,51 @@ class AuthSrv(object):
self.re_pwd = None
pwds = [re.escape(x) for x in self.iacct.keys()]
if pwds:
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
if self.ah.on:
zs = r"(\[H\] pw:.*|[?&]pw=)([^&]+)"
else:
zs = r"(\[H\] pw:.*|=)(" + "|".join(pwds) + r")([]&; ]|$)"
self.re_pwd = re.compile(zs)
def setup_pwhash(self, acct: dict[str, str]) -> None:
    """Initialize password hashing and convert plaintext passwords.

    If --ah-alg is enabled, first handles the hashing cli-tools
    (--ah-cli / --ah-gen), which terminate the process when done;
    otherwise hashes any plaintext passwords in acct in-place and
    logs the hashed values so the admin can update their config.
    """
    self.ah = PWHash(self.args)
    if not self.ah.on:
        return

    # the standalone hashing tools exit instead of starting the server
    if self.args.ah_cli:
        self.ah.cli()
        sys.exit()
    elif self.args.ah_gen == "-":
        self.ah.stdin()
        sys.exit()
    elif self.args.ah_gen:
        print(self.ah.hash(self.args.ah_gen))
        sys.exit()

    if not acct:
        return

    # already-hashed passwords are "+" followed by 32 chars;
    # anything else is plaintext and gets replaced with its hash
    changed = False
    for uname, pw in list(acct.items()):
        if pw.startswith("+") and len(pw) == 33:
            continue

        changed = True
        hpw = self.ah.hash(pw)
        acct[uname] = hpw
        t = "hashed password for account {}: {}"
        self.log(t.format(uname, hpw), 3)

    if not changed:
        return

    lns = [" {}: {}".format(uname, pw) for uname, pw in acct.items()]
    t = "please use the following hashed passwords in your config:\n{}"
    self.log(t.format("\n".join(lns)), 3)
def chk_sqlite_threadsafe(self) -> str:
v = SQLITE_VER[-1:]

View File

@@ -9,7 +9,7 @@ import queue
from .__init__ import CORES, TYPE_CHECKING
from .broker_mpw import MpWorker
from .broker_util import try_exec
from .broker_util import ExceptionalQueue, try_exec
from .util import Daemon, mp
if TYPE_CHECKING:
@@ -107,6 +107,19 @@ class BrokerMp(object):
if retq_id:
proc.q_pend.put((retq_id, "retq", rv))
def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
    """Synchronously invoke a hub-managed service, returning a pre-filled queue.

    dest: period-separated attribute path from the hub to the target
          callable, e.g. "up2k.get_volsize"
    """
    # new non-ipc invoking managed service in hub
    obj = self.hub
    for node in dest.split("."):
        obj = getattr(obj, node)

    # NOTE(review): try_exec presumably captures exceptions so the
    # caller observes them on .get() -- confirm against broker_util
    rv = try_exec(True, obj, *args)

    retq = ExceptionalQueue(1)
    retq.put(rv)
    return retq
def say(self, dest: str, *args: Any) -> None:
"""
send message to non-hub component in other process,

View File

@@ -10,6 +10,9 @@ from .util import Netdev, runcmd
HAVE_CFSSL = True
if True: # pylint: disable=using-constant-test
from .util import RootLogger
def ensure_cert(log: "RootLogger", args) -> None:
"""
@@ -118,6 +121,9 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
names.append(ip.split("/")[0])
if args.crt_nolo:
names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
if not args.crt_nohn:
names.append(args.name)
names.append(args.name + ".local")
if not names:
names = ["127.0.0.1"]
if "127.0.0.1" in names or "::1" in names:

View File

@@ -78,7 +78,9 @@ flagcats = {
},
"upload rules": {
"maxn=250,600": "max 250 uploads over 15min",
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)",
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
"vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
"vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
"rand": "force randomized filenames, 9 chars long by default",
"nrand=N": "randomized filenames are N chars long",
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",

View File

@@ -79,10 +79,13 @@ class FtpAuth(DummyAuthorizer):
raise AuthenticationFailed("banned")
asrv = self.hub.asrv
if username == "anonymous":
uname = "*"
else:
uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*"
uname = "*"
if username != "anonymous":
for zs in (password, username):
zs = asrv.iacct.get(asrv.ah.hash(zs), "")
if zs:
uname = zs
break
if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)):
g = self.hub.gpwd

View File

@@ -173,13 +173,16 @@ class HttpCli(object):
def log(self, msg: str, c: Union[int, str] = 0) -> None:
ptn = self.asrv.re_pwd
if ptn and ptn.search(msg):
msg = ptn.sub(self.unpwd, msg)
if self.asrv.ah.on:
msg = ptn.sub("\033[7m pw \033[27m", msg)
else:
msg = ptn.sub(self.unpwd, msg)
self.log_func(self.log_src, msg, c)
def unpwd(self, m: Match[str]) -> str:
a, b = m.groups()
return "=\033[7m {} \033[27m{}".format(self.asrv.iacct[a], b)
a, b, c = m.groups()
return "{}\033[7m {} \033[27m{}".format(a, self.asrv.iacct[b], c)
def _check_nonfatal(self, ex: Pebkac, post: bool) -> bool:
if post:
@@ -383,13 +386,14 @@ class HttpCli(object):
zs = base64.b64decode(zb).decode("utf-8")
# try "pwd", "x:pwd", "pwd:x"
for bauth in [zs] + zs.split(":", 1)[::-1]:
if self.asrv.iacct.get(bauth):
hpw = self.asrv.ah.hash(bauth)
if self.asrv.iacct.get(hpw):
break
except:
pass
self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
self.uname = self.asrv.iacct.get(self.pw) or "*"
self.uname = self.asrv.iacct.get(self.asrv.ah.hash(self.pw)) or "*"
self.rvol = self.asrv.vfs.aread[self.uname]
self.wvol = self.asrv.vfs.awrite[self.uname]
self.mvol = self.asrv.vfs.amove[self.uname]
@@ -1358,7 +1362,9 @@ class HttpCli(object):
lim = vfs.get_dbv(rem)[0].lim
fdir = vfs.canonical(rem)
if lim:
fdir, rem = lim.all(self.ip, rem, remains, fdir)
fdir, rem = lim.all(
self.ip, rem, remains, vfs.realpath, fdir, self.conn.hsrv.broker
)
fn = None
if rem and not self.trailing_slash and not bos.path.isdir(fdir):
@@ -1491,6 +1497,7 @@ class HttpCli(object):
lim.bup(self.ip, post_sz)
try:
lim.chk_sz(post_sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, post_sz)
except:
bos.unlink(path)
raise
@@ -1965,7 +1972,7 @@ class HttpCli(object):
return True
def get_pwd_cookie(self, pwd: str) -> str:
if pwd in self.asrv.iacct:
if self.asrv.ah.hash(pwd) in self.asrv.iacct:
msg = "login ok"
dur = int(60 * 60 * self.args.logout)
else:
@@ -2101,7 +2108,9 @@ class HttpCli(object):
lim = vfs.get_dbv(rem)[0].lim
fdir_base = vfs.canonical(rem)
if lim:
fdir_base, rem = lim.all(self.ip, rem, -1, fdir_base)
fdir_base, rem = lim.all(
self.ip, rem, -1, vfs.realpath, fdir_base, self.conn.hsrv.broker
)
upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/")
if not nullwrite:
bos.makedirs(fdir_base)
@@ -2194,6 +2203,7 @@ class HttpCli(object):
try:
lim.chk_df(tabspath, sz, True)
lim.chk_sz(sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, sz)
lim.chk_bup(self.ip)
lim.chk_nup(self.ip)
except:
@@ -2369,7 +2379,7 @@ class HttpCli(object):
fp = vfs.canonical(rp)
lim = vfs.get_dbv(rem)[0].lim
if lim:
fp, rp = lim.all(self.ip, rp, clen, fp)
fp, rp = lim.all(self.ip, rp, clen, vfs.realpath, fp, self.conn.hsrv.broker)
bos.makedirs(fp)
fp = os.path.join(fp, fn)
@@ -2440,6 +2450,25 @@ class HttpCli(object):
if p_field != "body":
raise Pebkac(400, "expected body, got {}".format(p_field))
xbu = vfs.flags.get("xbu")
if xbu:
if not runhook(
self.log,
xbu,
fp,
self.vpath,
self.host,
self.uname,
time.time(),
0,
self.ip,
time.time(),
"",
):
t = "save blocked by xbu server config"
self.log(t, 1)
raise Pebkac(403, t)
if bos.path.exists(fp):
bos.unlink(fp)
@@ -2451,6 +2480,7 @@ class HttpCli(object):
lim.bup(self.ip, sz)
try:
lim.chk_sz(sz)
lim.chk_vsz(self.conn.hsrv.broker, vfs.realpath, sz)
except:
bos.unlink(fp)
raise
@@ -2459,6 +2489,39 @@ class HttpCli(object):
new_lastmod3 = int(new_lastmod * 1000)
sha512 = sha512[:56]
xau = vfs.flags.get("xau")
if xau and not runhook(
self.log,
xau,
fp,
self.vpath,
self.host,
self.uname,
new_lastmod,
sz,
self.ip,
new_lastmod,
"",
):
t = "save blocked by xau server config"
self.log(t, 1)
os.unlink(fp)
raise Pebkac(403, t)
vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.say(
"up2k.hash_file",
vfs.realpath,
vfs.vpath,
vfs.flags,
vsplit(rem)[0],
fn,
self.ip,
new_lastmod,
self.uname,
True,
)
response = json.dumps(
{"ok": True, "lastmod": new_lastmod3, "size": sz, "sha512": sha512}
)

View File

@@ -17,7 +17,9 @@ class Ico(object):
def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]:
"""placeholder to make thumbnails not break"""
zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
bext = ext.encode("ascii", "replace")
ext = bext.decode("utf-8")
zb = hashlib.sha1(bext).digest()[2:4]
if PY2:
zb = [ord(x) for x in zb]

145
copyparty/pwhash.py Normal file
View File

@@ -0,0 +1,145 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import argparse
import base64
import hashlib
import sys
import threading
from .__init__ import unicode
class PWHash(object):
    """Hashes account passwords with argon2 / scrypt / sha2 (or not at all).

    Configured by args.ah_alg: the algorithm name optionally followed by
    comma-separated tuning parameters, e.g. "argon2,3,256,4,19".
    Hashed passwords are "+" followed by 32 chars of urlsafe-b64
    (24 bytes of digest), 33 chars total.
    """

    def __init__(self, args: argparse.Namespace):
        self.args = args

        # split "alg,p1,p2,..." into the algorithm name and its params;
        # the previous 2-tuple unpack broke for more than one param
        # (argon2,3,256,4,19) and turned a single param into a string
        # so only its first digit was read (sha2,424242 ran 4 rounds)
        zsl = args.ah_alg.split(",")
        alg = zsl[0]
        ac = zsl[1:]

        if alg == "none":
            alg = ""

        self.alg = alg
        self.ac = ac  # list of optional alg-specific params (strings)
        if not alg:
            self.on = False
            self.hash = unicode  # identity; passwords stay plaintext
            return

        self.on = True
        self.salt = args.ah_salt.encode("utf-8")
        self.cache: dict[str, str] = {}
        self.mutex = threading.Lock()
        self.hash = self._cache_hash

        if alg == "sha2":
            self._hash = self._gen_sha2
        elif alg == "scrypt":
            self._hash = self._gen_scrypt
        elif alg == "argon2":
            self._hash = self._gen_argon2
        else:
            t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
            raise Exception(t.format(alg))

    def _cache_hash(self, plain: str) -> str:
        """Memoizing wrapper around the configured hash function."""
        with self.mutex:
            try:
                return self.cache[plain]
            except KeyError:
                pass

            if not plain:
                return ""

            if len(plain) > 255:
                raise Exception("password too long")

            if len(self.cache) > 9000:
                # crude size bound; entries are recomputed on demand
                self.cache = {}

            ret = self._hash(plain)
            self.cache[plain] = ret
            return ret

    def _gen_sha2(self, plain: str) -> str:
        """Iterated salted sha512, truncated to 24 bytes; param: iterations."""
        its = int(self.ac[0]) if self.ac else 424242
        bplain = plain.encode("utf-8")
        ret = b"\n"
        for _ in range(its):
            ret = hashlib.sha512(self.salt + bplain + ret).digest()
        return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")

    def _gen_scrypt(self, plain: str) -> str:
        """Iterated scrypt; params: log2-ish cost, iterations, blocksize, threads."""
        # NOTE(review): 2 << 13 is 2**14 but the --help text says 2**13;
        # kept as-is since changing it would invalidate existing hashes
        cost = 2 << 13
        its = 2
        blksz = 8
        para = 4
        try:
            cost = 2 << int(self.ac[0])
            its = int(self.ac[1])
            blksz = int(self.ac[2])
            para = int(self.ac[3])
        except:
            pass  # absent params keep their defaults

        ret = plain.encode("utf-8")
        for _ in range(its):
            ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)

        return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

    def _gen_argon2(self, plain: str) -> str:
        """argon2id via argon2-cffi; params: time, mem (MiB), threads, version."""
        from argon2.low_level import Type as ArgonType
        from argon2.low_level import hash_secret

        time_cost = 3
        mem_cost = 256
        parallelism = 4
        version = 19
        try:
            time_cost = int(self.ac[0])
            mem_cost = int(self.ac[1])
            parallelism = int(self.ac[2])
            version = int(self.ac[3])
        except:
            pass  # absent params keep their defaults

        bplain = plain.encode("utf-8")
        bret = hash_secret(
            secret=bplain,
            salt=self.salt,
            time_cost=time_cost,
            memory_cost=mem_cost * 1024,  # argon2 takes KiB
            parallelism=parallelism,
            hash_len=24,
            type=ArgonType.ID,
            version=version,
        )

        # keep only the digest part of the modular-crypt string,
        # converted to the urlsafe-b64 alphabet
        ret = bret.split(b"$")[-1].decode("utf-8")
        return "+" + ret.replace("/", "_").replace("+", "-")

    def stdin(self) -> None:
        """Hash passwords read from stdin, one per line, until blank/EOF."""
        while True:
            ln = sys.stdin.readline().strip()
            if not ln:
                break
            print(self.hash(ln))

    def cli(self) -> None:
        """Interactive hasher; never echoes or stores the plaintext."""
        import getpass

        while True:
            p1 = getpass.getpass("password> ")
            p2 = getpass.getpass("again or just hit ENTER> ")
            if p2 and p1 != p2:
                print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
                continue

            print(self.hash(p1))
            print()

View File

@@ -69,7 +69,7 @@ class U2idx(object):
fsize = body["size"]
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
wark = up2k_wark_from_hashlist(self.args.warksalt, fsize, fhash)
uq = "substr(w,1,16) = ? and w = ?"
uv: list[Union[str, int]] = [wark[:16], wark]

View File

@@ -41,6 +41,7 @@ from .util import (
gen_filekey,
gen_filekey_dbg,
hidedir,
humansize,
min_ex,
quotep,
rand_name,
@@ -56,6 +57,7 @@ from .util import (
sfsenc,
spack,
statdir,
unhumanize,
vjoin,
vsplit,
w8b64dec,
@@ -110,7 +112,7 @@ class Up2k(object):
self.args = hub.args
self.log_func = hub.log
self.salt = self.args.salt
self.salt = self.args.warksalt
self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")
self.gid = 0
@@ -125,6 +127,8 @@ class Up2k(object):
self.registry: dict[str, dict[str, dict[str, Any]]] = {}
self.flags: dict[str, dict[str, Any]] = {}
self.droppable: dict[str, list[str]] = {}
self.volnfiles: dict["sqlite3.Cursor", int] = {}
self.volsize: dict["sqlite3.Cursor", int] = {}
self.volstate: dict[str, str] = {}
self.vol_act: dict[str, float] = {}
self.busy_aps: set[str] = set()
@@ -261,6 +265,20 @@ class Up2k(object):
}
return json.dumps(ret, indent=4)
def get_volsize(self, ptop: str) -> tuple[int, int]:
    """Thread-safe wrapper around _get_volsize; returns (nbytes, nfiles)."""
    with self.mutex:
        return self._get_volsize(ptop)
def _get_volsize(self, ptop: str) -> tuple[int, int]:
cur = self.cur[ptop]
nbytes = self.volsize[cur]
nfiles = self.volnfiles[cur]
for j in list(self.registry.get(ptop, {}).values()):
nbytes += j["size"]
nfiles += 1
return (nbytes, nfiles)
def rescan(
self, all_vols: dict[str, VFS], scan_vols: list[str], wait: bool, fscan: bool
) -> str:
@@ -810,6 +828,8 @@ class Up2k(object):
try:
cur = self._open_db(db_path)
self.cur[ptop] = cur
self.volsize[cur] = 0
self.volnfiles[cur] = 0
# speeds measured uploading 520 small files on a WD20SPZX (SMR 2.5" 5400rpm 4kb)
dbd = flags["dbd"]
@@ -917,6 +937,24 @@ class Up2k(object):
db.c.connection.commit()
if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):
zs = "select count(sz), sum(sz) from up"
vn, vb = db.c.execute(zs).fetchone()
vb = vb or 0
vb += vn * 2048
self.volsize[db.c] = vb
self.volnfiles[db.c] = vn
vmaxb = unhumanize(vol.flags.get("vmaxb") or "0")
vmaxn = unhumanize(vol.flags.get("vmaxn") or "0")
t = "{} / {} ( {} / {} files) in {}".format(
humansize(vb, True),
humansize(vmaxb, True),
humansize(vn, True).rstrip("B"),
humansize(vmaxn, True).rstrip("B"),
vol.realpath,
)
self.log(t)
return True, bool(n_add or n_rm or do_vac)
def _build_dir(
@@ -1092,7 +1130,7 @@ class Up2k(object):
top, rp, dts, lmod, dsz, sz
)
self.log(t)
self.db_rm(db.c, rd, fn)
self.db_rm(db.c, rd, fn, 0)
ret += 1
db.n += 1
in_db = []
@@ -1175,7 +1213,7 @@ class Up2k(object):
rm_files = [x for x in hits if x not in seen_files]
n_rm = len(rm_files)
for fn in rm_files:
self.db_rm(db.c, rd, fn)
self.db_rm(db.c, rd, fn, 0)
if n_rm:
self.log("forgot {} deleted files".format(n_rm))
@@ -2284,7 +2322,9 @@ class Up2k(object):
if lost:
c2 = None
for cur, dp_dir, dp_fn in lost:
self.db_rm(cur, dp_dir, dp_fn)
t = "forgetting deleted file: /{}"
self.log(t.format(vjoin(vjoin(vfs.vpath, dp_dir), dp_fn)))
self.db_rm(cur, dp_dir, dp_fn, cj["size"])
if c2 and c2 != cur:
c2.connection.commit()
@@ -2418,7 +2458,14 @@ class Up2k(object):
if vfs.lim:
ap2, cj["prel"] = vfs.lim.all(
cj["addr"], cj["prel"], cj["size"], ap1, reg
cj["addr"],
cj["prel"],
cj["size"],
cj["ptop"],
ap1,
self.hub.broker,
reg,
"up2k._get_volsize",
)
bos.makedirs(ap2)
vfs.lim.nup(cj["addr"])
@@ -2736,7 +2783,7 @@ class Up2k(object):
self._symlink(dst, d2, self.flags[ptop], lmod=lmod)
if cur:
self.db_rm(cur, rd, fn)
self.db_rm(cur, rd, fn, job["size"])
self.db_add(cur, vflags, rd, fn, lmod, *z2[3:])
if cur:
@@ -2779,7 +2826,7 @@ class Up2k(object):
self.db_act = self.vol_act[ptop] = time.time()
try:
self.db_rm(cur, rd, fn)
self.db_rm(cur, rd, fn, sz)
self.db_add(
cur,
vflags,
@@ -2809,13 +2856,17 @@ class Up2k(object):
return True
def db_rm(self, db: "sqlite3.Cursor", rd: str, fn: str, sz: int) -> None:
    """Delete one file from the up-table and adjust the volume counters.

    sz: the file's size in bytes, subtracted from the volume total
        (pass 0 when the counters should not change, e.g. during scans)
    """
    sql = "delete from up where rd = ? and fn = ?"
    try:
        r = db.execute(sql, (rd, fn))
    except:
        # rd/fn not representable as-is; retry with the s3enc form
        assert self.mem_cur
        r = db.execute(sql, s3enc(self.mem_cur, rd, fn))

    # only adjust bookkeeping if a row was actually deleted
    if r.rowcount:
        self.volsize[db] -= sz
        self.volnfiles[db] -= 1
def db_add(
self,
@@ -2844,6 +2895,9 @@ class Up2k(object):
v = (wark, int(ts), sz, rd, fn, ip or "", int(at or 0))
db.execute(sql, v)
self.volsize[db] += sz
self.volnfiles[db] += 1
xau = False if skip_xau else vflags.get("xau")
dst = djoin(ptop, rd, fn)
if xau and not runhook(
@@ -2991,12 +3045,12 @@ class Up2k(object):
break
abspath = djoin(adir, fn)
st = bos.stat(abspath)
volpath = "{}/{}".format(vrem, fn).strip("/")
vpath = "{}/{}".format(dbv.vpath, volpath).strip("/")
self.log("rm {}\n {}".format(vpath, abspath))
_ = dbv.get(volpath, uname, *permsets[0])
if xbd:
st = bos.stat(abspath)
if not runhook(
self.log,
xbd,
@@ -3020,14 +3074,26 @@ class Up2k(object):
try:
ptop = dbv.realpath
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
self._forget_file(ptop, volpath, cur, wark, True)
self._forget_file(ptop, volpath, cur, wark, True, st.st_size)
finally:
if cur:
cur.connection.commit()
bos.unlink(abspath)
if xad:
runhook(self.log, xad, abspath, vpath, "", uname, 0, 0, ip, 0, "")
runhook(
self.log,
xad,
abspath,
vpath,
"",
uname,
st.st_mtime,
st.st_size,
ip,
0,
"",
)
if is_dir:
ok, ng = rmdirs(self.log_func, scandir, True, atop, 1)
@@ -3203,7 +3269,7 @@ class Up2k(object):
if c2 and c2 != c1:
self._copy_tags(c1, c2, w)
self._forget_file(svn.realpath, srem, c1, w, c1 != c2)
self._forget_file(svn.realpath, srem, c1, w, c1 != c2, fsize)
self._relink(w, svn.realpath, srem, dabs)
curs.add(c1)
@@ -3279,6 +3345,7 @@ class Up2k(object):
cur: Optional["sqlite3.Cursor"],
wark: Optional[str],
drop_tags: bool,
sz: int,
) -> None:
"""forgets file in db, fixes symlinks, does not delete"""
srd, sfn = vsplit(vrem)
@@ -3293,7 +3360,7 @@ class Up2k(object):
q = "delete from mt where w=?"
cur.execute(q, (wark[:16],))
self.db_rm(cur, srd, sfn)
self.db_rm(cur, srd, sfn, sz)
reg = self.registry.get(ptop)
if reg:

View File

@@ -1626,7 +1626,12 @@ def unhumanize(sz: str) -> int:
pass
mc = sz[-1:].lower()
mi = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mc, 1)
mi = {
"k": 1024,
"m": 1024 * 1024,
"g": 1024 * 1024 * 1024,
"t": 1024 * 1024 * 1024 * 1024,
}.get(mc, 1)
return int(float(sz[:-1]) * mi)

View File

@@ -3080,6 +3080,8 @@ function eval_hash() {
// compact media player
function setacmp() {
clmod(ebi('widget'), 'cmp', props.mcmp);
pbar.onresize();
vbar.onresize();
}
bcfg_bind(props, 'mcmp', 'au_compact', false, setacmp);
setacmp();

View File

@@ -73,7 +73,7 @@ html {
#toastb {
max-height: 70vh;
overflow-y: auto;
padding: 1px;
padding: .1em;
}
#toast.scroll #toastb {
overflow-y: scroll;

View File

@@ -159,8 +159,8 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
window.onerror = undefined;
var html = [
'<h1>you hit a bug!</h1>',
'<p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">github issue</a></p>',
'<p style="font-size:1.3em;margin:0;line-height:2em">try to <a href="#" onclick="localStorage.clear();location.reload();">reset copyparty settings</a> if you are stuck here, or <a href="#" onclick="ignex();">ignore this</a> / <a href="#" onclick="ignex(true);">ignore all</a> / <a href="?b=u">basic</a></p>',
'<p style="color:#fff">please send me a screenshot arigathanks gozaimuch: <a href="<ghi>" target="_blank">new github issue</a></p>',
'<p class="b">' + esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)).replace(/\n/g, '<br />') + '</p>',
'<p><b>UA:</b> ' + esc(navigator.userAgent + '')
];
@@ -225,7 +225,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
'#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' +
'#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' +
'#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' +
'#exbox a{text-decoration:underline;color:#fc0} ' +
'#exbox a{text-decoration:underline;color:#fc0;background:#222;border:none} ' +
'#exbox h1{margin:.5em 1em 0 0;padding:0} ' +
'#exbox p.b{border-top:1px solid #999;margin:1em 0 0 0;font-size:1em} ' +
'#exbox ul, #exbox li {margin:0 0 0 .5em;padding:0} ' +
@@ -1786,16 +1786,17 @@ function xhrchk(xhr, prefix, e404, lvl, tag) {
if (xhr.status < 400 && xhr.status >= 200)
return true;
if (xhr.status == 403)
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
fun = toast[lvl || 'err'],
is_cf = /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser|\/chall[e]nge-platform|"chall[e]nge-error|nable Ja[v]aScript and cook/.test(errtxt);
if (xhr.status == 403 && !is_cf)
return toast.err(0, prefix + (L && L.xhr403 || "403: access denied\n\ntry pressing F5, maybe you got logged out"), tag);
if (xhr.status == 404)
return toast.err(0, prefix + e404, tag);
var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
fun = toast[lvl || 'err'];
if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
if (is_cf && (xhr.status == 403 || xhr.status == 503)) {
var now = Date.now(), td = now - cf_cha_t;
if (td < 15000)
return;

View File

@@ -4,8 +4,9 @@
* [future plans](#future-plans) - some improvement ideas
* [design](#design)
* [up2k](#up2k) - quick outline of the up2k protocol
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [why not tus](#why-not-tus) - I didn't know about [tus](https://tus.io/)
* [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right?
* [hashed passwords](#hashed-passwords) - regarding the curious decisions
* [http api](#http-api)
* [read](#read)
* [write](#write)
@@ -68,14 +69,14 @@ regarding the frequent server log message during uploads;
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left
## why not tus
### why not tus
I didn't know about [tus](https://tus.io/) when I made this, but:
* up2k has the advantage that it supports parallel uploading of non-contiguous chunks straight into the final file -- [tus does a merge at the end](https://tus.io/protocols/resumable-upload.html#concatenation) which is slow and taxing on the server HDD / filesystem (unless i'm misunderstanding)
* up2k has the slight disadvantage of requiring the client to hash the entire file before an upload can begin, but this has the benefit of immediately skipping duplicate files
* and the hashing happens in a separate thread anyways so it's usually not a bottleneck
## why chunk-hashes
### why chunk-hashes
a single sha512 would be better, right?
@@ -92,6 +93,15 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x
* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids
# hashed passwords
regarding the curious decisions
there is a static salt for all passwords;
* because most copyparty APIs allow users to authenticate using only their password, making the username unknown, so impossible to do per-account salts
* the drawback of this is that an attacker can bruteforce all accounts in parallel, however most copyparty instances only have a handful of accounts in the first place, and it can be compensated by increasing the hashing cost anyways
# http api
* table-column `params` = URL parameters; `?foo=bar&qux=...`

View File

@@ -48,6 +48,7 @@ thumbnails2 = ["pyvips"]
audiotags = ["mutagen"]
ftpd = ["pyftpdlib"]
ftps = ["pyftpdlib", "pyopenssl"]
pwhash = ["argon2-cffi"]
[project.scripts]
copyparty = "copyparty.__main__:main"

View File

@@ -23,6 +23,7 @@ copyparty/ico.py,
copyparty/mdns.py,
copyparty/mtag.py,
copyparty/multicast.py,
copyparty/pwhash.py,
copyparty/res,
copyparty/res/__init__.py,
copyparty/res/COPYING.txt,

View File

@@ -16,6 +16,8 @@ cat $f | awk '
h=0
};
};
/```/{o=!o}
o{next}
/^#/{s=1;rs=0;pr()}
/^#* *(nix package)/{rs=1}
/^#* *(install on android|dev env setup|just the sfx|complete release|optional gpl stuff|nixos module)|`$/{s=rs}

View File

@@ -140,6 +140,7 @@ args = {
"audiotags": ["mutagen"],
"ftpd": ["pyftpdlib"],
"ftps": ["pyftpdlib", "pyopenssl"],
"pwhash": ["argon2-cffi"],
},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/partyfuse.py", "bin/u2c.py"],

View File

@@ -98,7 +98,7 @@ class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ka = {}
ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol"
ex = "daw dav_auth dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod grid hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand smb vc xdev xlink xvol"
ka.update(**{k: False for k in ex.split()})
ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
@@ -113,7 +113,7 @@ class Cfg(Namespace):
ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
ka.update(**{k: 0 for k in ex.split()})
ex = "doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles unlist R RS SR"
ex = "ah_alg doctitle favico html_head lg_sbf log_fk md_sbf mth textfiles unlist R RS SR"
ka.update(**{k: "" for k in ex.split()})
ex = "xad xar xau xbd xbr xbu xiu xm"