Compare commits

20 Commits

| SHA1 |
|---|
| e2dec2510f |
| da5ad2ab9f |
| eaa4b04a22 |
| 3051b13108 |
| 4c4e48bab7 |
| 01a3eb29cb |
| 73f7249c5f |
| 18c6559199 |
| e66ece993f |
| 0686860624 |
| 24ce46b380 |
| a49bf81ff2 |
| 64501fd7f1 |
| db3c0b0907 |
| edda117a7a |
| cdface0dd5 |
| be6afe2d3a |
| 9163780000 |
| d7aa7dfe64 |
| f1decb531d |
README.md (21 changed lines)
@@ -48,6 +48,7 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [shares](#shares) - share a file or folder by creating a temporary link
 * [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
 * [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
+* [recent uploads](#recent-uploads) - list all recent uploads
 * [media player](#media-player) - plays almost every audio format there is
 * [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
 * [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
@@ -339,6 +340,9 @@ same order here too
 
 * [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)
+
+* [Chrome issue 383568268](https://issues.chromium.org/issues/383568268) -- filereaders in webworkers can OOM / crash the browser-tab
+  * copyparty has a workaround which seems to work well enough
 
 * [Firefox issue 1790500](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500) -- entire browser can crash after uploading ~4000 small files
 
 * Android: music playback randomly stops due to [battery usage settings](#fix-unreliable-playback-on-android)
@@ -714,7 +718,7 @@ files go into `[ok]` if they exist (and you get a link to where it is), otherwis
 
 ### unpost
 
-undo/delete accidental uploads
+undo/delete accidental uploads using the `[🧯]` tab in the UI
 
 
@@ -873,6 +877,17 @@ url parameters:
 * uppercase = reverse-sort; `M` = oldest file first
 
 
+## recent uploads
+
+list all recent uploads by clicking "show recent uploads" in the controlpanel
+
+will show uploader IP and upload-time if the visitor has the admin permission
+
+* global-option `--ups-when` makes upload-time visible to all users, and not just admins
+
+note that the [🧯 unpost](#unpost) feature is better suited for viewing *your own* recent uploads, as it includes the option to undo/delete them
+
+
 ## media player
 
 plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
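The new recent-uploads page added above is also scriptable; `tx_rups()` later in this diff accepts `j` for a JSON rendering and `filter` as a case-insensitive regex over each virtual path. A minimal sketch, not part of the repo, assuming a copyparty instance on localhost:3923 (its default port) and a hypothetical password `hunter2`:

```python
# fetch the new "recent uploads" page as JSON; ?ru selects the page,
# "j" picks JSON output, "filter" is a regex matched against each vpath
import json
import urllib.parse
import urllib.request

flt = urllib.parse.quote(r"\.flac$")
url = "http://127.0.0.1:3923/?ru&j&filter=%s&pw=hunter2" % (flt,)

with urllib.request.urlopen(url) as r:
    for up in json.load(r):
        # "at" (upload time) is zeroed for non-admins unless --ups-when
        print(up["at"], up["sz"], up["vp"])
```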
@@ -1488,7 +1503,9 @@ replace copyparty passwords with oauth and such
 
 you can disable the built-in password-based login system, and instead replace it with a separate piece of software (an identity provider) which will then handle authenticating / authorizing of users; this makes it possible to login with passkeys / fido2 / webauthn / yubikey / ldap / active directory / oauth / many other single-sign-on contraptions
 
-a popular choice is [Authelia](https://www.authelia.com/) (config-file based), another one is [authentik](https://goauthentik.io/) (GUI-based, more complex)
+* the regular config-defined users will be used as a fallback for requests which don't include a valid (trusted) IdP username header
+
+some popular identity providers are [Authelia](https://www.authelia.com/) (config-file based) and [authentik](https://goauthentik.io/) (GUI-based, more complex)
 
 there is a [docker-compose example](./docs/examples/docker/idp-authelia-traefik) which is hopefully a good starting point (alternatively see [./docs/idp.md](./docs/idp.md) if you're the DIY type)
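The `--idp-h-usr` help text (updated further down in this diff) warns that clients must never be able to set the IdP header themselves. A sketch of the attack that warning is about, assuming a hypothetical `--idp-h-usr=x-idp-user` deployment where copyparty is reachable without the reverse-proxy washing the header:

```python
# if the proxy does not strip this header, anyone can claim any username
import urllib.request

req = urllib.request.Request(
    "http://127.0.0.1:3923/",
    headers={"X-IdP-User": "admin"},  # spoofed identity header
)
# a correct deployment replaces this header at the proxy; --idp-h-key
# (a secret header checked before the other IdP headers are trusted)
# is the recommended second safeguard
print(urllib.request.urlopen(req).status)
```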
bin/u2c.py (52 changed lines)
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals
 
-S_VERSION = "2.6"
-S_BUILD_DT = "2024-11-10"
+S_VERSION = "2.7"
+S_BUILD_DT = "2024-12-06"
 
 """
 u2c.py: upload to copyparty
@@ -1033,8 +1033,8 @@ class Ctl(object):
                 handshake(self.ar, file, False)
 
     def _fancy(self):
+        atexit.register(self.cleanup_vt100)
         if VT100 and not self.ar.ns:
-            atexit.register(self.cleanup_vt100)
             ss.scroll_region(3)
 
         Daemon(self.hasher)
@@ -1042,6 +1042,7 @@ class Ctl(object):
         Daemon(self.handshaker)
         Daemon(self.uploader)
 
+        last_sp = -1
         while True:
             with self.exit_cond:
                 self.exit_cond.wait(0.07)
@@ -1080,6 +1081,12 @@ class Ctl(object):
             else:
                 txt = " "
 
+            if not VT100:  # OSC9;4 (taskbar-progress)
+                sp = int(self.up_b * 100 / self.nbytes) or 1
+                if last_sp != sp:
+                    last_sp = sp
+                    txt += "\033]9;4;1;%d\033\\" % (sp,)
+
             if not self.up_br:
                 spd = self.hash_b / ((time.time() - self.t0) or 1)
                 eta = (self.nbytes - self.hash_b) / (spd or 1)
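The block added above emits OSC 9;4, an escape sequence that ConEmu and Windows Terminal render as progress on the taskbar button; state 1 shows progress at N percent, state 0 clears it (which is what the new `cleanup_vt100` path below does). A standalone sketch of the same sequence:

```python
# demo of the OSC 9;4 taskbar-progress escape used by u2c 2.7;
# only has a visible effect in terminals that support it
import sys
import time

for pct in range(1, 101):
    sys.stderr.write("\033]9;4;1;%d\033\\" % (pct,))
    sys.stderr.flush()
    time.sleep(0.02)

sys.stderr.write("\033]9;4;0\033\\")  # same reset as cleanup_vt100()
```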
@@ -1097,6 +1104,8 @@ class Ctl(object):
             tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"
 
             t = "%s eta @ %s/s, %s, %d# left\033[K" % (self.eta, spd, sleft, nleft)
+            if not self.hash_b:
+                t = " now hashing..."
             eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))
 
         if self.ar.wlist:
@@ -1117,7 +1126,10 @@ class Ctl(object):
                 handshake(self.ar, file, False)
 
     def cleanup_vt100(self):
-        ss.scroll_region(None)
+        if VT100:
+            ss.scroll_region(None)
+        else:
+            eprint("\033]9;4;0\033\\")
         eprint("\033[J\033]0;\033\\")
 
     def cb_hasher(self, file, ofs):
@@ -1538,6 +1550,38 @@ source file/folder selection uses rsync syntax, meaning that:
         except:
             pass
 
+    # msys2 doesn't uncygpath absolute paths with whitespace
+    if not VT100:
+        zsl = []
+        for fn in ar.files:
+            if re.search("^/[a-z]/", fn):
+                fn = r"%s:\%s" % (fn[1:2], fn[3:])
+            zsl.append(fn.replace("/", "\\"))
+        ar.files = zsl
+
+    fok = []
+    fng = []
+    for fn in ar.files:
+        if os.path.exists(fn):
+            fok.append(fn)
+        elif VT100:
+            fng.append(fn)
+        else:
+            # windows leaves glob-expansion to the invoked process... okayyy let's get to work
+            from glob import glob
+
+            fns = glob(fn)
+            if fns:
+                fok.extend(fns)
+            else:
+                fng.append(fn)
+
+    if fng:
+        t = "some files/folders were not found:\n %s"
+        raise Exception(t % ("\n ".join(fng),))
+
+    ar.files = fok
+
     if ar.drd:
         ar.dr = True
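The first new block above works around MSYS2 skipping its automatic POSIX-to-Windows path translation for absolute paths that contain whitespace. The same rewrite in isolation, with the drive-letter slicing spelled out:

```python
# the /c/... -> c:\... conversion u2c 2.7 applies on non-VT100 (windows)
import re

def uncygpath(fn):
    # fn[1:2] is the drive letter, fn[3:] the rest of the path
    if re.search("^/[a-z]/", fn):
        fn = r"%s:\%s" % (fn[1:2], fn[3:])
    return fn.replace("/", "\\")

assert uncygpath("/c/Program Files/copyparty") == "c:\\Program Files\\copyparty"
```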
@@ -1,6 +1,6 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.16.2"
+pkgver="1.16.5"
 pkgrel=1
 pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
 arch=("any")
@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
 )
 source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("5ecc1626e3f3a7bb7de5e6697742cd5c5990e20afec867d8de648afb04fbc04b")
+sha256sums=("2830086bd872aaa5174c2ca73ba395439e85c883d85438263bd89521c5f37d9c")
 
 build() {
     cd "${srcdir}/${pkgname}-${pkgver}"
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.16.2/copyparty-sfx.py",
-  "version": "1.16.2",
-  "hash": "sha256-HdgVtsoSX3qgDkNOD9e0PRZlY/wL6T02Q6zECfX/TdM="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.16.5/copyparty-sfx.py",
+  "version": "1.16.5",
+  "hash": "sha256-rfZ76ujA6bLXYW52qP2pK8gdwDdED91mLOF2gzquG8E="
 }
@@ -91,6 +91,9 @@ web/mde.html
 web/mde.js
 web/msg.css
 web/msg.html
+web/rups.css
+web/rups.html
+web/rups.js
 web/shares.css
 web/shares.html
 web/shares.js
@@ -1083,7 +1083,7 @@ def add_cert(ap, cert_path):
 def add_auth(ap):
     ses_db = os.path.join(E.cfg, "sessions.db")
     ap2 = ap.add_argument_group('IdP / identity provider / user authentication options')
-    ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks and assume the request-header \033[33mHN\033[0m contains the username of the requesting user (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
+    ap2.add_argument("--idp-h-usr", metavar="HN", type=u, default="", help="bypass the copyparty authentication checks if the request-header \033[33mHN\033[0m contains a username to associate the request with (for use with authentik/oauth/...)\n\033[1;31mWARNING:\033[0m if you enable this, make sure clients are unable to specify this header themselves; must be washed away and replaced by a reverse-proxy")
     ap2.add_argument("--idp-h-grp", metavar="HN", type=u, default="", help="assume the request-header \033[33mHN\033[0m contains the groupname of the requesting user; can be referenced in config files for group-based access control")
     ap2.add_argument("--idp-h-key", metavar="HN", type=u, default="", help="optional but recommended safeguard; your reverse-proxy will insert a secret header named \033[33mHN\033[0m into all requests, and the other IdP headers will be ignored if this header is not present")
     ap2.add_argument("--idp-gsep", metavar="RE", type=u, default="|:;+,", help="if there are multiple groups in \033[33m--idp-h-grp\033[0m, they are separated by one of the characters in \033[33mRE\033[0m")
@@ -1250,7 +1250,6 @@ def add_optouts(ap):
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
     ap2.add_argument("--no-tarcmp", action="store_true", help="disable download as compressed tar (?tar=gz, ?tar=bz2, ?tar=xz, ?tar=gz:9, ...)")
     ap2.add_argument("--no-lifetime", action="store_true", help="do not allow clients (or server config) to schedule an upload to be deleted after a given time")
-    ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
     ap2.add_argument("--no-pipe", action="store_true", help="disable race-the-beam (lockstep download of files which are currently being uploaded) (volflag=nopipe)")
     ap2.add_argument("--no-db-ip", action="store_true", help="do not write uploader IPs into the database")
 
@@ -1326,7 +1325,10 @@ def add_admin(ap):
     ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)")
     ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
     ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
+    ap2.add_argument("--no-ups-page", action="store_true", help="disable ?ru (list of recent uploads)")
+    ap2.add_argument("--no-up-list", action="store_true", help="don't show list of incoming files in controlpanel")
     ap2.add_argument("--dl-list", metavar="LVL", type=int, default=2, help="who can see active downloads in the controlpanel? [\033[32m0\033[0m]=nobody, [\033[32m1\033[0m]=admins, [\033[32m2\033[0m]=everyone")
+    ap2.add_argument("--ups-when", action="store_true", help="let everyone see upload timestamps on the ?ru page, not just admins")
 
 
 def add_thumbnail(ap):
@@ -1505,6 +1507,7 @@ def add_debug(ap):
     ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than \033[33mNUM\033[0m files at \033[33m--kf-dir\033[0m already; default: 6.3 GiB max (200*32M)")
     ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at \033[33mPATH\033[0m; default: folder named 'bf' wherever copyparty was started")
     ap2.add_argument("--bf-log", metavar="PATH", type=u, default="", help="bak-flips: log corruption info to a textfile at \033[33mPATH\033[0m")
+    ap2.add_argument("--no-cfg-cmt-warn", action="store_true", help=argparse.SUPPRESS)
 
 
 # fmt: on
@@ -1737,7 +1740,7 @@ def main(argv: Optional[list[str]] = None) -> None:
         except:
             lprint("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
 
-    if al.ansi:
+    if not al.ansi:
         al.wintitle = ""
 
     # propagate implications
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (1, 16, 3)
+VERSION = (1, 16, 6)
 CODENAME = "COPYparty"
-BUILD_DT = (2024, 12, 4)
+BUILD_DT = (2024, 12, 19)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -212,7 +212,7 @@ class Lim(object):
 
         df, du, err = get_df(abspath, True)
         if err:
-            t = "failed to read disk space usage for [%s]: %s"
+            t = "failed to read disk space usage for %r: %s"
             self.log(t % (abspath, err), 3)
             self.dfv = 0xAAAAAAAAA  # 42.6 GiB
         else:
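Most of the remaining hunks are one recurring cleanup: log templates move from `.format()` with `[{}]` brackets to printf-style `%r`. `%r` runs the value through `repr()`, so control characters and quoting tricks in user-supplied paths show up escaped in the logs instead of being interpreted by the terminal. A small illustration of the difference:

```python
# why "[{}]".format(x) became "%r" % (x,) throughout this diff
fn = "inno\x1b[2Jcent.txt"          # a filename hiding an ANSI clear-screen
print("deleted [{}]".format(fn))    # old style: the escape sequence fires
print("deleted %r" % (fn,))         # new style: prints 'inno\x1b[2Jcent.txt'
```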
@@ -526,7 +526,7 @@ class VFS(object):
         """returns [vfsnode,fs_remainder] if user has the requested permissions"""
         if relchk(vpath):
             if self.log:
-                self.log("vfs", "invalid relpath [{}]".format(vpath))
+                self.log("vfs", "invalid relpath %r @%s" % (vpath, uname))
             raise Pebkac(422)
 
         cvpath = undot(vpath)
@@ -543,11 +543,11 @@ class VFS(object):
             if req and uname not in d and uname != LEELOO_DALLAS:
                 if vpath != cvpath and vpath != "." and self.log:
                     ap = vn.canonical(rem)
-                    t = "{} has no {} in [{}] => [{}] => [{}]"
-                    self.log("vfs", t.format(uname, msg, vpath, cvpath, ap), 6)
+                    t = "%s has no %s in %r => %r => %r"
+                    self.log("vfs", t % (uname, msg, vpath, cvpath, ap), 6)
 
-                t = 'you don\'t have %s-access in "/%s" or below "/%s"'
-                raise Pebkac(err, t % (msg, cvpath, vn.vpath))
+                t = "you don't have %s-access in %r or below %r"
+                raise Pebkac(err, t % (msg, "/" + cvpath, "/" + vn.vpath))
 
         return vn, rem
 
@@ -658,7 +658,7 @@ class VFS(object):
         seen: list[str],
         uname: str,
         permsets: list[list[bool]],
-        wantdots: bool,
+        wantdots: int,
         scandir: bool,
         lstat: bool,
         subvols: bool = True,
@@ -693,8 +693,8 @@ class VFS(object):
             and fsroot in seen
         ):
             if self.log:
-                t = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}/{}"
-                self.log("vfs.walk", t.format(seen[-1], fsroot, self.vpath, rem), 3)
+                t = "bailing from symlink loop,\n prev: %r\n curr: %r\n from: %r / %r"
+                self.log("vfs.walk", t % (seen[-1], fsroot, self.vpath, rem), 3)
             return
 
         if "xdev" in self.flags or "xvol" in self.flags:
@@ -706,7 +706,7 @@ class VFS(object):
                 rm1.append(le)
             _ = [vfs_ls.remove(x) for x in rm1]  # type: ignore
 
-        dots_ok = wantdots and uname in dbv.axs.udot
+        dots_ok = wantdots and (wantdots == 2 or uname in dbv.axs.udot)
         if not dots_ok:
             vfs_ls = [x for x in vfs_ls if "/." not in "/" + x[0]]
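`wantdots` turns from a bool into a tri-state int here. Spelling out the new expression, with `has_udot` standing in for `uname in dbv.axs.udot`:

```python
# the new wantdots semantics in VFS.walk()
def dots_ok(wantdots: int, has_udot: bool) -> bool:
    return bool(wantdots and (wantdots == 2 or has_udot))

assert not dots_ok(0, True)   # 0: never list dotfiles
assert dots_ok(1, True)       # 1: only for users with the dot permission
assert not dots_ok(1, False)
assert dots_ok(2, False)      # 2: always list dotfiles
```

Existing callers that passed `True` are updated to `1` in the hunks below, so their behavior is unchanged.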
@@ -760,7 +760,7 @@ class VFS(object):
         # if single folder: the folder itself is the top-level item
         folder = "" if flt or not wrap else (vpath.split("/")[-1].lstrip(".") or "top")
 
-        g = self.walk(folder, vrem, [], uname, [[True, False]], True, scandir, False)
+        g = self.walk(folder, vrem, [], uname, [[True, False]], 1, scandir, False)
         for _, _, vpath, apath, files, rd, vd in g:
             if flt:
                 files = [x for x in files if x[0] in flt]
@@ -818,8 +818,8 @@ class VFS(object):
 
         if vdev != st.st_dev:
             if self.log:
-                t = "xdev: {}[{}] => {}[{}]"
-                self.log("vfs", t.format(vdev, self.realpath, st.st_dev, ap), 3)
+                t = "xdev: %s[%r] => %s[%r]"
+                self.log("vfs", t % (vdev, self.realpath, st.st_dev, ap), 3)
 
             return None
 
@@ -829,7 +829,7 @@ class VFS(object):
             return vn
 
         if self.log:
-            self.log("vfs", "xvol: [{}]".format(ap), 3)
+            self.log("vfs", "xvol: %r" % (ap,), 3)
 
         return None
 
@@ -914,7 +914,7 @@ class AuthSrv(object):
 
         self.idp_accs[uname] = gnames
 
-        t = "reinitializing due to new user from IdP: [%s:%s]"
+        t = "reinitializing due to new user from IdP: [%r:%r]"
         self.log(t % (uname, gnames), 3)
 
         if not broker:
@@ -1568,7 +1568,7 @@ class AuthSrv(object):
                 continue
 
             if self.args.shr_v:
-                t = "loading %s share [%s] by [%s] => [%s]"
+                t = "loading %s share %r by %r => %r"
                 self.log(t % (s_pr, s_k, s_un, s_vp))
 
             if s_pw:
@@ -1765,7 +1765,7 @@ class AuthSrv(object):
                 use = True
                 try:
                     _ = float(zs)
-                    zs = "%sg" % (zs)
+                    zs = "%sg" % (zs,)
                 except:
                     pass
                 lim.dfl = unhumanize(zs)
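Several hunks like the one above only add a trailing comma. That is not cosmetic: `(zs)` is just a parenthesized value, not a tuple, so if the value ever happens to be a tuple itself, the `%` operator unpacks it and the format breaks:

```python
# why "%sg" % (zs) became "%sg" % (zs,)
val = (1, 2)
print("%s" % (val,))  # ok: prints "(1, 2)"
print("%s" % (val))   # TypeError: not all arguments converted
```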
@@ -2538,7 +2538,7 @@ class AuthSrv(object):
             return
 
         elif self.args.chpw_v == 2:
-            t = "chpw: %d changed" % (len(uok))
+            t = "chpw: %d changed" % (len(uok),)
             if urst:
                 t += ", \033[0munchanged:\033[35m %s" % (", ".join(list(urst)))
 
@@ -2696,7 +2696,7 @@ class AuthSrv(object):
                 [],
                 u,
                 [[True, False]],
-                True,
+                1,
                 not self.args.no_scandir,
                 False,
                 False,
@@ -3017,6 +3017,19 @@ def expand_config_file(
 
     ret.append("#\033[36m closed{}\033[0m".format(ipath))
 
+    zsl = []
+    for ln in ret:
+        zs = ln.split("  #")[0]
+        if " #" in zs and zs.split("#")[0].strip():
+            zsl.append(ln)
+    if zsl and "no-cfg-cmt-warn" not in "\n".join(ret):
+        t = "\033[33mWARNING: there is less than two spaces before the # in the following config lines, so instead of assuming that this is a comment, the whole line will become part of the config value:\n\n>>> %s\n\nif you are familiar with this and would like to mute this warning, specify the global-option no-cfg-cmt-warn\n\033[0m"
+        t = t % ("\n>>> ".join(zsl),)
+        if log:
+            log(t)
+        else:
+            print(t, file=sys.stderr)
+
 
 def upgrade_cfg_fmt(
     log: Optional["NamedLogger"], args: argparse.Namespace, orig: list[str], cfg_fp: str
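The new warning targets a config-file parsing rule: copyparty only treats `#` as starting a comment when at least two spaces precede it, so a single space leaves the `#` and everything after it inside the value. A sketch of the failure mode, using a hypothetical option name:

```python
# the rule the new warning in expand_config_file() is about
good = "thumbsize: 256  # two spaces: comment is stripped"
bad = "thumbsize: 256 # one space: the comment stays in the value"

def strip_comment(ln: str) -> str:
    return ln.split("  #")[0].rstrip()

print(strip_comment(good))  # "thumbsize: 256"
print(strip_comment(bad))   # the whole line, comment included
```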
@@ -42,14 +42,14 @@ class Fstab(object):
         self.cache = {}
 
         fs = "ext4"
-        msg = "failed to determine filesystem at [{}]; assuming {}\n{}"
+        msg = "failed to determine filesystem at %r; assuming %s\n%s"
 
         if ANYWIN:
             fs = "vfat"
             try:
                 path = self._winpath(path)
             except:
-                self.log(msg.format(path, fs, min_ex()), 3)
+                self.log(msg % (path, fs, min_ex()), 3)
                 return fs
 
         path = undot(path)
@@ -61,11 +61,11 @@ class Fstab(object):
         try:
             fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
         except:
-            self.log(msg.format(path, fs, min_ex()), 3)
+            self.log(msg % (path, fs, min_ex()), 3)
 
         fs = fs.lower()
         self.cache[path] = fs
-        self.log("found {} at {}".format(fs, path))
+        self.log("found %s at %r" % (fs, path))
         return fs
 
     def _winpath(self, path: str) -> str:
@@ -479,8 +479,8 @@ class HttpCli(object):
                 if vpath.startswith(self.args.R):
                     vpath = vpath[len(self.args.R) + 1 :]
                 else:
-                    t = "incorrect --rp-loc or webserver config; expected vpath starting with [{}] but got [{}]"
-                    self.log(t.format(self.args.R, vpath), 1)
+                    t = "incorrect --rp-loc or webserver config; expected vpath starting with %r but got %r"
+                    self.log(t % (self.args.R, vpath), 1)
 
         self.ouparam = uparam.copy()
 
@@ -518,7 +518,7 @@ class HttpCli(object):
             return self.tx_qr()
 
         if relchk(self.vpath) and (self.vpath != "*" or self.mode != "OPTIONS"):
-            self.log("invalid relpath [{}]".format(self.vpath))
+            self.log("invalid relpath %r" % ("/" + self.vpath,))
             self.cbonk(self.conn.hsrv.gmal, self.req, "bad_vp", "invalid relpaths")
             return self.tx_404() and self.keepalive
 
@@ -542,8 +542,14 @@ class HttpCli(object):
             except:
                 pass
 
+        self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
+        self.uname = (
+            self.asrv.sesa.get(self.pw)
+            or self.asrv.iacct.get(self.asrv.ah.hash(self.pw))
+            or "*"
+        )
+
         if self.args.idp_h_usr:
             self.pw = ""
             idp_usr = self.headers.get(self.args.idp_h_usr) or ""
             if idp_usr:
                 idp_grp = (
@@ -588,20 +594,11 @@ class HttpCli(object):
                     idp_grp = ""
 
                 if idp_usr in self.asrv.vfs.aread:
-                    self.pw = ""
                     self.uname = idp_usr
                     self.html_head += "<script>var is_idp=1</script>\n"
                 else:
-                    self.log("unknown username: [%s]" % (idp_usr), 1)
-                    self.uname = "*"
-            else:
-                self.uname = "*"
-        else:
-            self.pw = uparam.get("pw") or self.headers.get("pw") or bauth or cookie_pw
-            self.uname = (
-                self.asrv.sesa.get(self.pw)
-                or self.asrv.iacct.get(self.asrv.ah.hash(self.pw))
-                or "*"
-            )
+                    self.log("unknown username: %r" % (idp_usr,), 1)
 
         if self.args.ipu and self.uname == "*":
             self.uname = self.conn.ipu_iu[self.conn.ipu_nm.map(self.ip)]
@@ -666,7 +663,7 @@ class HttpCli(object):
             origin = self.headers.get("origin", "<?>")
             proto = "https://" if self.is_https else "http://"
             guess = "modifying" if (origin and host) else "stripping"
-            t = "cors-reject %s because request-header Origin='%s' does not match request-protocol '%s' and host '%s' based on request-header Host='%s' (note: if this request is not malicious, check if your reverse-proxy is accidentally %s request headers, in particular 'Origin', for example by running copyparty with --ihead='*' to show all request headers)"
+            t = "cors-reject %s because request-header Origin=%r does not match request-protocol %r and host %r based on request-header Host=%r (note: if this request is not malicious, check if your reverse-proxy is accidentally %s request headers, in particular 'Origin', for example by running copyparty with --ihead='*' to show all request headers)"
             self.log(t % (self.mode, origin, proto, self.host, host, guess), 3)
             raise Pebkac(403, "rejected by cors-check")
 
@@ -712,7 +709,7 @@ class HttpCli(object):
 
         if pex.code != 404 or self.do_log:
             self.log(
-                "http%d: %s\033[0m, %s" % (pex.code, msg, self.vpath),
+                "http%d: %s\033[0m, %r" % (pex.code, msg, "/" + self.vpath),
                 6 if em.startswith("client d/c ") else 3,
             )
 
@@ -1121,6 +1118,8 @@ class HttpCli(object):
             logmsg += " [\033[36m" + rval + "\033[0m]"
 
         self.log(logmsg)
+        if "%" in self.req:
+            self.log(" `-- %r" % (self.vpath,))
 
         # "embedded" resources
         if self.vpath.startswith(".cpr"):
@@ -1155,8 +1154,8 @@ class HttpCli(object):
                 return self.tx_res(res_path)
 
             if res_path != undot(res_path):
-                t = "malicious user; attempted path traversal [{}] => [{}]"
-                self.log(t.format(self.vpath, res_path), 1)
+                t = "malicious user; attempted path traversal %r => %r"
+                self.log(t % ("/" + self.vpath, res_path), 1)
                 self.cbonk(self.conn.hsrv.gmal, self.req, "trav", "path traversal")
 
             self.tx_404()
@@ -1167,11 +1166,11 @@ class HttpCli(object):
             return True
 
         if not self.can_read and not self.can_write and not self.can_get:
-            t = "@{} has no access to [{}]"
+            t = "@%s has no access to %r"
 
             if "on403" in self.vn.flags:
                 t += " (on403)"
-                self.log(t.format(self.uname, self.vpath))
+                self.log(t % (self.uname, "/" + self.vpath))
                 ret = self.on40x(self.vn.flags["on403"], self.vn, self.rem)
                 if ret == "true":
                     return True
@@ -1190,7 +1189,7 @@ class HttpCli(object):
             if self.vpath:
                 ptn = self.args.nonsus_urls
                 if not ptn or not ptn.search(self.vpath):
-                    self.log(t.format(self.uname, self.vpath))
+                    self.log(t % (self.uname, "/" + self.vpath))
 
             return self.tx_404(True)
@@ -1234,6 +1233,9 @@ class HttpCli(object):
         if "dls" in self.uparam:
             return self.tx_dls()
 
+        if "ru" in self.uparam:
+            return self.tx_rups()
+
         if "h" in self.uparam:
             return self.tx_mounts()
 
@@ -1384,6 +1386,8 @@ class HttpCli(object):
     def handle_propfind(self) -> bool:
         if self.do_log:
            self.log("PFIND %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         if self.args.no_dav:
             raise Pebkac(405, "WebDAV is disabled in server config")
 
@@ -1434,14 +1438,14 @@ class HttpCli(object):
         if depth == "infinity":
             # allow depth:0 from unmapped root, but require read-axs otherwise
             if not self.can_read and (self.vpath or self.asrv.vfs.realpath):
-                t = "depth:infinity requires read-access in /%s"
-                t = t % (self.vpath,)
+                t = "depth:infinity requires read-access in %r"
+                t = t % ("/" + self.vpath,)
                 self.log(t, 3)
                 raise Pebkac(401, t)
 
             if not stat.S_ISDIR(topdir["st"].st_mode):
-                t = "depth:infinity can only be used on folders; /%s is 0o%o"
-                t = t % (self.vpath, topdir["st"])
+                t = "depth:infinity can only be used on folders; %r is 0o%o"
+                t = t % ("/" + self.vpath, topdir["st"])
                 self.log(t, 3)
                 raise Pebkac(400, t)
 
@@ -1467,7 +1471,7 @@ class HttpCli(object):
         elif depth == "0" or not stat.S_ISDIR(st.st_mode):
             # propfind on a file; return as topdir
             if not self.can_read and not self.can_get:
-                self.log("inaccessible: [%s]" % (self.vpath,))
+                self.log("inaccessible: %r" % ("/" + self.vpath,))
                 raise Pebkac(401, "authenticate")
 
         elif depth == "1":
 
@@ -1494,7 +1498,7 @@ class HttpCli(object):
             raise Pebkac(412, t.format(depth, t2))
 
         if not self.can_read and not self.can_write and not fgen:
-            self.log("inaccessible: [%s]" % (self.vpath,))
+            self.log("inaccessible: %r" % ("/" + self.vpath,))
             raise Pebkac(401, "authenticate")
 
         fgen = itertools.chain([topdir], fgen)
 
@@ -1565,12 +1569,14 @@ class HttpCli(object):
     def handle_proppatch(self) -> bool:
         if self.do_log:
             self.log("PPATCH %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         if self.args.no_dav:
             raise Pebkac(405, "WebDAV is disabled in server config")
 
         if not self.can_write:
-            self.log("{} tried to proppatch [{}]".format(self.uname, self.vpath))
+            self.log("%s tried to proppatch %r" % (self.uname, "/" + self.vpath))
             raise Pebkac(401, "authenticate")
 
         from xml.etree import ElementTree as ET
 
@@ -1620,13 +1626,15 @@ class HttpCli(object):
     def handle_lock(self) -> bool:
         if self.do_log:
             self.log("LOCK %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         if self.args.no_dav:
             raise Pebkac(405, "WebDAV is disabled in server config")
 
         # win7+ deadlocks if we say no; just smile and nod
         if not self.can_write and "Microsoft-WebDAV" not in self.ua:
-            self.log("{} tried to lock [{}]".format(self.uname, self.vpath))
+            self.log("%s tried to lock %r" % (self.uname, "/" + self.vpath))
             raise Pebkac(401, "authenticate")
 
         from xml.etree import ElementTree as ET
 
@@ -1685,12 +1693,14 @@ class HttpCli(object):
     def handle_unlock(self) -> bool:
         if self.do_log:
             self.log("UNLOCK %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         if self.args.no_dav:
             raise Pebkac(405, "WebDAV is disabled in server config")
 
         if not self.can_write and "Microsoft-WebDAV" not in self.ua:
-            self.log("{} tried to lock [{}]".format(self.uname, self.vpath))
+            self.log("%s tried to lock %r" % (self.uname, "/" + self.vpath))
             raise Pebkac(401, "authenticate")
 
         self.send_headers(None, 204)
 
@@ -1702,6 +1712,8 @@ class HttpCli(object):
 
         if self.do_log:
             self.log("MKCOL %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         try:
             return self._mkdir(self.vpath, True)
 
@@ -1753,6 +1765,8 @@ class HttpCli(object):
     def handle_options(self) -> bool:
         if self.do_log:
             self.log("OPTIONS %s @%s" % (self.req, self.uname))
+            if "%" in self.req:
+                self.log(" `-- %r" % (self.vpath,))
 
         oh = self.out_headers
         oh["Allow"] = ", ".join(self.conn.hsrv.mallow)
@@ -1768,10 +1782,14 @@ class HttpCli(object):
 
     def handle_delete(self) -> bool:
         self.log("DELETE %s @%s" % (self.req, self.uname))
+        if "%" in self.req:
+            self.log(" `-- %r" % (self.vpath,))
         return self.handle_rm([])
 
     def handle_put(self) -> bool:
         self.log("PUT %s @%s" % (self.req, self.uname))
+        if "%" in self.req:
+            self.log(" `-- %r" % (self.vpath,))
 
         if not self.can_write:
             t = "user %s does not have write-access under /%s"
 
@@ -1790,6 +1808,8 @@ class HttpCli(object):
 
     def handle_post(self) -> bool:
         self.log("POST %s @%s" % (self.req, self.uname))
+        if "%" in self.req:
+            self.log(" `-- %r" % (self.vpath,))
 
         if self.headers.get("expect", "").lower() == "100-continue":
             try:
 
@@ -1834,7 +1854,7 @@ class HttpCli(object):
 
         if "save" in opt:
             post_sz, _, _, _, path, _ = self.dump_to_file(False)
-            self.log("urlform: {} bytes, {}".format(post_sz, path))
+            self.log("urlform: %d bytes, %r" % (post_sz, path))
         elif "print" in opt:
             reader, _ = self.get_body_reader()
             buf = b""
 
@@ -1845,8 +1865,8 @@ class HttpCli(object):
 
             if buf:
                 orig = buf.decode("utf-8", "replace")
-                t = "urlform_raw {} @ {}\n {}\n"
-                self.log(t.format(len(orig), self.vpath, orig))
+                t = "urlform_raw %d @ %r\n %r\n"
+                self.log(t % (len(orig), "/" + self.vpath, orig))
                 try:
                     zb = unquote(buf.replace(b"+", b" "))
                     plain = zb.decode("utf-8", "replace")
 
@@ -1872,8 +1892,8 @@ class HttpCli(object):
                         plain,
                     )
 
-                    t = "urlform_dec {} @ {}\n {}\n"
-                    self.log(t.format(len(plain), self.vpath, plain))
+                    t = "urlform_dec %d @ %r\n %r\n"
+                    self.log(t % (len(plain), "/" + self.vpath, plain))
 
                 except Exception as ex:
                     self.log(repr(ex))
 
@@ -2123,7 +2143,7 @@ class HttpCli(object):
             try:
                 ext = self.conn.hsrv.magician.ext(path)
             except Exception as ex:
-                self.log("filetype detection failed for [{}]: {}".format(path, ex), 6)
+                self.log("filetype detection failed for %r: %s" % (path, ex), 6)
                 ext = None
 
             if ext:
 
@@ -2223,8 +2243,8 @@ class HttpCli(object):
     def handle_stash(self, is_put: bool) -> bool:
         post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file(is_put)
         spd = self._spd(post_sz)
-        t = "{} wrote {}/{} bytes to {} # {}"
-        self.log(t.format(spd, post_sz, remains, path, sha_b64[:28]))  # 21
+        t = "%s wrote %d/%d bytes to %r # %s"
+        self.log(t % (spd, post_sz, remains, path, sha_b64[:28]))  # 21
 
         ac = self.uparam.get(
             "want", self.headers.get("accept", "").lower().split(";")[-1]
 
@@ -2254,7 +2274,7 @@ class HttpCli(object):
         flags: dict[str, Any],
     ) -> None:
         now = time.time()
-        t = "bad-chunk: %.3f %s %s %d %s %s %s"
+        t = "bad-chunk: %.3f %s %s %d %s %s %r"
         t = t % (now, bad_sha, good_sha, ofs, self.ip, self.uname, ap)
         self.log(t, 5)
 
@@ -2394,7 +2414,7 @@ class HttpCli(object):
         body = json.loads(json_buf.decode(enc, "replace"))
         try:
             zds = {k: v for k, v in body.items()}
-            zds["hash"] = "%d chunks" % (len(body["hash"]))
+            zds["hash"] = "%d chunks" % (len(body["hash"]),)
         except:
             zds = body
         t = "POST len=%d type=%s ip=%s user=%s req=%r json=%s"
 
@@ -2438,7 +2458,7 @@ class HttpCli(object):
             if not bos.path.isdir(dst):
                 bos.makedirs(dst)
         except OSError as ex:
-            self.log("makedirs failed [{}]".format(dst))
+            self.log("makedirs failed %r" % (dst,))
             if not bos.path.isdir(dst):
                 if ex.errno == errno.EACCES:
                     raise Pebkac(500, "the server OS denied write-access")
 
@@ -2463,7 +2483,7 @@ class HttpCli(object):
             # strip common suffix (uploader's folder structure)
             vp_req, vp_vfs = vroots(self.vpath, vjoin(dbv.vpath, vrem))
             if not ret["purl"].startswith(vp_vfs):
-                t = "share-mapping failed; req=[%s] dbv=[%s] vrem=[%s] n1=[%s] n2=[%s] purl=[%s]"
+                t = "share-mapping failed; req=%r dbv=%r vrem=%r n1=%r n2=%r purl=%r"
                 zt = (self.vpath, dbv.vpath, vrem, vp_req, vp_vfs, ret["purl"])
                 raise Pebkac(500, t % zt)
             ret["purl"] = vp_req + ret["purl"][len(vp_vfs) :]
@@ -2512,13 +2532,13 @@ class HttpCli(object):
         # search by query params
         q = body["q"]
         n = body.get("n", self.args.srch_hits)
-        self.log("qj: {} |{}|".format(q, n))
+        self.log("qj: %r |%d|" % (q, n))
         hits, taglist, trunc = idx.search(self.uname, vols, q, n)
         msg = len(hits)
 
         idx.p_end = time.time()
         idx.p_dur = idx.p_end - t0
-        self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
+        self.log("q#: %r (%.2fs)" % (msg, idx.p_dur))
 
         order = []
         for t in self.args.mte:
 
@@ -2629,7 +2649,7 @@ class HttpCli(object):
             t = "your client is sending %d bytes which is too much (server expected %d bytes at most)"
             raise Pebkac(400, t % (remains, maxsize))
 
-        t = "writing %s %s+%d #%d+%d %s"
+        t = "writing %r %s+%d #%d+%d %s"
         chunkno = cstart0[0] // chunksize
         zs = " ".join([chashes[0][:15]] + [x[:9] for x in chashes[1:]])
         self.log(t % (path, cstart0, remains, chunkno, len(chashes), zs))
 
@@ -2741,7 +2761,7 @@ class HttpCli(object):
         cinf = self.headers.get("x-up2k-stat", "")
 
         spd = self._spd(postsize)
-        self.log("{:70} thank {}".format(spd, cinf))
+        self.log("%70s thank %r" % (spd, cinf))
         self.reply(b"thank")
         return True
 
@@ -2823,7 +2843,7 @@ class HttpCli(object):
                 logpwd = "%" + ub64enc(zb[:12]).decode("ascii")
 
             if pwd != "x":
-                self.log("invalid password: {}".format(logpwd), 3)
+                self.log("invalid password: %r" % (logpwd,), 3)
                 self.cbonk(self.conn.hsrv.gpwd, pwd, "pw", "invalid passwords")
 
         msg = "naw dude"
 
@@ -2859,7 +2879,7 @@ class HttpCli(object):
         rem = sanitize_vpath(rem, "/")
         fn = vfs.canonical(rem)
         if not fn.startswith(vfs.realpath):
-            self.log("invalid mkdir [%s] [%s]" % (self.gctx, vpath), 1)
+            self.log("invalid mkdir %r %r" % (self.gctx, vpath), 1)
             raise Pebkac(422)
 
         if not nullwrite:
 
@@ -3025,9 +3045,9 @@ class HttpCli(object):
         elif bos.path.exists(abspath):
             try:
                 wunlink(self.log, abspath, vfs.flags)
-                t = "overwriting file with new upload: %s"
+                t = "overwriting file with new upload: %r"
             except:
-                t = "toctou while deleting for ?replace: %s"
+                t = "toctou while deleting for ?replace: %r"
             self.log(t % (abspath,))
         else:
             open_args = {}
@@ -3110,7 +3130,7 @@ class HttpCli(object):
         f, tnam = ren_open(tnam, "wb", self.args.iobuf, **open_args)
         try:
             tabspath = os.path.join(fdir, tnam)
-            self.log("writing to {}".format(tabspath))
+            self.log("writing to %r" % (tabspath,))
             sz, sha_hex, sha_b64 = copier(
                 p_data, f, hasher, max_sz, self.args.s_wr_slp
             )
 
@@ -3295,7 +3315,7 @@ class HttpCli(object):
             jmsg["files"].append(jpart)
 
         vspd = self._spd(sz_total, False)
-        self.log("{} {}".format(vspd, msg))
+        self.log("%s %r" % (vspd, msg))
 
         suf = ""
         if not nullwrite and self.args.write_uplog:
 
@@ -3558,7 +3578,7 @@ class HttpCli(object):
         if req == zs:
             return True
 
-        t = "wrong dirkey, want %s, got %s\n vp: %s\n ap: %s"
+        t = "wrong dirkey, want %s, got %s\n vp: %r\n ap: %r"
         self.log(t % (zs, req, self.req, ap), 6)
         return False
 
@@ -3586,7 +3606,7 @@ class HttpCli(object):
         if req == zs:
             return True
 
-        t = "wrong filekey, want %s, got %s\n vp: %s\n ap: %s"
+        t = "wrong filekey, want %s, got %s\n vp: %r\n ap: %r"
        self.log(t % (zs, req, self.req, ap), 6)
        return False
 
@@ -3648,7 +3668,7 @@ class HttpCli(object):
                 elif ph == "srv.htime":
                     sv = datetime.now(UTC).strftime("%Y-%m-%d, %H:%M:%S")
                 else:
-                    self.log("unknown placeholder in server config: [%s]" % (ph), 3)
+                    self.log("unknown placeholder in server config: [%s]" % (ph,), 3)
                     continue
 
                 sv = self.conn.hsrv.ptn_hsafe.sub("_", sv)
 
@@ -3805,7 +3825,7 @@ class HttpCli(object):
                 self.pipes.set(req_path, job)
             except Exception as ex:
                 if getattr(ex, "errno", 0) != errno.ENOENT:
-                    self.log("will not pipe [%s]; %s" % (ap_data, ex), 6)
+                    self.log("will not pipe %r; %s" % (ap_data, ex), 6)
                 ptop = None
 
         #
 
@@ -4095,7 +4115,7 @@ class HttpCli(object):
             if lower >= data_end:
                 if data_end:
                     t = "pipe: uploader is too slow; aborting download at %.2f MiB"
-                    self.log(t % (data_end / M))
+                    self.log(t % (data_end / M,))
                     raise Pebkac(416, "uploader is too slow")
 
                 raise Pebkac(416, "no data available yet; please retry in a bit")
 
@@ -4239,7 +4259,7 @@ class HttpCli(object):
 
         cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
         cdis = cdis.format(afn, ext, ufn, ext)
-        self.log(cdis)
+        self.log(repr(cdis))
         self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
 
         fgen = vn.zipgen(
 
@@ -4902,9 +4922,9 @@ class HttpCli(object):
                 raise Pebkac(500, "sqlite3 not found on server; unpost is disabled")
             raise Pebkac(500, "server busy, cannot unpost; please retry in a bit")
 
-        filt = self.uparam.get("filter") or ""
-        lm = "ups [{}]".format(filt)
-        self.log(lm)
+        zs = self.uparam.get("filter") or ""
+        filt = re.compile(zs, re.I) if zs else None
+        lm = "ups %r" % (zs,)
 
         if self.args.shr and self.vpath.startswith(self.args.shr1):
             shr_dbv, shr_vrem = self.vn.get_dbv(self.rem)
 
@@ -4945,13 +4965,18 @@ class HttpCli(object):
 
             nfk, fk_alg = fk_vols.get(vol) or (0, 0)
 
-            q = "select sz, rd, fn, at from up where ip=? and at>?"
+            n = 2000
+            q = "select sz, rd, fn, at from up where ip=? and at>? order by at desc"
             for sz, rd, fn, at in cur.execute(q, (self.ip, lim)):
                 vp = "/" + "/".join(x for x in [vol.vpath, rd, fn] if x)
-                if filt and filt not in vp:
+                if filt and not filt.search(vp):
                     continue
 
-                rv = {"vp": quotep(vp), "sz": sz, "at": at, "nfk": nfk}
+                n -= 1
+                if not n:
+                    break
+
+                rv = {"vp": vp, "sz": sz, "at": at, "nfk": nfk}
                 if nfk:
                     rv["ap"] = vol.canonical(vjoin(rd, fn))
                     rv["fk_alg"] = fk_alg
 
@@ -4961,9 +4986,13 @@ class HttpCli(object):
 
-        ret.sort(key=lambda x: x["at"], reverse=True)  # type: ignore
-        ret = ret[:2000]
+            if len(ret) > 2000:
+                ret = ret[:2000]
+
+        ret.sort(key=lambda x: x["at"], reverse=True)  # type: ignore
 
-        n = 0
-        for rv in ret[:11000]:
+        for rv in ret:
+            rv["vp"] = quotep(rv["vp"])
             nfk = rv.pop("nfk")
             if not nfk:
                 continue
 
@@ -4980,12 +5009,6 @@ class HttpCli(object):
                 )
                 rv["vp"] += "?k=" + fk[:nfk]
 
-            n += 1
-            if n > 2000:
-                break
-
-        ret = ret[:2000]
-
         if shr_dbv:
             # translate vpaths from share-target to share-url
             # to satisfy access checks
@@ -5009,6 +5032,125 @@ class HttpCli(object):
         self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
         return True
 
+    def tx_rups(self) -> bool:
+        if self.args.no_ups_page:
+            raise Pebkac(500, "listing of recent uploads is disabled in server config")
+
+        idx = self.conn.get_u2idx()
+        if not idx or not hasattr(idx, "p_end"):
+            if not HAVE_SQLITE3:
+                raise Pebkac(500, "sqlite3 not found on server; recent-uploads n/a")
+            raise Pebkac(500, "server busy, cannot list recent uploads; please retry")
+
+        sfilt = self.uparam.get("filter") or ""
+        filt = re.compile(sfilt, re.I) if sfilt else None
+        lm = "ru %r" % (sfilt,)
+        self.log(lm)
+
+        ret: list[dict[str, Any]] = []
+        t0 = time.time()
+        allvols = [
+            x
+            for x in self.asrv.vfs.all_vols.values()
+            if "e2d" in x.flags and ("*" in x.axs.uread or self.uname in x.axs.uread)
+        ]
+        fk_vols = {
+            vol: (vol.flags["fk"], 2 if "fka" in vol.flags else 1)
+            for vol in allvols
+            if "fk" in vol.flags and "*" not in vol.axs.uread
+        }
+
+        for vol in allvols:
+            cur = idx.get_cur(vol)
+            if not cur:
+                continue
+
+            nfk, fk_alg = fk_vols.get(vol) or (0, 0)
+            adm = "*" in vol.axs.uadmin or self.uname in vol.axs.uadmin
+            dots = "*" in vol.axs.udot or self.uname in vol.axs.udot
+
+            n = 1000
+            q = "select sz, rd, fn, ip, at from up where at>0 order by at desc"
+            for sz, rd, fn, ip, at in cur.execute(q):
+                vp = "/" + "/".join(x for x in [vol.vpath, rd, fn] if x)
+                if filt and not filt.search(vp):
+                    continue
+
+                if not dots and "/." in vp:
+                    continue
+
+                n -= 1
+                if not n:
+                    break
+
+                rv = {
+                    "vp": vp,
+                    "sz": sz,
+                    "ip": ip,
+                    "at": at,
+                    "nfk": nfk,
+                    "adm": adm,
+                }
+                if nfk:
+                    rv["ap"] = vol.canonical(vjoin(rd, fn))
+                    rv["fk_alg"] = fk_alg
+
+                ret.append(rv)
+                if len(ret) > 2000:
+                    ret.sort(key=lambda x: x["at"], reverse=True)  # type: ignore
+                    ret = ret[:1000]
+
+        if len(ret) > 1000:
+            ret = ret[:1000]
+
+        ret.sort(key=lambda x: x["at"], reverse=True)  # type: ignore
+
+        for rv in ret:
+            rv["evp"] = quotep(rv["vp"])
+            nfk = rv.pop("nfk")
+            if not nfk:
+                continue
+
+            alg = rv.pop("fk_alg")
+            ap = rv.pop("ap")
+            try:
+                st = bos.stat(ap)
+            except:
+                continue
+
+            fk = self.gen_fk(
+                alg, self.args.fk_salt, ap, st.st_size, 0 if ANYWIN else st.st_ino
+            )
+            rv["vp"] += "?k=" + fk[:nfk]
+
+        if self.args.ups_when:
+            for rv in ret:
+                adm = rv.pop("adm")
+                if not adm:
+                    rv["ip"] = "(You)" if rv["ip"] == self.ip else "(?)"
+        else:
+            for rv in ret:
+                adm = rv.pop("adm")
+                if not adm:
+                    rv["ip"] = "(You)" if rv["ip"] == self.ip else "(?)"
+                    rv["at"] = 0
+
+        if self.is_vproxied:
+            for v in ret:
+                v["vp"] = self.args.SR + v["vp"]
+
+        self.log("%s #%d %.2fsec" % (lm, len(ret), time.time() - t0))
+
+        if "j" in self.ouparam:
+            jtxt = json.dumps(ret, separators=(",\n", ": "))
+            self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
+            return True
+
+        rows = [[x["vp"], x["evp"], x["sz"], x["ip"], x["at"]] for x in ret]
+        html = self.j2s("rups", this=self, rows=rows, filt=sfilt, now=int(time.time()))
+        self.reply(html.encode("utf-8"), status=200)
+        return True
+
     def tx_shares(self) -> bool:
         if self.uname == "*":
             self.loud_reply("you're not logged in")
@@ -5714,7 +5856,7 @@ class HttpCli(object):
                 linf = stats.get(fn) or bos.lstat(fspath)
                 inf = bos.stat(fspath) if stat.S_ISLNK(linf.st_mode) else linf
             except:
-                self.log("broken symlink: {}".format(repr(fspath)))
+                self.log("broken symlink: %r" % (fspath,))
                 continue
 
             is_dir = stat.S_ISDIR(inf.st_mode)
 
@@ -5829,8 +5971,7 @@ class HttpCli(object):
                     erd_efn = s3enc(idx.mem_cur, rd, fn)
                     r = icur.execute(q, erd_efn)
                 except:
-                    t = "tag read error, {}/{}\n{}"
-                    self.log(t.format(rd, fn, min_ex()))
+                    self.log("tag read error, %r / %r\n%s" % (rd, fn, min_ex()))
                     break
 
                 tags = {k: v for k, v in r}
 
@@ -5950,10 +6091,10 @@ class HttpCli(object):
                 if doc.lower().endswith(".md") and "exp" in vn.flags:
                     doctxt = self._expand(doctxt, vn.flags.get("exp_md") or [])
             else:
-                self.log("doc 2big: [{}]".format(doc), c=6)
+                self.log("doc 2big: %r" % (doc,), 6)
                 doctxt = "( size of textfile exceeds serverside limit )"
         else:
-            self.log("doc 404: [{}]".format(doc), c=6)
+            self.log("doc 404: %r" % (doc,), 6)
             doctxt = "( textfile not found )"
 
         if doctxt is not None:
 
@@ -172,15 +172,16 @@ class HttpSrv(object):
             env = jinja2.Environment()
             env.loader = jinja2.FunctionLoader(lambda f: load_jinja2_resource(self.E, f))
             jn = [
-                "splash",
-                "shares",
-                "svcs",
                 "browser",
                 "browser2",
-                "msg",
+                "cf",
                 "md",
                 "mde",
-                "cf",
+                "msg",
+                "rups",
+                "shares",
+                "splash",
+                "svcs",
             ]
             self.j2 = {x: env.get_template(x + ".html") for x in jn}
             self.prism = has_resource(self.E, "web/deps/prism.js.gz")
@@ -194,7 +194,7 @@ def au_unpk(
 
     except Exception as ex:
         if ret:
-            t = "failed to decompress audio file [%s]: %r"
+            t = "failed to decompress audio file %r: %r"
             log(t % (abspath, ex))
             wunlink(log, ret, vn.flags if vn else VF_CAREFUL)
 
@@ -582,7 +582,7 @@ class MTag(object):
                 raise Exception()
         except Exception as ex:
             if self.args.mtag_v:
-                self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90")
+                self.log("mutagen-err [%s] @ %r" % (ex, abspath), "90")
 
             return self.get_ffprobe(abspath) if self.can_ffprobe else {}
 
@@ -699,8 +699,8 @@ class MTag(object):
                     ret[tag] = zj[tag]
             except:
                 if self.args.mtag_v:
-                    t = "mtag error: tagname {}, parser {}, file {} => {}"
-                    self.log(t.format(tagname, parser.bin, abspath, min_ex()))
+                    t = "mtag error: tagname %r, parser %r, file %r => %r"
+                    self.log(t % (tagname, parser.bin, abspath, min_ex()), 6)
 
         if ap != abspath:
             wunlink(self.log, ap, VF_CAREFUL)
 
@@ -263,7 +263,7 @@ class SMB(object):
             time.time(),
             "",
         ):
-            yeet("blocked by xbu server config: " + vpath)
+            yeet("blocked by xbu server config: %r" % (vpath,))
 
         ret = bos.open(ap, flags, *a, mode=chmod, **ka)
         if wr:
 
@@ -110,7 +110,7 @@ def errdesc(
     report = ["copyparty failed to add the following files to the archive:", ""]
 
     for fn, err in errors:
-        report.extend([" file: {}".format(fn), "error: {}".format(err), ""])
+        report.extend([" file: %r" % (fn,), "error: %s" % (err,), ""])
 
     btxt = "\r\n".join(report).encode("utf-8", "replace")
     btxt = vol_san(list(vfs.all_vols.values()), btxt)
 
@@ -402,17 +402,17 @@ class TcpSrv(object):
             if not netdevs:
                 continue
 
-            added = "nothing"
-            removed = "nothing"
+            add = []
+            rem = []
             for k, v in netdevs.items():
                 if k not in self.netdevs:
-                    added = "{} = {}".format(k, v)
+                    add.append("\n  added %s = %s" % (k, v))
             for k, v in self.netdevs.items():
                 if k not in netdevs:
-                    removed = "{} = {}".format(k, v)
+                    rem.append("\nremoved %s = %s" % (k, v))
 
-            t = "network change detected:\n  added {}\033[0;33m\nremoved {}"
-            self.log("tcpsrv", t.format(added, removed), 3)
+            t = "network change detected:\033[32m%s\033[33m%s"
+            self.log("tcpsrv", t % ("".join(add), "".join(rem)), 3)
             self.netdevs = netdevs
             self._distribute_netdevs()
 
@@ -357,7 +357,7 @@ class Tftpd(object):
             time.time(),
             "",
         ):
-            yeet("blocked by xbu server config: " + vpath)
+            yeet("blocked by xbu server config: %r" % (vpath,))
 
         if not self.args.tftp_nols and bos.path.isdir(ap):
             return self._ls(vpath, "", 0, True)
@@ -109,13 +109,13 @@ class ThumbCli(object):
             fmt = sfmt
 
         elif fmt[:1] == "p" and not is_au and not is_vid:
-            t = "cannot thumbnail [%s]: png only allowed for waveforms"
-            self.log(t % (rem), 6)
+            t = "cannot thumbnail %r: png only allowed for waveforms"
+            self.log(t % (rem,), 6)
             return None
 
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None
 
         tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
 
@@ -239,7 +239,7 @@ class ThumbSrv(object):
     def get(self, ptop: str, rem: str, mtime: float, fmt: str) -> Optional[str]:
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None
 
         tpath = thumb_path(histpath, rem, mtime, fmt, self.fmt_ffa)
 
@@ -249,7 +249,7 @@ class ThumbSrv(object):
         with self.mutex:
             try:
                 self.busy[tpath].append(cond)
-                self.log("joined waiting room for %s" % (tpath,))
+                self.log("joined waiting room for %r" % (tpath,))
             except:
                 thdir = os.path.dirname(tpath)
                 bos.makedirs(os.path.join(thdir, "w"))
 
@@ -266,11 +266,11 @@ class ThumbSrv(object):
                 allvols = list(self.asrv.vfs.all_vols.values())
                 vn = next((x for x in allvols if x.realpath == ptop), None)
                 if not vn:
-                    self.log("ptop [{}] not in {}".format(ptop, allvols), 3)
+                    self.log("ptop %r not in %s" % (ptop, allvols), 3)
                     vn = self.asrv.vfs.all_aps[0][1]
 
                 self.q.put((abspath, tpath, fmt, vn))
-                self.log("conv {} :{} \033[0m{}".format(tpath, fmt, abspath), c=6)
+                self.log("conv %r :%s \033[0m%r" % (tpath, fmt, abspath), 6)
 
         while not self.stopping:
             with self.mutex:
 
@@ -375,8 +375,8 @@ class ThumbSrv(object):
                     fun(ap_unpk, ttpath, fmt, vn)
                     break
                 except Exception as ex:
-                    msg = "{} could not create thumbnail of {}\n{}"
-                    msg = msg.format(fun.__name__, abspath, min_ex())
+                    msg = "%s could not create thumbnail of %r\n%s"
+                    msg = msg % (fun.__name__, abspath, min_ex())
                     c: Union[str, int] = 1 if "<Signals.SIG" in msg else "90"
                     self.log(msg, c)
                     if getattr(ex, "returncode", 0) != 321:
 
@@ -136,7 +136,7 @@ class U2idx(object):
         ptop = vn.realpath
         histpath = self.asrv.vfs.histtab.get(ptop)
         if not histpath:
-            self.log("no histpath for [{}]".format(ptop))
+            self.log("no histpath for %r" % (ptop,))
             return None
 
         db_path = os.path.join(histpath, "up2k.db")
 
@@ -151,7 +151,7 @@ class U2idx(object):
             db = sqlite3.connect(uri, timeout=2, uri=True, check_same_thread=False)
             cur = db.cursor()
             cur.execute('pragma table_info("up")').fetchone()
-            self.log("ro: {}".format(db_path))
+            self.log("ro: %r" % (db_path,))
         except:
             self.log("could not open read-only: {}\n{}".format(uri, min_ex()))
             # may not fail until the pragma so unset it
 
@@ -161,7 +161,7 @@ class U2idx(object):
         # on windows, this steals the write-lock from up2k.deferred_init --
         # seen on win 10.0.17763.2686, py 3.10.4, sqlite 3.37.2
         cur = sqlite3.connect(db_path, timeout=2, check_same_thread=False).cursor()
-        self.log("opened {}".format(db_path))
+        self.log("opened %r" % (db_path,))
 
         self.cur[ptop] = cur
         return cur
@@ -794,7 +794,7 @@ class Up2k(object):
|
||||
if ccd != cd:
|
||||
continue
|
||||
|
||||
self.log("xiu: {}# {}".format(len(wrfs), cmd))
|
||||
self.log("xiu: %d# %r" % (len(wrfs), cmd))
|
||||
runihook(self.log, cmd, vol, ups)
|
||||
|
||||
def _vis_job_progress(self, job: dict[str, Any]) -> str:
|
||||
@@ -1060,7 +1060,7 @@ class Up2k(object):
|
||||
"""mutex(main,reg) me"""
|
||||
histpath = self.vfs.histtab.get(ptop)
|
||||
if not histpath:
|
||||
self.log("no histpath for [{}]".format(ptop))
|
||||
self.log("no histpath for %r" % (ptop,))
|
||||
return None
|
||||
|
||||
db_path = os.path.join(histpath, "up2k.db")
|
||||
@@ -1154,7 +1154,7 @@ class Up2k(object):
|
||||
job["poke"] = time.time()
|
||||
job["busy"] = {}
|
||||
else:
|
||||
self.log("ign deleted file in snap: [{}]".format(fp))
|
||||
self.log("ign deleted file in snap: %r" % (fp,))
|
||||
if not n4g:
|
||||
rm.append(k)
|
||||
continue
|
||||
@@ -1386,12 +1386,12 @@ class Up2k(object):
|
||||
xvol: bool,
|
||||
) -> tuple[int, int, int]:
|
||||
if xvol and not rcdir.startswith(top):
|
||||
self.log("skip xvol: [{}] -> [{}]".format(cdir, rcdir), 6)
|
||||
self.log("skip xvol: %r -> %r" % (cdir, rcdir), 6)
|
||||
return 0, 0, 0
|
||||
|
||||
if rcdir in seen:
|
||||
t = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
||||
self.log(t.format(seen[-1], rcdir, cdir), 3)
|
||||
t = "bailing from symlink loop,\n prev: %r\n curr: %r\n from: %r"
|
||||
self.log(t % (seen[-1], rcdir, cdir), 3)
|
||||
return 0, 0, 0
|
||||
|
||||
# total-files-added, total-num-files, recursive-size
|
||||
@@ -1447,7 +1447,7 @@ class Up2k(object):
|
||||
and inf.st_dev != dev
|
||||
and not (ANYWIN and bos.stat(rap).st_dev == dev)
|
||||
):
|
||||
self.log("skip xdev {}->{}: {}".format(dev, inf.st_dev, abspath), 6)
|
||||
self.log("skip xdev %s->%s: %r" % (dev, inf.st_dev, abspath), 6)
|
||||
continue
|
||||
if abspath in excl or rap in excl:
|
||||
unreg.append(rp)
|
||||
@@ -1476,10 +1476,10 @@ class Up2k(object):
|
||||
tnf += i2
|
||||
rsz += i3
|
||||
except:
|
||||
t = "failed to index subdir [{}]:\n{}"
|
||||
self.log(t.format(abspath, min_ex()), c=1)
|
||||
t = "failed to index subdir %r:\n%s"
|
||||
self.log(t % (abspath, min_ex()), 1)
|
||||
elif not stat.S_ISREG(inf.st_mode):
|
||||
self.log("skip type-0%o file [%s]" % (inf.st_mode, abspath))
|
||||
self.log("skip type-0%o file %r" % (inf.st_mode, abspath))
|
||||
else:
|
||||
# self.log("file: {}".format(abspath))
|
||||
if rp.endswith(".PARTIAL") and time.time() - lmod < 60:
|
||||
@@ -1562,7 +1562,7 @@ class Up2k(object):
db.c.execute("insert into cv values (?,?,?)", (crd, cdn, cv))
db.n += 1
except Exception as ex:
- self.log("cover {}/{} failed: {}".format(rd, cv, ex), 6)
+ self.log("cover %r/%r failed: %s" % (rd, cv, ex), 6)

seen_files = set([x[2] for x in files]) # for dropcheck
for sz, lmod, fn in files:

@@ -1584,9 +1584,9 @@ class Up2k(object):
self.pp.n -= 1
dw, dts, dsz, ip, at = in_db[0]
if len(in_db) > 1:
- t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
+ t = "WARN: multiple entries: %r => %r |%d|\n%r"
rep_db = "\n".join([repr(x) for x in in_db])
- self.log(t.format(top, rp, len(in_db), rep_db))
+ self.log(t % (top, rp, len(in_db), rep_db))
dts = -1

if fat32 and abs(dts - lmod) == 1:

@@ -1595,10 +1595,8 @@ class Up2k(object):
if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
continue

- t = "reindex [{}] => [{}] mtime({}/{}) size({}/{})".format(
- top, rp, dts, lmod, dsz, sz
- )
- self.log(t)
+ t = "reindex %r => %r mtime(%s/%s) size(%s/%s)"
+ self.log(t % (top, rp, dts, lmod, dsz, sz))
self.db_rm(db.c, rd, fn, 0)
tfa += 1
db.n += 1
@@ -1614,14 +1612,14 @@ class Up2k(object):
wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
else:
if sz > 1024 * 1024:
- self.log("file: {}".format(abspath))
+ self.log("file: %r" % (abspath,))

try:
hashes, _ = self._hashlist_from_file(
abspath, "a{}, ".format(self.pp.n)
)
except Exception as ex:
- self.log("hash: {} @ [{}]".format(repr(ex), abspath))
+ self.log("hash: %r @ %r" % (ex, abspath))
continue

if not hashes:

@@ -1667,8 +1665,8 @@ class Up2k(object):
assert erd_erd # type: ignore # !rm

if n:
- t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
- self.log(t.format(n, top, sh_rd))
+ t = "forgetting %d shadowed autoindexed files in %r > %r"
+ self.log(t % (n, top, sh_rd))

q = "delete from dh where (d = ? or d like ?||'/%')"
db.c.execute(q, erd_erd)

@@ -1865,7 +1863,7 @@ class Up2k(object):
stl = bos.lstat(abspath)
st = bos.stat(abspath) if stat.S_ISLNK(stl.st_mode) else stl
except Exception as ex:
- self.log("missing file: %s" % (abspath,), 3)
+ self.log("missing file: %r" % (abspath,), 3)
f404.append((drd, dfn, w))
continue
@@ -1876,12 +1874,12 @@ class Up2k(object):
w2 = up2k_wark_from_metadata(self.salt, sz2, mt2, rd, fn)
else:
if sz2 > 1024 * 1024 * 32:
- self.log("file: {}".format(abspath))
+ self.log("file: %r" % (abspath,))

try:
hashes, _ = self._hashlist_from_file(abspath, pf)
except Exception as ex:
- self.log("hash: {} @ [{}]".format(repr(ex), abspath))
+ self.log("hash: %r @ %r" % (ex, abspath))
continue

if not hashes:

@@ -1901,9 +1899,8 @@ class Up2k(object):

rewark.append((drd, dfn, w2, sz2, mt2))

- t = "hash mismatch: {}\n db: {} ({} byte, {})\n fs: {} ({} byte, {})"
- t = t.format(abspath, w, sz, mt, w2, sz2, mt2)
- self.log(t, 1)
+ t = "hash mismatch: %r\n db: %s (%d byte, %d)\n fs: %s (%d byte, %d)"
+ self.log(t % (abspath, w, sz, mt, w2, sz2, mt2), 1)

if e2vp and (rewark or f404):
self.hub.retcode = 1

@@ -2451,7 +2448,7 @@ class Up2k(object):
q.task_done()

def _log_tag_err(self, parser: Any, abspath: str, ex: Any) -> None:
- msg = "{} failed to read tags from {}:\n{}".format(parser, abspath, ex)
+ msg = "%s failed to read tags from %r:\n%s" % (parser, abspath, ex)
self.log(msg.lstrip(), c=1 if "<Signals.SIG" in msg else 3)

def _tagscan_file(
@@ -2991,11 +2988,11 @@ class Up2k(object):
job = rj
break
else:
- self.log("asserting contents of %s" % (orig_ap,))
+ self.log("asserting contents of %r" % (orig_ap,))
hashes2, st = self._hashlist_from_file(orig_ap)
wark2 = up2k_wark_from_hashlist(self.salt, st.st_size, hashes2)
if dwark != wark2:
- t = "will not dedup (fs index desync): fs=%s, db=%s, file: %s\n%s"
+ t = "will not dedup (fs index desync): fs=%s, db=%s, file: %r\n%s"
self.log(t % (wark2, dwark, orig_ap, rj))
lost.append(dupe[3:])
continue

@@ -3013,8 +3010,8 @@ class Up2k(object):
if lost:
c2 = None
for cur, dp_dir, dp_fn in lost:
- t = "forgetting desynced db entry: /{}"
- self.log(t.format(vjoin(vjoin(vfs.vpath, dp_dir), dp_fn)))
+ t = "forgetting desynced db entry: %r"
+ self.log(t % ("/" + vjoin(vjoin(vfs.vpath, dp_dir), dp_fn)))
self.db_rm(cur, dp_dir, dp_fn, cj["size"])
if c2 and c2 != cur:
c2.connection.commit()

@@ -3043,8 +3040,8 @@ class Up2k(object):
except:
# missing; restart
if not self.args.nw and not n4g:
- t = "forgetting deleted partial upload at {}"
- self.log(t.format(path))
+ t = "forgetting deleted partial upload at %r"
+ self.log(t % (path,))
del reg[wark]
break
@@ -3055,19 +3052,25 @@ class Up2k(object):
pass

elif st.st_size != rj["size"]:
- t = "will not dedup (fs index desync): {}, size fs={} db={}, mtime fs={} db={}, file: {}\n{}"
- t = t.format(
- wark, st.st_size, rj["size"], st.st_mtime, rj["lmod"], path, rj
+ t = "will not dedup (fs index desync): %s, size fs=%d db=%d, mtime fs=%d db=%d, file: %r\n%s"
+ t = t % (
+ wark,
+ st.st_size,
+ rj["size"],
+ st.st_mtime,
+ rj["lmod"],
+ path,
+ rj,
)
self.log(t)
del reg[wark]

elif inc_ap != orig_ap and not data_ok and "done" in reg[wark]:
- self.log("asserting contents of %s" % (orig_ap,))
+ self.log("asserting contents of %r" % (orig_ap,))
hashes2, _ = self._hashlist_from_file(orig_ap)
wark2 = up2k_wark_from_hashlist(self.salt, st.st_size, hashes2)
if wark != wark2:
- t = "will not dedup (fs index desync): fs=%s, idx=%s, file: %s\n%s"
+ t = "will not dedup (fs index desync): fs=%s, idx=%s, file: %r\n%s"
self.log(t % (wark2, wark, orig_ap, rj))
del reg[wark]

@@ -3084,7 +3087,7 @@ class Up2k(object):
vsrc = djoin(job["vtop"], job["prel"], job["name"])
vsrc = vsrc.replace("\\", "/") # just for prints anyways
if "done" not in job:
- self.log("unfinished:\n {0}\n {1}".format(src, dst))
+ self.log("unfinished:\n %r\n %r" % (src, dst))
err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + quotep(vsrc) + " "
@@ -3093,14 +3096,15 @@ class Up2k(object):
if cur:
dupe = (cj["prel"], cj["name"], cj["lmod"])
try:
- self.dupesched[src].append(dupe)
+ if dupe not in self.dupesched[src]:
+ self.dupesched[src].append(dupe)
except:
self.dupesched[src] = [dupe]

raise Pebkac(422, err)

elif "nodupe" in vfs.flags:
- self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
+ self.log("dupe-reject:\n %r\n %r" % (src, dst))
err = "upload rejected, file already exists:\n"
err += "/" + quotep(vsrc) + " "
raise Pebkac(409, err)
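The first hunk above is a small behavior fix rather than a formatting change: a pending duplicate could previously be appended to `dupesched` more than once. A minimal illustration (not copyparty code) of the fixed scheduling pattern:

```python
# minimal illustration (not copyparty code): queue each pending dupe
# at most once per source file
dupesched = {}

def schedule(src, dupe):
    try:
        if dupe not in dupesched[src]:  # the new guard
            dupesched[src].append(dupe)
    except KeyError:
        dupesched[src] = [dupe]

schedule("/mnt/a.bin", ("prel", "a.bin", 1700000000))
schedule("/mnt/a.bin", ("prel", "a.bin", 1700000000))  # no longer double-queued
print(dupesched)  # {'/mnt/a.bin': [('prel', 'a.bin', 1700000000)]}
```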
@@ -3162,7 +3166,7 @@ class Up2k(object):
"",
)
if not hr:
- t = "upload blocked by xbu server config: %s" % (dst,)
+ t = "upload blocked by xbu server config: %r" % (dst,)
self.log(t, 1)
raise Pebkac(403, t)
if hr.get("reloc"):

@@ -3303,7 +3307,7 @@ class Up2k(object):
times = (int(time.time()), int(cj["lmod"]))
bos.utime(ap, times, False)

- self.log("touched %s from %d to %d" % (ap, job["lmod"], cj["lmod"]))
+ self.log("touched %r from %d to %d" % (ap, job["lmod"], cj["lmod"]))
except Exception as ex:
self.log("umod failed, %r" % (ex,), 3)
@@ -3318,7 +3322,7 @@ class Up2k(object):

fp = djoin(fdir, fname)
if job.get("replace") and bos.path.exists(fp):
- self.log("replacing existing file at {}".format(fp))
+ self.log("replacing existing file at %r" % (fp,))
cur = None
ptop = job["ptop"]
vf = self.flags.get(ptop) or {}

@@ -3361,9 +3365,9 @@ class Up2k(object):
raise Exception(t % (src, fsrc, dst))

if verbose:
- t = "linking dupe:\n point-to: {0}\n link-loc: {1}"
+ t = "linking dupe:\n point-to: {0!r}\n link-loc: {1!r}"
if fsrc:
- t += "\n data-src: {2}"
+ t += "\n data-src: {2!r}"
self.log(t.format(src, dst, fsrc))

if self.args.nw:

@@ -3433,7 +3437,7 @@ class Up2k(object):
elif fsrc and bos.path.isfile(fsrc):
csrc = fsrc
else:
- t = "BUG: no valid sources to link from! orig(%s) fsrc(%s) link(%s)"
+ t = "BUG: no valid sources to link from! orig(%r) fsrc(%r) link(%r)"
self.log(t, 1)
raise Exception(t % (src, fsrc, dst))
shutil.copy2(fsenc(csrc), fsenc(dst))
@@ -3611,15 +3615,12 @@ class Up2k(object):
atomic_move(self.log, src, dst, vflags)

times = (int(time.time()), int(job["lmod"]))
- self.log(
- "no more chunks, setting times {} ({}) on {}".format(
- times, bos.path.getsize(dst), dst
- )
- )
+ t = "no more chunks, setting times %s (%d) on %r"
+ self.log(t % (times, bos.path.getsize(dst), dst))
try:
bos.utime(dst, times)
except:
- self.log("failed to utime ({}, {})".format(dst, times))
+ self.log("failed to utime (%r, %s)" % (dst, times))

zs = "prel name lmod size ptop vtop wark dwrk host user addr"
z2 = [job[x] for x in zs.split()]

@@ -3936,7 +3937,7 @@ class Up2k(object):
if jrem == rem:
if job["ptop"] != ptop:
t = "job.ptop [%s] != vol.ptop [%s] ??"
- raise Exception(t % (job["ptop"] != ptop))
+ raise Exception(t % (job["ptop"], ptop))
partial = vn.canonical(vjoin(job["prel"], job["tnam"]))
break
if partial:
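The second hunk above is a genuine bugfix: the old code interpolated the *comparison* `job["ptop"] != ptop` (a single bool) into a template with two placeholders, so the exception that was supposed to explain the mismatch would itself crash. A standalone demonstration (not copyparty code):

```python
# minimal illustration (not copyparty code) of the bug fixed above
t = "job.ptop [%s] != vol.ptop [%s] ??"

try:
    t % ("/mnt/a" != "/mnt/b")  # old: one bool for two %s placeholders
except TypeError as ex:
    print(ex)  # not enough arguments for format string

print(t % ("/mnt/a", "/mnt/b"))  # fixed: job.ptop [/mnt/a] != vol.ptop [/mnt/b] ??
```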
@@ -3976,7 +3977,7 @@ class Up2k(object):
if is_dir:
# note: deletion inside shares would require a rewrite here;
# shares necessitate get_dbv which is incompatible with walk
- g = vn0.walk("", rem0, [], uname, permsets, True, scandir, True)
+ g = vn0.walk("", rem0, [], uname, permsets, 2, scandir, True)
if unpost:
raise Pebkac(400, "cannot unpost folders")
elif stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):

@@ -3984,7 +3985,7 @@ class Up2k(object):
vpath_dir = vsplit(vpath)[0]
g = [(vn, voldir, vpath_dir, adir, [(fn, 0)], [], {})] # type: ignore
else:
- self.log("rm: skip type-0%o file [%s]" % (st.st_mode, atop))
+ self.log("rm: skip type-0%o file %r" % (st.st_mode, atop))
return 0, [], []

xbd = vn.flags.get("xbd")

@@ -4008,7 +4009,7 @@ class Up2k(object):

volpath = ("%s/%s" % (vrem, fn)).strip("/")
vpath = ("%s/%s" % (dbv.vpath, volpath)).strip("/")
- self.log("rm %s\n %s" % (vpath, abspath))
+ self.log("rm %r\n %r" % (vpath, abspath))
if not unpost:
# recursion-only sanchk
_ = dbv.get(volpath, uname, *permsets[0])

@@ -4031,8 +4032,8 @@ class Up2k(object):
time.time(),
"",
):
- t = "delete blocked by xbd server config: {}"
- self.log(t.format(abspath), 1)
+ t = "delete blocked by xbd server config: %r"
+ self.log(t % (abspath,), 1)
continue

n_files += 1
@@ -4089,6 +4090,7 @@ class Up2k(object):
raise Pebkac(400, "cp: cannot copy parent into subfolder")

svn, srem = self.vfs.get(svp, uname, True, False)
dvn, drem = self.vfs.get(dvp, uname, False, True)
+ svn_dbv, _ = svn.get_dbv(srem)
sabs = svn.canonical(srem, False)
curs: set["sqlite3.Cursor"] = set()

@@ -4111,8 +4113,12 @@ class Up2k(object):
permsets = [[True, False]]
scandir = not self.args.no_scandir

+ # if user can see dotfiles in target volume, only include
+ # dots from source vols where user also has the dot perm
+ dots = 1 if uname in dvn.axs.udot else 2
+
+ # don't use svn_dbv; would skip subvols due to _ls `if not rem:`
- g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
+ g = svn.walk("", srem, [], uname, permsets, dots, scandir, True)
with self.mutex:
try:
for dbv, vrem, _, atop, files, rd, vd in g:

@@ -4121,7 +4127,7 @@ class Up2k(object):
svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x)
if not svpf.startswith(svp + "/"): # assert
self.log(min_ex(), 1)
- t = "cp: bug at %s, top %s%s"
+ t = "cp: bug at %r, top %r%s"
raise Pebkac(500, t % (svpf, svp, SEESLOG))

dvpf = dvp + svpf[len(svp) :]
@@ -4161,7 +4167,7 @@ class Up2k(object):
except:
pass # broken symlink; keep as-is
elif not stat.S_ISREG(st.st_mode):
- self.log("skipping type-0%o file [%s]" % (st.st_mode, sabs))
+ self.log("skipping type-0%o file %r" % (st.st_mode, sabs))
return ""
else:
is_link = False

@@ -4189,7 +4195,7 @@ class Up2k(object):
time.time(),
"",
):
- t = "copy blocked by xbr server config: {}".format(svp)
+ t = "copy blocked by xbr server config: %r" % (svp,)
self.log(t, 1)
raise Pebkac(405, t)

@@ -4226,7 +4232,7 @@ class Up2k(object):
)
curs.add(c2)
else:
- self.log("not found in src db: [{}]".format(svp))
+ self.log("not found in src db: %r" % (svp,))

try:
if is_link and st != stl:

@@ -4243,7 +4249,7 @@ class Up2k(object):
if ex.errno != errno.EXDEV:
raise

- self.log("using plain copy (%s):\n %s\n %s" % (ex.strerror, sabs, dabs))
+ self.log("using plain copy (%s):\n %r\n %r" % (ex.strerror, sabs, dabs))
b1, b2 = fsenc(sabs), fsenc(dabs)
is_link = os.path.islink(b1) # due to _relink
try:
@@ -4324,13 +4330,13 @@ class Up2k(object):
scandir = not self.args.no_scandir

# following symlinks is too scary
- g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
+ g = svn.walk("", srem, [], uname, permsets, 2, scandir, True)
for dbv, vrem, _, atop, files, rd, vd in g:
if dbv != jail:
# fail early (prevent partial moves)
raise Pebkac(400, "mv: source folder contains other volumes")

- g = svn.walk("", srem, [], uname, permsets, True, scandir, True)
+ g = svn.walk("", srem, [], uname, permsets, 2, scandir, True)
with self.mutex:
try:
for dbv, vrem, _, atop, files, rd, vd in g:

@@ -4343,7 +4349,7 @@ class Up2k(object):
svpf = "/".join(x for x in [dbv.vpath, vrem, fn[0]] if x)
if not svpf.startswith(svp + "/"): # assert
self.log(min_ex(), 1)
- t = "mv: bug at %s, top %s%s"
+ t = "mv: bug at %r, top %r%s"
raise Pebkac(500, t % (svpf, svp, SEESLOG))

dvpf = dvp + svpf[len(svp) :]

@@ -4362,7 +4368,7 @@ class Up2k(object):
for ap in reversed(zsl):
if not ap.startswith(sabs):
self.log(min_ex(), 1)
- t = "mv_d: bug at %s, top %s%s"
+ t = "mv_d: bug at %r, top %r%s"
raise Pebkac(500, t % (ap, sabs, SEESLOG))

rem = ap[len(sabs) :].replace(os.sep, "/").lstrip("/")
@@ -4433,7 +4439,7 @@ class Up2k(object):
time.time(),
"",
):
- t = "move blocked by xbr server config: {}".format(svp)
+ t = "move blocked by xbr server config: %r" % (svp,)
self.log(t, 1)
raise Pebkac(405, t)

@@ -4443,8 +4449,8 @@ class Up2k(object):

if is_dirlink:
dlabs = absreal(sabs)
- t = "moving symlink from [{}] to [{}], target [{}]"
- self.log(t.format(sabs, dabs, dlabs))
+ t = "moving symlink from %r to %r, target %r"
+ self.log(t % (sabs, dabs, dlabs))
mt = bos.path.getmtime(sabs, False)
wunlink(self.log, sabs, svn.flags)
self._symlink(dlabs, dabs, dvn.flags, False, lmod=mt)

@@ -4516,7 +4522,7 @@ class Up2k(object):
)
curs.add(c2)
else:
- self.log("not found in src db: [{}]".format(svp))
+ self.log("not found in src db: %r" % (svp,))

try:
if is_xvol and has_dupes:

@@ -4537,7 +4543,7 @@ class Up2k(object):
if ex.errno != errno.EXDEV:
raise

- self.log("using copy+delete (%s):\n %s\n %s" % (ex.strerror, sabs, dabs))
+ self.log("using copy+delete (%s):\n %r\n %r" % (ex.strerror, sabs, dabs))
b1, b2 = fsenc(sabs), fsenc(dabs)
is_link = os.path.islink(b1) # due to _relink
try:
@@ -4645,7 +4651,7 @@ class Up2k(object):
"""
srd, sfn = vsplit(vrem)
has_dupes = False
- self.log("forgetting {}".format(vrem))
+ self.log("forgetting %r" % (vrem,))
if wark and cur:
self.log("found {} in db".format(wark))
if drop_tags:

@@ -4676,6 +4682,13 @@ class Up2k(object):
t = "forgetting partial upload {} ({})"
p = self._vis_job_progress(job)
self.log(t.format(wark, p))

+ src = djoin(ptop, vrem)
+ zi = len(self.dupesched.pop(src, []))
+ if zi:
+ t = "...and forgetting %d links in dupesched"
+ self.log(t % (zi,))
+
assert wark
del reg[wark]

@@ -4714,7 +4727,7 @@ class Up2k(object):
dvrem = vjoin(rd, fn).strip("/")
if ptop != sptop or srem != dvrem:
dupes.append([ptop, dvrem])
- self.log("found {} dupe: [{}] {}".format(wark, ptop, dvrem))
+ self.log("found %s dupe: %r %r" % (wark, ptop, dvrem))

if not dupes:
return 0
@@ -4727,7 +4740,7 @@ class Up2k(object):
d = links if bos.path.islink(ap) else full
d[ap] = (ptop, vp)
except:
- self.log("relink: not found: [{}]".format(ap))
+ self.log("relink: not found: %r" % (ap,))

# self.log("full:\n" + "\n".join(" {:90}: {}".format(*x) for x in full.items()))
# self.log("links:\n" + "\n".join(" {:90}: {}".format(*x) for x in links.items()))

@@ -4735,7 +4748,7 @@ class Up2k(object):
# deleting final remaining full copy; swap it with a symlink
slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs)
- self.log("linkswap [{}] and [{}]".format(sabs, slabs))
+ self.log("linkswap %r and %r" % (sabs, slabs))
mt = bos.path.getmtime(slabs, False)
flags = self.flags.get(ptop) or {}
atomic_move(self.log, sabs, slabs, flags)

@@ -4770,7 +4783,7 @@ class Up2k(object):

zs = absreal(alink)
if ldst != zs:
- t = "relink because computed != actual destination:\n %s\n %s"
+ t = "relink because computed != actual destination:\n %r\n %r"
self.log(t % (ldst, zs), 3)
ldst = zs
faulty = True

@@ -4785,7 +4798,7 @@ class Up2k(object):
t = "relink because symlink verification failed: %s; %r"
self.log(t % (ex, ex), 3)

- self.log("relinking [%s] to [%s]" % (alink, dabs))
+ self.log("relinking %r to %r" % (alink, dabs))
flags = self.flags.get(parts[0]) or {}
try:
lmod = bos.path.getmtime(alink, False)
@@ -4900,7 +4913,7 @@ class Up2k(object):
"",
)
if not hr:
- t = "upload blocked by xbu server config: {}".format(vp_chk)
+ t = "upload blocked by xbu server config: %r" % (vp_chk,)
self.log(t, 1)
raise Pebkac(403, t)
if hr.get("reloc"):

@@ -4950,7 +4963,7 @@ class Up2k(object):
try:
sp.check_call(["fsutil", "sparse", "setflag", abspath])
except:
- self.log("could not sparse [{}]".format(abspath), 3)
+ self.log("could not sparse %r" % (abspath,), 3)
relabel = True
sprs = False

@@ -5137,7 +5150,7 @@ class Up2k(object):
self._tag_file(cur, entags, wark, abspath, tags)
cur.connection.commit()

- self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))
+ self.log("tagged %r (%d+%d)" % (abspath, ntags1, len(tags) - ntags1))

def _hasher(self) -> None:
with self.hashq_mutex:
@@ -5156,7 +5169,7 @@ class Up2k(object):
if not self._hash_t(task) and self.stop:
return
except Exception as ex:
- self.log("failed to hash %s: %s" % (task, ex), 1)
+ self.log("failed to hash %r: %s" % (task, ex), 1)

def _hash_t(
self, task: tuple[str, str, dict[str, Any], str, str, str, float, str, bool]

@@ -5168,7 +5181,7 @@ class Up2k(object):
return True

abspath = djoin(ptop, rd, fn)
- self.log("hashing " + abspath)
+ self.log("hashing %r" % (abspath,))
inf = bos.stat(abspath)
if not inf.st_size:
wark = up2k_wark_from_metadata(

@@ -5261,7 +5274,7 @@ class Up2k(object):

x = pathmod(self.vfs, "", req_vp, {"vp": fvp, "fn": fn})
if not x:
- t = "hook_fx(%s): failed to resolve %s based on %s"
+ t = "hook_fx(%s): failed to resolve %r based on %r"
self.log(t % (act, fvp, req_vp))
continue
@@ -1024,7 +1024,7 @@ class ProgressPrinter(threading.Thread):
now = time.time()
if msg and now - tp > 10:
tp = now
- self.log("progress: %s" % (msg,), 6)
+ self.log("progress: %r" % (msg,), 6)

if no_stdout:
continue

@@ -1626,7 +1626,7 @@ class MultipartParser(object):
(only the fallback non-js uploader relies on these filenames)
"""
for ln in read_header(self.sr, 2, 2592000):
- self.log(ln)
+ self.log(repr(ln))

m = self.re_ctype.match(ln)
if m:
@@ -1917,11 +1917,11 @@ def gen_filekey_dbg(
if p2 != fspath:
raise Exception()
except:
- t = "maybe wrong abspath for filekey;\norig: {}\nreal: {}"
- log(t.format(fspath, p2), 1)
+ t = "maybe wrong abspath for filekey;\norig: %r\nreal: %r"
+ log(t % (fspath, p2), 1)

- t = "fk({}) salt({}) size({}) inode({}) fspath({}) at({})"
- log(t.format(ret[:8], salt, fsize, inode, fspath, ctx), 5)
+ t = "fk(%s) salt(%s) size(%d) inode(%d) fspath(%r) at(%s)"
+ log(t % (ret[:8], salt, fsize, inode, fspath, ctx), 5)

return ret

@@ -2277,7 +2277,7 @@ def log_reloc(
rem: str,
) -> None:
nap, nvp, nfn, (nvn, nrem) = pm
- t = "reloc %s:\nold ap [%s]\nnew ap [%s\033[36m/%s\033[0m]\nold vp [%s]\nnew vp [%s\033[36m/%s\033[0m]\nold fn [%s]\nnew fn [%s]\nold vfs [%s]\nnew vfs [%s]\nold rem [%s]\nnew rem [%s]"
+ t = "reloc %s:\nold ap %r\nnew ap %r\033[36m/%r\033[0m\nold vp %r\nnew vp %r\033[36m/%r\033[0m\nold fn %r\nnew fn %r\nold vfs %r\nnew vfs %r\nold rem %r\nnew rem %r"
log(t % (re, ap, nap, nfn, vp, nvp, nfn, fn, nfn, vn.vpath, nvn.vpath, rem, nrem))
@@ -2448,7 +2448,7 @@ def lsof(log: "NamedLogger", abspath: str) -> None:
try:
rc, so, se = runcmd([b"lsof", b"-R", fsenc(abspath)], timeout=45)
zs = (so.strip() + "\n" + se.strip()).strip()
- log("lsof {} = {}\n{}".format(abspath, rc, zs), 3)
+ log("lsof %r = %s\n%s" % (abspath, rc, zs), 3)
except:
log("lsof failed; " + min_ex(), 3)

@@ -2484,17 +2484,17 @@ def _fs_mvrm(
for attempt in range(90210):
try:
if ino and os.stat(bsrc).st_ino != ino:
- t = "src inode changed; aborting %s %s"
+ t = "src inode changed; aborting %s %r"
log(t % (act, src), 1)
return False
if (dst and not atomic) and os.path.exists(bdst):
- t = "something appeared at dst; aborting rename [%s] ==> [%s]"
+ t = "something appeared at dst; aborting rename %r ==> %r"
log(t % (src, dst), 1)
return False
osfun(*args)
if attempt:
now = time.time()
- t = "%sd in %.2f sec, attempt %d: %s"
+ t = "%sd in %.2f sec, attempt %d: %r"
log(t % (act, now - t0, attempt + 1, src))
return True
except OSError as ex:

@@ -2506,7 +2506,7 @@ def _fs_mvrm(
if not attempt:
if not PY2:
ino = os.stat(bsrc).st_ino
- t = "%s failed (err.%d); retrying for %d sec: [%s]"
+ t = "%s failed (err.%d); retrying for %d sec: %r"
log(t % (act, ex.errno, maxtime + 0.99, src))

time.sleep(chill)
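`_fs_mvrm` is the retry wrapper these log strings belong to: on Windows, a rename or delete can fail transiently while an antivirus or indexer holds the file open, so the operation is retried until a deadline. A hedged sketch of that pattern (not the actual function, which additionally tracks inodes and atomicity):

```python
# hedged sketch (not the actual _fs_mvrm): retry a rename that can fail
# transiently on windows while another process holds the file open
import os
import time

def retry_rename(src: str, dst: str, maxtime: float = 10.0, chill: float = 0.1) -> bool:
    t0 = time.time()
    while True:
        try:
            os.rename(src, dst)  # stands in for the wrapped osfun(*args)
            return True
        except OSError:
            if time.time() - t0 > maxtime:
                return False  # give up; the caller decides how to report it
            time.sleep(chill)
```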
@@ -3535,7 +3535,7 @@ def runhook(
log, src, cmd, ap, vp, host, uname, perms, mt, sz, ip, at, txt
)
if log and args.hook_v:
- log("hook(%s) [%s] => \033[32m%s" % (src, cmd, hr), 6)
+ log("hook(%s) %r => \033[32m%s" % (src, cmd, hr), 6)
if not hr:
return {}
for k, v in hr.items():
@@ -2785,6 +2785,7 @@ html.b #u2conf a.b:hover {
padding-left: .2em;
}
.fsearch_explain {
color: var(--a-dark);
padding-left: .7em;
+ font-size: 1.1em;
line-height: 0;
@@ -542,6 +542,7 @@ var Ls = {
"u_hashdone": 'hashing done',
"u_hashing": 'hash',
"u_hs": 'handshaking...',
+ "u_started": "the files are now being uploaded; see [🚀]",
"u_dupdefer": "duplicate; will be processed after all other files",
"u_actx": "click this text to prevent loss of<br />performance when switching to other windows/tabs",
"u_fixed": "OK! Fixed it 👍",

@@ -577,6 +578,7 @@ var Ls = {
"ue_la": 'you are currently logged in as "{0}"',
"ue_sr": 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the magnifying glass 🔎 (next to the big SEARCH button), and try uploading again\n\nsorry',
"ue_ta": 'try uploading again, it should work now',
+ "ue_ab": "this file is already being uploaded into another folder, and that upload must be completed before the file can be uploaded elsewhere.\n\nYou can abort and forget the initial upload using the top-left 🧯",
"ur_1uo": "OK: File uploaded successfully",
"ur_auo": "OK: All {0} files uploaded successfully",
"ur_1so": "OK: File found on server",
@@ -1129,6 +1131,7 @@ var Ls = {
"u_hashdone": 'befaring ferdig',
"u_hashing": 'les',
"u_hs": 'serveren tenker...',
+ "u_started": "filene blir nå lastet opp 🚀",
"u_dupdefer": "duplikat; vil bli håndtert til slutt",
"u_actx": "klikk her for å forhindre tap av<br />ytelse ved bytte til andre vinduer/faner",
"u_fixed": "OK! Løste seg 👍",

@@ -1164,6 +1167,7 @@ var Ls = {
"ue_la": 'du er logget inn som "{0}"',
"ue_sr": 'du er i filsøk-modus\n\nbytt til opplastning ved å klikke på forstørrelsesglasset 🔎 (ved siden av den store FILSØK-knappen) og prøv igjen\n\nsorry',
"ue_ta": 'prøv å laste opp igjen, det burde funke nå',
+ "ue_ab": "den samme filen er allerede under opplastning til en annen mappe, og den må fullføres der før filen kan lastes opp andre steder.\n\nDu kan avbryte og glemme den påbegynte opplastningen ved hjelp av 🧯 oppe til venstre",
"ur_1uo": "OK: Filen ble lastet opp",
"ur_auo": "OK: Alle {0} filene ble lastet opp",
"ur_1so": "OK: Filen ble funnet på serveren",

@@ -1716,6 +1720,7 @@ var Ls = {
"u_hashdone": '哈希完成',
"u_hashing": '哈希',
"u_hs": '正在等待服务器...',
+ "u_started": "文件现在正在上传 🚀", //m
"u_dupdefer": "这是一个重复文件。它将在所有其他文件上传后进行处理",
"u_actx": "单击此文本以防止切换到其他窗口/选项卡时性能下降",
"u_fixed": "好! 已修复 👍",

@@ -1751,6 +1756,7 @@ var Ls = {
"ue_la": '你当前以 "{0}" 登录',
"ue_sr": '你当前处于文件搜索模式\n\n通过点击大搜索按钮旁边的放大镜 🔎 切换到上传模式,然后重试上传\n\n抱歉',
"ue_ta": '尝试再次上传,现在应该能正常工作',
+ "ue_ab": "这份文件正在上传到另一个文件夹,必须完成该上传后,才能将文件上传到其他位置。\n\n您可以通过左上角的🧯中止并忘记该上传。", //m
"ur_1uo": "成功:文件上传成功",
"ur_auo": "成功:所有 {0} 个文件上传成功",
"ur_1so": "成功:文件在服务器上找到",
@@ -9465,7 +9471,23 @@ var unpost = (function () {
toast.ok(5, this.responseText);

if (!QS('#op_unpost a[me]'))
- ebi(goto_unpost());
+ goto_unpost();
+
+ var fi = window.up2k && up2k.st.files;
+ if (fi && fi.length < 9) {
+ for (var a = 0; a < fi.length; a++) {
+ var f = fi[a];
+ if (!f.done && (f.rechecks || f.want_recheck) &&
+ !has(up2k.st.todo.handshake, f) &&
+ !has(up2k.st.busy.handshake, f)
+ ) {
+ up2k.st.todo.handshake.push(f);
+ up2k.ui.seth(f.n, 2, L.u_hashdone);
+ up2k.ui.seth(f.n, 1, '📦 wait');
+ up2k.ui.move(f.n, 'bz');
+ }
+ }
+ }
}

ct.onclick = function (e) {
copyparty/web/rups.css (new file, 114 lines)
@@ -0,0 +1,114 @@
html {
color: #333;
background: #f7f7f7;
font-family: sans-serif;
font-family: var(--font-main), sans-serif;
touch-action: manipulation;
}
#wrap {
margin: 2em auto;
padding: 0 1em 3em 1em;
line-height: 2.3em;
}
form {
display: inline;
padding-left: 1em;
}
input[type=submit],
a {
color: #047;
background: #fff;
text-decoration: none;
border: none;
border-bottom: 1px solid #8ab;
border-radius: .2em;
padding: .2em .6em;
margin: 0 .3em;
}
#wrap td a {
margin: 0;
line-height: 1em;
display: inline-block;
white-space: initial;
font-family: var(--font-main), sans-serif;
}
#repl {
border: none;
background: none;
color: inherit;
padding: 0;
position: fixed;
bottom: .25em;
left: .2em;
}
#wrap table {
border-collapse: collapse;
position: relative;
margin-top: 2em;
}
#wrap th {
top: -1px;
position: sticky;
background: #f7f7f7;
}
#wrap td {
font-family: var(--font-mono), monospace, monospace;
white-space: pre; /*date*/
overflow: hidden; /*ipv6*/
}
#wrap th:first-child,
#wrap td:first-child {
text-align: right;
}
#wrap td,
#wrap th {
text-align: left;
padding: .3em .6em;
max-width: 30vw;
}
#wrap tr:hover td {
background: #ddd;
box-shadow: 0 -1px 0 rgba(128, 128, 128, 0.5) inset;
}
#wrap th:first-child,
#wrap td:first-child {
border-radius: .5em 0 0 .5em;
}
#wrap th:last-child,
#wrap td:last-child {
border-radius: 0 .5em .5em 0;
}

html.z {
background: #222;
color: #ccc;
}
html.bz {
background: #11121d;
color: #bbd;
}
html.z input[type=submit],
html.z a {
color: #fff;
background: #057;
border-color: #37a;
}
html.z input[type=text] {
color: #ddd;
background: #223;
border: none;
border-bottom: 1px solid #fc5;
border-radius: .2em;
padding: .2em .3em;
}
html.z #wrap th {
background: #222;
}
html.bz #wrap th {
background: #223;
}
html.z #wrap tr:hover td {
background: #000;
}
copyparty/web/rups.html (new file, 67 lines)
@@ -0,0 +1,67 @@
<!DOCTYPE html>
<html lang="en">

<head>
<meta charset="utf-8">
<title>{{ s_doctitle }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<meta name="theme-color" content="#{{ tcolor }}">
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/rups.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ r }}/.cpr/ui.css?_={{ ts }}">
{{ html_head }}
</head>

<body>
<div id="wrap">
<a id="a" href="{{ r }}/?ru" class="af">refresh</a>
<a id="a" href="{{ r }}/?h" class="af">control-panel</a>
<form method="get" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ r }}">
<input type="hidden" name="ru" value="a" />
Filter: <input type="text" name="filter" size="20" placeholder="documents/passwords" value="{{ filt }}" />
<input type="submit" />
</form>
<span id="hits"></span>
<table id="tab"><thead><tr>
<th>size</th>
<th>who</th>
<th>when</th>
<th>age</th>
<th>dir</th>
<th>file</th>
</tr></thead><tbody>
{% for vp, evp, sz, ip, at in rows %}
<tr>
<td>{{ sz }}</td>
<td>{{ ip }}</td>
<td>{{ at }}</td>
<td>{{ (now-at) }}</td>
<td></td>
<td><a href="{{ r }}{{ evp }}">{{ vp|e }}</a></td>
</tr>
{% endfor %}
</tbody></table>
{% if not rows %}
(the database is not aware of any uploads)
{% endif %}
</div>
<a href="#" id="repl">π</a>
<script>

var SR = {{ r|tojson }},
NOW = {{ now }},
lang="{{ lang }}",
dfavico="{{ favico }}";

var STG = window.localStorage;
document.documentElement.className = (STG && STG.cpp_thm) || "{{ this.args.theme }}";

</script>
<script src="{{ r }}/.cpr/util.js?_={{ ts }}"></script>
<script src="{{ r }}/.cpr/rups.js?_={{ ts }}"></script>
{%- if js %}
<script src="{{ js }}_={{ ts }}"></script>
{%- endif %}
</body>
</html>
copyparty/web/rups.js (new file, 34 lines)
@@ -0,0 +1,34 @@
(function() {
var tab = ebi('tab').tBodies[0],
tr = Array.prototype.slice.call(tab.rows, 0),
rows = [];

for (var a = 0; a < tr.length; a++) {
var td = tr[a].cells,
an = td[5].children[0];

rows.push([
td[0].textContent,
td[2].textContent,
td[3].textContent,
an.textContent,
an.getAttribute('href'),
]);
}

for (var a = 0; a < rows.length; a++) {
var t = rows[a],
sz = t[0],
at = parseInt(t[1]),
nam = vsplit(t[3]),
dh = vsplit(t[4])[0];

tr[a].cells[0].innerHTML = sz.replace(/\B(?=(\d{3})+(?!\d))/g, " ");
tr[a].cells[2].innerHTML = at ? unix2iso(at) : '(?)';
tr[a].cells[3].innerHTML = at ? shumantime(t[2]) : '(?)';
tr[a].cells[4].innerHTML = '<a href="' + dh + '">' + nam[0] + '</a>';
tr[a].cells[5].children[0].innerHTML = nam[1].split('?')[0];
}

ebi('hits').innerHTML = '-- showing ' + rows.length + ' files';
})();
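One detail worth noting in rups.js: the size column is grouped into thousands with a zero-width lookahead regex instead of a locale formatter. The same expression happens to be valid in Python's `re`, reproduced here only as a readable reference:

```python
# the digit-grouping regex from rups.js, reproduced in python as a reference
import re

def group_thousands(digits: str) -> str:
    # insert a space at every position followed by a multiple of 3 digits
    return re.sub(r"\B(?=(\d{3})+(?!\d))", " ", digits)

print(group_thousands("1234567"))  # -> "1 234 567"
```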
@@ -44,9 +44,10 @@ a {
bottom: .25em;
left: .2em;
}
- table {
+ #wrap table {
border-collapse: collapse;
- position: relative;
+ position: relative;
+ margin-top: 2em;
}
th {
top: -1px;

@@ -62,6 +63,14 @@ th {
#wrap td+td+td+td+td+td+td+td {
font-family: var(--font-mono), monospace, monospace;
}
+ #wrap th:first-child,
+ #wrap td:first-child {
+ border-radius: .5em 0 0 .5em;
+ }
+ #wrap th:last-child,
+ #wrap td:last-child {
+ border-radius: 0 .5em .5em 0;
+ }

@@ -81,3 +90,6 @@ html.bz {
color: #bbd;
background: #11121d;
}
+ html.bz th {
+ background: #223;
+ }
@@ -58,6 +58,8 @@
{% if not rows %}
(you don't have any active shares btw)
{% endif %}
</div>
+ <a href="#" id="repl">π</a>
<script>

var SR = {{ r|tojson }},

@@ -157,6 +157,7 @@
<blockquote id="ad">enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!</blockquote></li>
{% endif %}

+ <li><a id="af" href="{{ r }}/?ru">show recent uploads</a></li>
<li><a id="k" href="{{ r }}/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
</ul>
@@ -38,6 +38,7 @@ var Ls = {
"ac1": "skru på no304",
"ad1": "no304 stopper all bruk av cache. Hvis ikke k304 var nok, prøv denne. Vil mangedoble dataforbruk!",
"ae1": "utgående:",
+ "af1": "vis nylig opplastede filer",
},
"eng": {
"d2": "shows the state of all active threads",

@@ -88,6 +89,7 @@ var Ls = {
"ac1": "开启 k304",
"ad1": "启用 no304 将禁用所有缓存;如果 k304 不够,可以尝试此选项。这将消耗大量的网络流量!", //m
"ae1": "正在下载:", //m
+ "af1": "显示最近上传的文件", //m
}
};
@@ -53,7 +53,6 @@
{% if s %}
<li>running <code>rclone mount</code> on LAN (or just dont have valid certificates)? add <code>--no-check-certificate</code></li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
</ul>

@@ -137,7 +136,6 @@
{% if args.ftps %}
<li>running on LAN (or just dont have valid certificates)? add <code>no_check_certificate=true</code> to the config command</li>
{% endif %}
<li>running <code>rclone mount</code> as root? add <code>--allow-other</code></li>
<li>old version of rclone? replace all <code>=</code> with <code> </code> (space)</li>
</ul>
<p>if you want to use the native FTP client in windows instead (please dont), press <code>win+R</code> and run this command:</p>
@@ -695,8 +695,9 @@ function Donut(uc, st) {
}

if (++r.tc >= 10) {
+ var s = r.eta === null ? 'paused' : r.eta > 60 ? shumantime(r.eta) : (r.eta + 's');
wintitle("{0}%, {1}, #{2}, ".format(
- f2f(v * 100 / t, 1), shumantime(r.eta), st.files.length - st.nfile.upload), true);
+ f2f(v * 100 / t, 1), s, st.files.length - st.nfile.upload), true);
r.tc = 0;
}

@@ -1359,7 +1360,15 @@ function up2k_init(subtle) {
draw_each = good_files.length < 50;

if (WebAssembly && !hws.length) {
- for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
+ var nw = Math.min(navigator.hardwareConcurrency || 4, 16);
+
+ if (CHROME) {
+ // chrome-bug 383568268 // #124
+ nw = Math.max(1, (nw > 4 ? 4 : (nw - 1)));
+ nw = (subtle && !MOBILE && nw > 2) ? 2 : nw;
+ }
+
+ for (var a = 0; a < nw; a++)
hws.push(new Worker(SR + '/.cpr/w.hash.js?_=' + TS));

if (!subtle)
@@ -2408,6 +2417,9 @@ function up2k_init(subtle) {
msg = 'done';

if (t.postlist.length) {
+ if (t.rechecks && QS('#opa_del.act'))
+ toast.inf(30, L.u_started, L.u_unpt);
+
var arr = st.todo.upload,
sort = arr.length && arr[arr.length - 1].nfile > t.n;

@@ -2518,8 +2530,13 @@ function up2k_init(subtle) {
if (!t.rechecks && (err_pend || err_srcb)) {
t.rechecks = 0;
t.want_recheck = true;
- err = L.u_dupdefer;
- cls = 'defer';
+ if (st.busy.upload.length || st.busy.handshake.length || st.bytes.uploaded) {
+ err = L.u_dupdefer;
+ cls = 'defer';
+ }
}
if (err_pend) {
+ err += ' <a href="#" onclick="toast.inf(60, L.ue_ab);" class="fsearch_explain">(' + L.u_expl + ')</a>';
}
}
@@ -886,8 +886,11 @@ if (window.Number && Number.isFinite)

function f2f(val, nd) {
// 10.toFixed(1) returns 10.00 for certain values of 10
- if (!isNum(val))
- val = 999;
+ if (!isNum(val)) {
+ val = parseFloat(val);
+ if (!isNum(val))
+ val = 999;
+ }
val = (val * Math.pow(10, nd)).toFixed(0).split('.')[0];
return nd ? (val.slice(0, -nd) || '0') + '.' + val.slice(-nd) : val;
}
@@ -1,3 +1,88 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1211-2236 `v1.16.5` 4chrome

## 🧪 new features

* #124 add workaround for a chrome bug (crash during upload) 24ce46b3
  * chrome and chromium-based browsers could OOM
  * https://issues.chromium.org/issues/383568268

* #122 "hybrid IdP", regular users can still auth while [IdP](https://github.com/9001/copyparty#identity-providers) is enabled 64501fd7
  * previously, enabling IdP would entirely disable password-based login
  * now, password-auth is attempted for requests without a valid IdP header

## 🩹 bugfixes

* the terminal window title would only change if `--no-ansi` was specified, which is exactly the opposite of what it should be (and now is) doing db3c0b09

## 🔧 other changes

* mDNS: better log messages when several IPs are added/removed a49bf81f
* webdeps: update dompurify 06868606

----

this release includes a build of [copyparty-winpe64.exe](https://github.com/9001/copyparty/releases/download/v1.16.5/copyparty-winpe64.exe) since the last one was [almost a year ago](https://github.com/9001/copyparty/releases/tag/v1.10.1)

* winpe64.exe is only for *very* specific usecases, you almost definitely *do not* want to download it, please just grab the regular [copyparty.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty.exe) instead (works on all 64bit machines running win8 or newer)

* the only difference between winpe64.exe and [copyparty32.exe](https://github.com/9001/copyparty/releases/latest/download/copyparty32.exe) is that winpe64.exe works in the win7x64 PE (rescue-env), which makes it *almost* entirely useless, and every bit as dangerous to use as copyparty32.exe


▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1207-0024 `v1.16.4` ux is hard

## 🧪 new features

* improve the upload ui so it explains how to abort an unfinished upload when someone uploads to the wrong folder by accident be6afe2d
  * also reduces serverload slightly when cloning an incoming file to multiple destinations
* u2c (commandline uploader): windows improvements 91637800
  * now supports globbing (filename wildcards) on windows
  * progressbar in the windows taskbar (requires conemu or the "new windows terminal")


▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1204-0003 `v1.16.3` 120%

## 🧪 new features

* #120 add option `--srch-excl` and volflag `srch_excl` for excluding certain paths from search results 697a4fa8
* mDNS: add workaround for https://github.com/avahi/avahi/issues/379 6c1cf68b 94d1924f
  * Avahi mDNS Reflection, sometimes used in intricate LAN setups, doesn't understand NSEC records and corrupts them
  * the workaround makes copyparty able to read the corrupted packets, but clients without a similar workaround will require either `--zm4` or `--zm6` so copyparty doesn't include the usual NSEC records
    * this is mentioned in a very loud warning in the logs when necessary
* mDNS: option to silently ignore buggy devices instead of spamming the log with parser errors 395af051
* webdav: support listing unmapped root with infinite recursion (Depth:0) 21a3f369
* embed current sort config into media URLs (gallery/music) 0f257c93 4cfdc4c5 01670827
  * ensures that anyone clicking your link will see the files in the same order as you
  * can be configured serverside (`--hsortn`, volflag `hsortn`) and clientside (`#sort` in settings)
* URL and UI options to disable checksum calculation of PUT, bup, basic uploads c5a000d2
  * also allows [choosing either md5, sha1, sha256, or blake2](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#write) instead of the default sha512
  * can give uploads a nice speed boost when copyparty is running on a potato

## 🩹 bugfixes

* webdav: more correct login challenge 2ce82339
  * the previous behavior could make some clients reluctant to send the password
* #120 forget metadata of all files (including uploads) when shadowed d168b2ac
  * thanks to @Gremious for all the debugging to narrow this down!
* #120 drop volume caches if relevant config is changed (mainly indexing filters) 2f83c6c7
* #121 couldn't access arbitrary toplevel files from accounts with `h` permission 1f5f42f2

## 🔧 other changes

* exclude thumbnails from accesslog by default 9082c470
* filesearch: show a final summary of time-elapsed and average hashing speed 8a631f04
* improve phrasing of debug messages during indexing at startup 127f414e
* `--license` no longer depends on opensource.org at build time 33c4ccff
* update deps 6cedcfbf
  * copyparty.exe: python 3.12.7 => 3.12.8
  * webdeps: hashwasm, dompurify


▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1123-2336 `v1.16.2` webdav upload fix
@@ -143,6 +143,9 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?dls` | show active downloads (do this as admin) |
| GET | `?ups` | show recent uploads from your IP |
| GET | `?ups&filter=f` | ...where URL contains `f` |
+| GET | `?ru` | show all recent uploads |
+| GET | `?ru&filter=f` | ...where URL contains `f` |
+| GET | `?ru&j` | ...as json |
| GET | `?mime=foo` | specify return mimetype `foo` |
| GET | `?v` | render markdown file at URL |
| GET | `?v` | open image/video/audio in mediaplayer |
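The new `?ru` rows document the endpoint that backs the rups page added in this changeset. A hedged usage example (the server address, password, and the combined query string are assumptions; the parameters themselves come from the table above):

```python
# hedged example of the new ?ru endpoint; assumes a copyparty server on
# localhost:3923 and an account with permission to view recent uploads
import json
import urllib.request

url = "http://127.0.0.1:3923/?ru&j&filter=documents/"
req = urllib.request.Request(url, headers={"Cookie": "cppwd=hunter2"})
ups = json.load(urllib.request.urlopen(req))
print(ups)  # recent uploads whose URL contains "documents/", as json
```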
@@ -3,7 +3,7 @@ WORKDIR /z
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
ver_hashwasm=4.12.0 \
ver_marked=4.3.0 \
- ver_dompf=3.2.2 \
+ ver_dompf=3.2.3 \
ver_mde=2.18.0 \
ver_codemirror=5.65.18 \
ver_fontawesome=5.13.0 \
@@ -105,6 +105,9 @@ copyparty/web/mde.html,
copyparty/web/mde.js,
copyparty/web/msg.css,
copyparty/web/msg.html,
+ copyparty/web/rups.css,
+ copyparty/web/rups.html,
+ copyparty/web/rups.js,
copyparty/web/shares.css,
copyparty/web/shares.html,
copyparty/web/shares.js,
@@ -80,6 +80,7 @@ var tl_cpanel = {
"ac1": "enable no304",
"ad1": "enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!",
"ae1": "active downloads:",
+ "af1": "show recent uploads",
},
};

@@ -291,6 +292,7 @@ var tl_browser = {
"cl_uopts": "up2k switches",
"cl_favico": "favicon",
"cl_bigdir": "big dirs",
+ "cl_hsort": "#sort",
"cl_keytype": "key notation",
"cl_hiddenc": "hidden columns",
"cl_hidec": "hide",

@@ -333,6 +335,7 @@ var tl_browser = {

"cdt_lim": "max number of files to show in a folder",
"cdt_ask": "when scrolling to the bottom,$Ninstead of loading more files,$Nask what to do",
+ "cdt_hsort": "how many sorting rules (<code>,sorthref</code>) to include in media-URLs. Setting this to 0 will also ignore sorting-rules included in media links when clicking them",

"tt_entree": "show navpane (directory tree sidebar)$NHotkey: B",
"tt_detree": "show breadcrumbs$NHotkey: B",

@@ -625,6 +628,7 @@ var tl_browser = {
"u_hashdone": 'hashing done',
"u_hashing": 'hash',
"u_hs": 'handshaking...',
+ "u_started": "the files are now being uploaded; see [🚀]",
"u_dupdefer": "duplicate; will be processed after all other files",
"u_actx": "click this text to prevent loss of<br />performance when switching to other windows/tabs",
"u_fixed": "OK! Fixed it 👍",

@@ -660,6 +664,7 @@ var tl_browser = {
"ue_la": 'you are currently logged in as "{0}"',
"ue_sr": 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the magnifying glass 🔎 (next to the big SEARCH button), and try uploading again\n\nsorry',
"ue_ta": 'try uploading again, it should work now',
+ "ue_ab": "this file is already being uploaded into another folder, and that upload must be completed before the file can be uploaded elsewhere.\n\nYou can abort and forget the initial upload using the top-left 🧯",
"ur_1uo": "OK: File uploaded successfully",
"ur_auo": "OK: All {0} files uploaded successfully",
"ur_1so": "OK: File found on server",

@@ -115,6 +115,7 @@ var tl_cpanel = {{
"ac1": "enable no304",
"ad1": "enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!",
"ae1": "active downloads:",
+ "af1": "show recent uploads",
}},
}};
@@ -33,14 +33,6 @@ def eprint(*a, **ka):
sys.stderr.flush()


- if MACOS:
- import posixpath
-
- posixpath.islink = nah
- os.path.islink = nah
- # 25% faster; until any tests do symlink stuff
-
-
from copyparty.__main__ import init_E
from copyparty.broker_thr import BrokerThr
from copyparty.ico import Ico