Compare commits

30 Commits

| SHA1 |
|---|
| cdface0dd5 |
| be6afe2d3a |
| 9163780000 |
| d7aa7dfe64 |
| f1decb531d |
| 99399c698b |
| 1f5f42f216 |
| 9082c4702f |
| 6cedcfbf77 |
| 8a631f045e |
| a6a2ee5b6b |
| 016708276c |
| 4cfdc4c513 |
| 0f257c9308 |
| c8104b6e78 |
| 1a1d731043 |
| c5a000d2ae |
| 94d1924fa9 |
| 6c1cf68bca |
| 395af051bd |
| 42fd66675e |
| 21a3f3699b |
| d168b2acac |
| 2ce8233921 |
| 697a4fa8a4 |
| 2f83c6c7d1 |
| 127f414e9c |
| 33c4ccffab |
| bafe7f5a09 |
| baf41112d1 |
```diff
@@ -1097,11 +1097,12 @@ using the GUI (winXP or later):
 * on winXP only, click the `Sign up for online storage` hyperlink instead and put the URL there
 * providing your password as the username is recommended; the password field can be anything or empty

 known client bugs:

 the webdav client that's built into windows has the following list of bugs; you can avoid all of these by connecting with rclone instead:
 * win7+ doesn't actually send the password to the server when reauthenticating after a reboot unless you first try to login with an incorrect password and then switch to the correct password
   * or just type your password into the username field instead to get around it entirely
 * connecting to a folder which allows anonymous read will make writing impossible, as windows has decided it doesn't need to login
   * workaround: connect twice; first to a folder which requires auth, then to the folder you actually want, and leave both of those mounted
+  * or set the server-option `--dav-auth` to force password-auth for all webdav clients
 * win7+ may open a new tcp connection for every file and sometimes forgets to close them, eventually needing a reboot
   * maybe NIC-related (??), happens with win10-ltsc on e1000e but not virtio
 * windows cannot access folders which contain filenames with invalid unicode or forbidden characters (`<>:"/\|?*`), or names ending with `.`
```
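Since the list above recommends sending the password as the username, here is a minimal way to sanity-check that behavior from outside windows; `requests` is an assumption (any HTTP client works), and the URL and password are hypothetical placeholders:

```python
# minimal sketch: exercise the password-as-username login described above;
# the server URL and password are hypothetical placeholders
import requests

BASE = "http://127.0.0.1:3923/"  # assumed copyparty instance
PASSWORD = "hunter2"             # assumed account password

# the password goes in the username field; the password field can be empty
r = requests.request("PROPFIND", BASE, auth=(PASSWORD, ""),
                     headers={"Depth": "1"})
print(r.status_code)  # expect 207 Multi-Status when auth works
```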
```diff
@@ -1268,7 +1269,7 @@ note:

 ### exclude-patterns

-to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
+to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash '\.iso$'` or the volflag `:c,nohash=\.iso$`, this has the following consequences:
 * initial indexing is way faster, especially when the volume is on a network disk
 * makes it impossible to [file-search](#file-search)
 * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
@@ -1279,6 +1280,8 @@ similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noi

 if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`

+to exclude certain filepaths from search-results, use `--srch-excl` or volflag `srch_excl` instead of `--no-idx`, for example `--srch-excl 'password|logs/[0-9]'`

 ### filesystem guards

 avoid traversing into other filesystems using `--xdev` / volflag `:c,xdev`, skipping any symlinks or bind-mounts to another HDD for example
```
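The `--srch-excl 'password|logs/[0-9]'` example above is a plain, case-sensitive regex matched against file-URLs; a quick standalone check of what it would exclude, using hypothetical paths:

```python
# sanity-check the example --srch-excl pattern from the text above;
# copyparty matches it against the file-URL (case-sensitive)
import re

ptn = re.compile(r"password|logs/[0-9]")

urls = [
    "docs/password-reset.txt",  # excluded: contains "password"
    "logs/2024/app.log",        # excluded: "logs/" followed by a digit
    "blog/2024/notes.md",       # kept: no "password", no "logs/DIGIT"
]
for u in urls:
    print(u, "->", "excluded" if ptn.search(u) else "kept")
```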
bin/u2c.py (60 changed lines)

```diff
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals

-S_VERSION = "2.6"
-S_BUILD_DT = "2024-11-10"
+S_VERSION = "2.7"
+S_BUILD_DT = "2024-12-06"

 """
 u2c.py: upload to copyparty
@@ -1033,8 +1033,8 @@ class Ctl(object):
             handshake(self.ar, file, False)

     def _fancy(self):
+        atexit.register(self.cleanup_vt100)
         if VT100 and not self.ar.ns:
-            atexit.register(self.cleanup_vt100)
             ss.scroll_region(3)

         Daemon(self.hasher)
@@ -1042,6 +1042,7 @@ class Ctl(object):
         Daemon(self.handshaker)
         Daemon(self.uploader)

+        last_sp = -1
        while True:
            with self.exit_cond:
                self.exit_cond.wait(0.07)
@@ -1080,6 +1081,12 @@ class Ctl(object):
            else:
                txt = " "

+           if not VT100:  # OSC9;4 (taskbar-progress)
+               sp = int(self.up_b * 100 / self.nbytes) or 1
+               if last_sp != sp:
+                   last_sp = sp
+                   txt += "\033]9;4;1;%d\033\\" % (sp,)
+
            if not self.up_br:
                spd = self.hash_b / ((time.time() - self.t0) or 1)
                eta = (self.nbytes - self.hash_b) / (spd or 1)
```
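The `\033]9;4;1;N\033\\` escape added above is the ConEmu/Windows-Terminal OSC 9;4 "taskbar progress" sequence (state 1 = normal progress, N = percent; state 0 removes it). A minimal standalone sketch of the same mechanism, assuming a terminal that supports it:

```python
# minimal sketch of the OSC 9;4 taskbar-progress escape used above;
# assumes a terminal that understands it (Windows Terminal, ConEmu)
import sys
import time

def taskbar_progress(pct):
    # state 1 = normal progress; pct = 0..100
    sys.stderr.write("\033]9;4;1;%d\033\\" % (pct,))
    sys.stderr.flush()

def taskbar_clear():
    # state 0 = remove the progress indicator
    sys.stderr.write("\033]9;4;0\033\\")
    sys.stderr.flush()

try:
    for pct in range(1, 101):
        taskbar_progress(pct)
        time.sleep(0.02)
finally:
    taskbar_clear()  # mirrors cleanup_vt100() in the diff
```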
```diff
@@ -1097,6 +1104,8 @@ class Ctl(object):
            tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r"

            t = "%s eta @ %s/s, %s, %d# left\033[K" % (self.eta, spd, sleft, nleft)
+           if not self.hash_b:
+               t = " now hashing..."
            eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail))

        if self.ar.wlist:
@@ -1117,7 +1126,10 @@ class Ctl(object):
            handshake(self.ar, file, False)

    def cleanup_vt100(self):
-       ss.scroll_region(None)
+       if VT100:
+           ss.scroll_region(None)
+       else:
+           eprint("\033]9;4;0\033\\")
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
@@ -1132,7 +1144,9 @@ class Ctl(object):
            isdir = stat.S_ISDIR(inf.st_mode)
            if self.ar.z or self.ar.drd:
                rd = rel if isdir else os.path.dirname(rel)
-               srd = rd.decode("utf-8", "replace").replace("\\", "/")
+               srd = rd.decode("utf-8", "replace").replace("\\", "/").rstrip("/")
+               if srd:
+                   srd += "/"
                if prd != rd:
                    prd = rd
                    ls = {}
@@ -1167,11 +1181,11 @@ class Ctl(object):
                        bnames = [x for x in ls if x not in lnodes and x != b".hist"]
                        vpath = self.ar.url.split("://")[-1].split("/", 1)[-1]
                        names = [x.decode("utf-8", WTF8) for x in bnames]
-                       locs = [vpath + srd + "/" + x for x in names]
+                       locs = [vpath + srd + x for x in names]
                        while locs:
                            req = locs
                            while req:
-                               print("DELETING ~%s/#%s" % (srd, len(req)))
+                               print("DELETING ~%s#%s" % (srd, len(req)))
                                body = json.dumps(req).encode("utf-8")
                                sc, txt = web.req(
                                    "POST", self.ar.url + "?delete", {}, body, MJ
@@ -1536,6 +1550,38 @@ source file/folder selection uses rsync syntax, meaning that:
        except:
            pass

+    # msys2 doesn't uncygpath absolute paths with whitespace
+    if not VT100:
+        zsl = []
+        for fn in ar.files:
+            if re.search("^/[a-z]/", fn):
+                fn = r"%s:\%s" % (fn[1:2], fn[3:])
+            zsl.append(fn.replace("/", "\\"))
+        ar.files = zsl
+
+    fok = []
+    fng = []
+    for fn in ar.files:
+        if os.path.exists(fn):
+            fok.append(fn)
+        elif VT100:
+            fng.append(fn)
+        else:
+            # windows leaves glob-expansion to the invoked process... okayyy let's get to work
+            from glob import glob
+
+            fns = glob(fn)
+            if fns:
+                fok.extend(fns)
+            else:
+                fng.append(fn)
+
+    if fng:
+        t = "some files/folders were not found:\n  %s"
+        raise Exception(t % ("\n  ".join(fng),))
+
+    ar.files = fok
+
    if ar.drd:
        ar.dr = True
```
```diff
@@ -1,6 +1,6 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.16.1"
+pkgver="1.16.3"
 pkgrel=1
 pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
 arch=("any")
@@ -21,7 +21,7 @@ optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tag
 )
 source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("48506881f7920ad9d528763833a8cc3d1b6df39402bbe1cb90c3ff58c865dfc6")
+sha256sums=("82e79e4b27661b23986b0eba7bd38c42d5f8a2d2c278b755134ac08588a7f27e")

build() {
    cd "${srcdir}/${pkgname}-${pkgver}"
```
```diff
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.16.1/copyparty-sfx.py",
-  "version": "1.16.1",
-  "hash": "sha256-vlxAuVtd/o11CIC6E6K6UDUdtYDzQ6u7kG3Qc3eqJ+U="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.16.3/copyparty-sfx.py",
+  "version": "1.16.3",
+  "hash": "sha256-O1yVIUJ/zy1kde4CHxeBEex2gT+HWUJ28nhHgonW6Qg="
 }
```
```diff
@@ -1124,6 +1124,8 @@ def add_zc_mdns(ap):
    ap2.add_argument("--zm6", action="store_true", help="IPv6 only")
    ap2.add_argument("--zmv", action="store_true", help="verbose mdns")
    ap2.add_argument("--zmvv", action="store_true", help="verboser mdns")
+   ap2.add_argument("--zm-no-pe", action="store_true", help="mute parser errors (invalid incoming MDNS packets)")
+   ap2.add_argument("--zm-nwa-1", action="store_true", help="disable workaround for avahi-bug #379 (corruption in Avahi's mDNS reflection feature)")
    ap2.add_argument("--zms", metavar="dhf", type=u, default="", help="list of services to announce -- d=webdav h=http f=ftp s=smb -- lowercase=plaintext uppercase=TLS -- default: all enabled services except http/https (\033[32mDdfs\033[0m if \033[33m--ftp\033[0m and \033[33m--smb\033[0m is set, \033[32mDd\033[0m otherwise)")
    ap2.add_argument("--zm-ld", metavar="PATH", type=u, default="", help="link a specific folder for webdav shares")
    ap2.add_argument("--zm-lh", metavar="PATH", type=u, default="", help="link a specific folder for http shares")
@@ -1316,7 +1318,7 @@ def add_logging(ap):
    ap2.add_argument("--log-htp", action="store_true", help="debug: print http-server threadpool scaling")
    ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="print request \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
    ap2.add_argument("--ohead", metavar="HEADER", type=u, action='append', help="print response \033[33mHEADER\033[0m; [\033[32m*\033[0m]=all")
-   ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")
+   ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|[?&]th=[wjp]|/\.(_|ql_|DS_Store$|localized$)", help="dont log URLs matching regex \033[33mRE\033[0m")


def add_admin(ap):
@@ -1401,6 +1403,7 @@ def add_db_general(ap, hcores):
    ap2.add_argument("--db-act", metavar="SEC", type=float, default=10.0, help="defer any scheduled volume reindexing until \033[33mSEC\033[0m seconds after last db write (uploads, renames, ...)")
    ap2.add_argument("--srch-time", metavar="SEC", type=int, default=45, help="search deadline -- terminate searches running for more than \033[33mSEC\033[0m seconds")
    ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
+   ap2.add_argument("--srch-excl", metavar="PTN", type=u, default="", help="regex: exclude files from search results if the file-URL matches \033[33mPTN\033[0m (case-sensitive). Example: [\033[32mpassword|logs/[0-9]\033[0m] any URL containing 'password' or 'logs/DIGIT' (volflag=srch_excl)")
    ap2.add_argument("--dotsrch", action="store_true", help="show dotfiles in search results (volflags: dotsrch | nodotsrch)")
@@ -1458,6 +1461,7 @@ def add_ui(ap, retry):
    ap2.add_argument("--au-vol", metavar="0-100", type=int, default=50, choices=range(0, 101), help="default audio/video volume percent")
    ap2.add_argument("--sort", metavar="C,C,C", type=u, default="href", help="default sort order, comma-separated column IDs (see header tooltips), prefix with '-' for descending. Examples: \033[32mhref -href ext sz ts tags/Album tags/.tn\033[0m (volflag=sort)")
    ap2.add_argument("--nsort", action="store_true", help="default-enable natural sort of filenames with leading numbers (volflag=nsort)")
+   ap2.add_argument("--hsortn", metavar="N", type=int, default=2, help="number of sorting rules to include in media URLs by default (volflag=hsortn)")
    ap2.add_argument("--unlist", metavar="REGEX", type=u, default="", help="don't show files matching \033[33mREGEX\033[0m in file list. Purely cosmetic! Does not affect API calls, just the browser. Example: [\033[32m\\.(js|css)$\033[0m] (volflag=unlist)")
    ap2.add_argument("--favico", metavar="TXT", type=u, default="c 000 none" if retry else "🎉 000 none", help="\033[33mfavicon-text\033[0m [ \033[33mforeground\033[0m [ \033[33mbackground\033[0m ] ], set blank to disable")
    ap2.add_argument("--mpmc", metavar="URL", type=u, default="", help="change the mediaplayer-toggle mouse cursor; URL to a folder with {2..5}.png inside (or disable with [\033[32m.\033[0m])")
```
```diff
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 16, 2)
+VERSION = (1, 16, 4)
 CODENAME = "COPYparty"
-BUILD_DT = (2024, 11, 23)
+BUILD_DT = (2024, 12, 7)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
```
```diff
@@ -1880,6 +1880,7 @@ class AuthSrv(object):
            ["no_hash", "nohash"],
            ["no_idx", "noidx"],
            ["og_ua", "og_ua"],
+           ["srch_excl", "srch_excl"],
        ]:
            if vf in vol.flags:
                ptn = re.compile(vol.flags.pop(vf))
@@ -2086,6 +2087,22 @@ class AuthSrv(object):
                self.log(t.format(mtp), 1)
                errors = True

+       for vol in vfs.all_vols.values():
+           re1: Optional[re.Pattern] = vol.flags.get("srch_excl")
+           excl = [re1.pattern] if re1 else []
+
+           vpaths = []
+           vtop = vol.vpath
+           for vp2 in vfs.all_vols.keys():
+               if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
+                   vpaths.append(re.escape(vp2[len(vtop) :].lstrip("/")))
+           if vpaths:
+               excl.append("^(%s)/" % ("|".join(vpaths),))
+
+           vol.flags["srch_re_dots"] = re.compile("|".join(excl or ["^$"]))
+           excl.extend([r"^\.", r"/\."])
+           vol.flags["srch_re_nodot"] = re.compile("|".join(excl))
+
        have_daw = False
        for vol in vfs.all_nodes.values():
            daw = vol.flags.get("daw") or self.args.daw
@@ -2314,6 +2331,7 @@ class AuthSrv(object):
            "idx": "e2d" in vf,
            "itag": "e2t" in vf,
            "dnsort": "nsort" in vf,
+           "dhsortn": vf["hsortn"],
            "dsort": vf["sort"],
            "dcrop": vf["crop"],
            "dth3x": vf["th3x"],
@@ -2339,6 +2357,7 @@ class AuthSrv(object):
            "dgrid": "grid" in vf,
            "dgsel": "gsel" in vf,
            "dnsort": "nsort" in vf,
+           "dhsortn": vf["hsortn"],
            "dsort": vf["sort"],
            "dcrop": vf["crop"],
            "dth3x": vf["th3x"],
```
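The `srch_re_dots` / `srch_re_nodot` construction above is a plain regex-union trick: join all exclusion patterns with `|`, and fall back to `^$` when there are none (it only matches the empty string, and search paths are never empty, so it excludes nothing). A standalone sketch of the same idea, with hypothetical patterns:

```python
# standalone sketch of the regex-union trick used for srch_re_dots /
# srch_re_nodot above; the patterns here are hypothetical examples
import re

def build_excluder(patterns, hide_dotfiles):
    pats = list(patterns)
    if hide_dotfiles:
        # also exclude dotfiles anywhere in the path
        pats.extend([r"^\.", r"/\."])
    # "^$" only matches the empty string, so it excludes nothing,
    # but keeps re.compile happy when there are no patterns at all
    return re.compile("|".join(pats or ["^$"]))

rex = build_excluder([r"password", r"logs/[0-9]"], hide_dotfiles=True)
for vp in ["a/passwords.txt", ".hist/up2k.db", "music/song.flac"]:
    print(vp, bool(rex.search(vp)))
# a/passwords.txt True
# .hist/up2k.db True
# music/song.flac False
```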
```diff
@@ -70,6 +70,7 @@ def vf_vmap() -> dict[str, str]:
    }
    for k in (
        "dbd",
+       "hsortn",
        "html_head",
        "lg_sbf",
        "md_sbf",
@@ -191,6 +192,7 @@ flagcats = {
        "xvol": "do not follow symlinks leaving the volume root",
        "dotsrch": "show dotfiles in search results",
        "nodotsrch": "hide dotfiles in search results (default)",
+       "srch_excl": "exclude search results with URL matching this regex",
    },
    'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
        "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
```
```diff
@@ -14,6 +14,7 @@ import re
 import socket
 import stat
+import string
 import sys
 import threading  # typechk
 import time
 import uuid
@@ -76,6 +77,7 @@ from .util import (
    html_escape,
    humansize,
    ipnorm,
+   justcopy,
    load_resource,
    loadpy,
    log_reloc,
@@ -124,6 +126,8 @@ if not hasattr(socket, "AF_UNIX"):

 _ = (argparse, threading)

+USED4SEC = {"usedforsecurity": False} if sys.version_info > (3, 9) else {}
+
 NO_CACHE = {"Cache-Control": "no-cache"}

 ALL_COOKIES = "k304 no304 js idxh dots cppwd cppws".split()
@@ -137,6 +141,10 @@ READMES = [[0, ["preadme.md", "PREADME.md"]], [1, ["readme.md", "README.md"]]]

 RSS_SORT = {"m": "mt", "u": "at", "n": "fn", "s": "sz"}

+A_FILE = os.stat_result(
+    (0o644, -1, -1, 1, 1000, 1000, 8, 0x39230101, 0x39230101, 0x39230101)
+)
+

 class HttpCli(object):
    """
@@ -1243,7 +1251,7 @@ class HttpCli(object):
        self.log("RSS %s @%s" % (self.req, self.uname))

        if not self.can_read:
-           return self.tx_404()
+           return self.tx_404(True)

        vn = self.vn
        if not vn.flags.get("rss"):
@@ -1424,7 +1432,8 @@ class HttpCli(object):

        depth = self.headers.get("depth", "infinity").lower()
        if depth == "infinity":
-           if not self.can_read:
+           # allow depth:0 from unmapped root, but require read-axs otherwise
+           if not self.can_read and (self.vpath or self.asrv.vfs.realpath):
                t = "depth:infinity requires read-access in /%s"
                t = t % (self.vpath,)
                self.log(t, 3)
@@ -1484,7 +1493,7 @@ class HttpCli(object):
            t2 = " or 'infinity'" if self.args.dav_inf else ""
            raise Pebkac(412, t.format(depth, t2))

-       if not self.can_read and not self.can_write and not self.can_get and not fgen:
+       if not self.can_read and not self.can_write and not fgen:
            self.log("inaccessible: [%s]" % (self.vpath,))
            raise Pebkac(401, "authenticate")
@@ -1766,7 +1775,7 @@ class HttpCli(object):

        if not self.can_write:
            t = "user %s does not have write-access under /%s"
-           raise Pebkac(403, t % (self.uname, self.vn.vpath))
+           raise Pebkac(403 if self.pw else 401, t % (self.uname, self.vn.vpath))

        if not self.args.no_dav and self._applesan():
            return self.headers.get("content-length") == "0"
@@ -2059,10 +2068,31 @@ class HttpCli(object):
            # small toctou, but better than clobbering a hardlink
            wunlink(self.log, path, vfs.flags)

+       hasher = None
+       copier = hashcopy
+       if "ck" in self.ouparam or "ck" in self.headers:
+           zs = self.ouparam.get("ck") or self.headers.get("ck") or ""
+           if not zs or zs == "no":
+               copier = justcopy
+           elif zs == "md5":
+               hasher = hashlib.md5(**USED4SEC)
+           elif zs == "sha1":
+               hasher = hashlib.sha1(**USED4SEC)
+           elif zs == "sha256":
+               hasher = hashlib.sha256(**USED4SEC)
+           elif zs in ("blake2", "b2"):
+               hasher = hashlib.blake2b(**USED4SEC)
+           elif zs in ("blake2s", "b2s"):
+               hasher = hashlib.blake2s(**USED4SEC)
+           elif zs == "sha512":
+               pass
+           else:
+               raise Pebkac(500, "unknown hash alg")
+
        f, fn = ren_open(fn, *open_a, **params)
        try:
            path = os.path.join(fdir, fn)
-           post_sz, sha_hex, sha_b64 = hashcopy(reader, f, None, 0, self.args.s_wr_slp)
+           post_sz, sha_hex, sha_b64 = copier(reader, f, hasher, 0, self.args.s_wr_slp)
        finally:
            f.close()
@@ -2299,8 +2329,8 @@ class HttpCli(object):
            # kinda silly but has the least side effects
            return self.handle_new_md()

-       if act == "bput":
-           return self.handle_plain_upload(file0)
+       if act in ("bput", "uput"):
+           return self.handle_plain_upload(file0, act == "uput")

        if act == "tput":
            return self.handle_text_upload()
@@ -2917,13 +2947,41 @@ class HttpCli(object):
        )

    def handle_plain_upload(
-       self, file0: list[tuple[str, Optional[str], Generator[bytes, None, None]]]
+       self,
+       file0: list[tuple[str, Optional[str], Generator[bytes, None, None]]],
+       nohash: bool,
    ) -> bool:
        assert self.parser
        nullwrite = self.args.nw
        vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
        self._assert_safe_rem(rem)

+       halg = "sha512"
+       hasher = None
+       copier = hashcopy
+       if nohash:
+           halg = ""
+           copier = justcopy
+       elif "ck" in self.ouparam or "ck" in self.headers:
+           halg = self.ouparam.get("ck") or self.headers.get("ck") or ""
+           if not halg or halg == "no":
+               copier = justcopy
+               halg = ""
+           elif halg == "md5":
+               hasher = hashlib.md5(**USED4SEC)
+           elif halg == "sha1":
+               hasher = hashlib.sha1(**USED4SEC)
+           elif halg == "sha256":
+               hasher = hashlib.sha256(**USED4SEC)
+           elif halg in ("blake2", "b2"):
+               hasher = hashlib.blake2b(**USED4SEC)
+           elif halg in ("blake2s", "b2s"):
+               hasher = hashlib.blake2s(**USED4SEC)
+           elif halg == "sha512":
+               pass
+           else:
+               raise Pebkac(500, "unknown hash alg")
+
        upload_vpath = self.vpath
        lim = vfs.get_dbv(rem)[0].lim
        fdir_base = vfs.canonical(rem)
@@ -3053,8 +3111,8 @@ class HttpCli(object):
            try:
                tabspath = os.path.join(fdir, tnam)
                self.log("writing to {}".format(tabspath))
-               sz, sha_hex, sha_b64 = hashcopy(
-                   p_data, f, None, max_sz, self.args.s_wr_slp
+               sz, sha_hex, sha_b64 = copier(
+                   p_data, f, hasher, max_sz, self.args.s_wr_slp
                )
                if sz == 0:
                    raise Pebkac(400, "empty files in post")
@@ -3186,10 +3244,15 @@ class HttpCli(object):
            jmsg["error"] = errmsg
            errmsg = "ERROR: " + errmsg

+       if halg:
+           file_fmt = '{0}: {1} // {2} // {3} bytes // <a href="/{4}">{5}</a> {6}\n'
+       else:
+           file_fmt = '{3} bytes // <a href="/{4}">{5}</a> {6}\n'
+
        for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
            vsuf = ""
            if (self.can_read or self.can_upget) and "fk" in vfs.flags:
-               st = bos.stat(ap)
+               st = A_FILE if nullwrite else bos.stat(ap)
                alg = 2 if "fka" in vfs.flags else 1
                vsuf = "?k=" + self.gen_fk(
                    alg,
@@ -3204,7 +3267,8 @@ class HttpCli(object):

            vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
            rel_url = quotep(self.args.RS + vpath) + vsuf
-           msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
+           msg += file_fmt.format(
+               halg,
                sha_hex[:56],
                sha_b64,
                sz,
@@ -3220,13 +3284,14 @@ class HttpCli(object):
                    self.host,
                    rel_url,
                ),
-               "sha512": sha_hex[:56],
-               "sha_b64": sha_b64,
                "sz": sz,
                "fn": lfn,
                "fn_orig": ofn,
                "path": rel_url,
            }
+           if halg:
+               jpart[halg] = sha_hex[:56]
+               jpart["sha_b64"] = sha_b64
            jmsg["files"].append(jpart)

        vspd = self._spd(sz_total, False)
@@ -4624,6 +4689,18 @@ class HttpCli(object):
        if "th" in self.ouparam:
            return self.tx_svg("e" + pt[:3])

+       # most webdav clients will not send credentials until they
+       # get 401'd, so send a challenge if we're Absolutely Sure
+       # that the client is not a graphical browser
+       if (
+           rc == 403
+           and not self.pw
+           and not self.ua.startswith("Mozilla/")
+           and "sec-fetch-site" not in self.headers
+       ):
+           rc = 401
+           self.out_headers["WWW-Authenticate"] = 'Basic realm="a"'
+
        t = t.format(self.args.SR)
        qv = quotep(self.vpaths) + self.ourlq()
        html = self.j2s(
@@ -5265,7 +5342,7 @@ class HttpCli(object):
            st = bos.stat(abspath)
        except:
            if "on404" not in vn.flags:
-               return self.tx_404()
+               return self.tx_404(not self.can_read)

            ret = self.on40x(vn.flags["on404"], vn, rem)
            if ret == "true":
@@ -5276,9 +5353,9 @@ class HttpCli(object):
                try:
                    st = bos.stat(abspath)
                except:
-                   return self.tx_404()
+                   return self.tx_404(not self.can_read)
            else:
-               return self.tx_404()
+               return self.tx_404(not self.can_read)

        if rem.startswith(".hist/up2k.") or (
            rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
@@ -5405,13 +5482,13 @@ class HttpCli(object):
                vrem = vjoin(vrem, fn)
                abspath = ap2
                break
-           elif self.vpath.rsplit("/", 1)[1] in ("index.htm", "index.html"):
+           elif self.vpath.rsplit("/", 1)[-1] in ("index.htm", "index.html"):
                fk_pass = True

        if not is_dir and (self.can_read or self.can_get):
            if not self.can_read and not fk_pass and "fk" in vn.flags:
                if not use_filekey:
-                   return self.tx_404()
+                   return self.tx_404(True)

            if add_og and not abspath.lower().endswith(".md"):
                if og_ua or self.host not in self.headers.get("referer", ""):
```
```diff
@@ -25,6 +25,7 @@ from .stolen.dnslib import (
    DNSHeader,
    DNSQuestion,
    DNSRecord,
+   set_avahi_379,
 )
 from .util import CachedSet, Daemon, Netdev, list_ips, min_ex

@@ -72,6 +73,9 @@ class MDNS(MCast):
        self.ngen = ngen
        self.ttl = 300

+       if not self.args.zm_nwa_1:
+           set_avahi_379()
+
        zs = self.args.name + ".local."
        zs = zs.encode("ascii", "replace").decode("ascii", "replace")
        self.hn = "-".join(x for x in zs.split("?") if x) or (
@@ -336,6 +340,9 @@ class MDNS(MCast):
                    self.log("stopped", 2)
                    return

+               if self.args.zm_no_pe:
+                   continue
+
                t = "{} {} \033[33m|{}| {}\n{}".format(
                    self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
                )
```
```diff
@@ -8,7 +8,7 @@ from itertools import chain
 from .bimap import Bimap, BimapError
 from .bit import get_bits, set_bits
 from .buffer import BufferError
-from .label import DNSBuffer, DNSLabel
+from .label import DNSBuffer, DNSLabel, set_avahi_379
 from .ranges import IP4, IP6, H, I, check_bytes


@@ -426,7 +426,7 @@ class RR(object):
            if rdlength:
                rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
            else:
-               rdata = ""
+               rdata = RD(b"a")
            return cls(rname, rtype, rclass, ttl, rdata)
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))
```
```diff
@@ -11,6 +11,23 @@ LDH = set(range(33, 127))
 ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")


+avahi_379 = 0
+
+
+def set_avahi_379():
+    global avahi_379
+    avahi_379 = 1
+
+
+def log_avahi_379(args):
+    global avahi_379
+    if avahi_379 == 2:
+        return
+    avahi_379 = 2
+    t = "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d];\n\033[35m NOTE: this is probably avahi-bug #379, packet corruption in Avahi's mDNS-reflection feature. Copyparty has a workaround and is OK, but other devices need either --zm4 or --zm6"
+    raise BufferError(t % args)
+
+
 class DNSLabelError(Exception):
    pass

@@ -96,8 +113,11 @@ class DNSBuffer(Buffer):
                )
                if pointer < self.offset:
                    self.offset = pointer
+               elif avahi_379:
+                   log_avahi_379((self.offset, pointer, len(self.data)))
+                   label.extend(b"a")
+                   break
                else:
                    raise BufferError(
                        "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
                        % (self.offset, pointer, len(self.data))
```
```diff
@@ -793,7 +793,7 @@ class SvcHub(object):
            al.exp_md = odfusion(exp, al.exp_md.replace(" ", ","))
            al.exp_lg = odfusion(exp, al.exp_lg.replace(" ", ","))

-       for k in ["no_hash", "no_idx", "og_ua"]:
+       for k in ["no_hash", "no_idx", "og_ua", "srch_excl"]:
            ptn = getattr(self.args, k)
            if ptn:
                setattr(self.args, k, re.compile(ptn))
```
```diff
@@ -324,7 +324,8 @@ class U2idx(object):
        sort: bool,
        lim: int,
    ) -> tuple[list[dict[str, Any]], list[str], bool]:
-       if self.args.srch_dbg:
+       dbg = self.args.srch_dbg
+       if dbg:
            t = "searching across all %s volumes in which the user has 'r' (full read access):\n  %s"
            zs = "\n  ".join(["/%s = %s" % (x.vpath, x.realpath) for x in vols])
            self.log(t % (len(vols), zs), 5)
@@ -367,14 +368,14 @@ class U2idx(object):
            if not cur:
                continue

-           excl = []
-           for vp2 in self.asrv.vfs.all_vols.keys():
-               if vp2.startswith((vtop + "/").lstrip("/")) and vtop != vp2:
-                   excl.append(vp2[len(vtop) :].lstrip("/"))
+           dots = flags.get("dotsrch") and uname in vol.axs.udot
+           zs = "srch_re_dots" if dots else "srch_re_nodot"
+           rex: re.Pattern = flags.get(zs)  # type: ignore

-           if self.args.srch_dbg:
-               t = "searching in volume /%s (%s), excludelist %s"
-               self.log(t % (vtop, ptop, excl), 5)
+           if dbg:
+               t = "searching in volume /%s (%s), excluding %s"
+               self.log(t % (vtop, ptop, rex.pattern), 5)
+               rex_cfg: Optional[re.Pattern] = flags.get("srch_excl")

            self.active_cur = cur
@@ -387,7 +388,6 @@ class U2idx(object):

            sret = []
            fk = flags.get("fk")
-           dots = flags.get("dotsrch") and uname in vol.axs.udot
            fk_alg = 2 if "fka" in flags else 1
            c = cur.execute(uq, tuple(vuv))
            for hit in c:
@@ -396,20 +396,23 @@ class U2idx(object):
                if rd.startswith("//") or fn.startswith("//"):
                    rd, fn = s3dec(rd, fn)

-               if rd in excl or any([x for x in excl if rd.startswith(x + "/")]):
-                   if self.args.srch_dbg:
-                       zs = vjoin(vjoin(vtop, rd), fn)
-                       t = "database inconsistency in volume '/%s'; ignoring: %s"
-                       self.log(t % (vtop, zs), 1)
-                   continue
+               vp = vjoin(vjoin(vtop, rd), fn)

-               rp = quotep("/".join([x for x in [vtop, rd, fn] if x]))
-               if not dots and "/." in ("/" + rp):
+               if vp in seen_rps:
                    continue

-               if rp in seen_rps:
+               if rex.search(vp):
+                   if dbg:
+                       if rex_cfg and rex_cfg.search(vp):  # type: ignore
+                           self.log("filtered by srch_excl: %s" % (vp,), 6)
+                       elif not dots and "/." in ("/" + vp):
+                           pass
+                       else:
+                           t = "database inconsistency in volume '/%s'; ignoring: %s"
+                           self.log(t % (vtop, vp), 1)
                    continue

+               rp = quotep(vp)
                if not fk:
                    suf = ""
                else:
@@ -431,7 +434,7 @@ class U2idx(object):
                if lim < 0:
                    break

-               if self.args.srch_dbg:
+               if dbg:
                    t = "in volume '/%s': hit: %s"
                    self.log(t % (vtop, rp), 5)
@@ -461,7 +464,7 @@ class U2idx(object):
            ret.extend(sret)
            # print("[{}] {}".format(ptop, sret))

-           if self.args.srch_dbg:
+           if dbg:
                t = "in volume '/%s': got %d hits, %d total so far"
                self.log(t % (vtop, len(sret), len(ret)), 5)
```
```diff
@@ -88,16 +88,23 @@ if TYPE_CHECKING:
 zsg = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
 CV_EXTS = set(zsg.split(","))

+zsg = "nohash noidx xdev xvol"
+VF_AFFECTS_INDEXING = set(zsg.split(" "))
+

 SBUSY = "cannot receive uploads right now;\nserver busy with %s.\nPlease wait; the client will retry..."

 HINT_HISTPATH = "you could try moving the database to another location (preferably an SSD or NVME drive) using either the --hist argument (global option for all volumes), or the hist volflag (just for this volume)"


+NULLSTAT = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
+
+
 class Dbw(object):
-    def __init__(self, c: "sqlite3.Cursor", n: int, t: float) -> None:
+    def __init__(self, c: "sqlite3.Cursor", n: int, nf: int, t: float) -> None:
        self.c = c
        self.n = n
+       self.nf = nf
        self.t = t


@@ -1074,7 +1081,8 @@ class Up2k(object):
        ft = "\033[0;32m{}{:.0}"
        ff = "\033[0;35m{}{:.0}"
        fv = "\033[0;36m{}:\033[90m{}"
-       fx = set(("html_head", "rm_re_t", "rm_re_r", "mv_re_t", "mv_re_r"))
+       zs = "html_head mv_re_r mv_re_t rm_re_r rm_re_t srch_re_dots srch_re_nodot"
+       fx = set(zs.split())
        fd = vf_bmap()
        fd.update(vf_cmap())
        fd.update(vf_vmap())
@@ -1231,10 +1239,17 @@ class Up2k(object):
    def _verify_db_cache(self, cur: "sqlite3.Cursor", vpath: str) -> None:
        # check if list of intersecting volumes changed since last use; drop caches if so
        prefix = (vpath + "/").lstrip("/")
-       zsl = [x for x in self.vfs.all_vols if x.startswith(prefix)]
-       zsl = [x[len(prefix) :] for x in zsl]
-       zsl.sort()
-       zb = hashlib.sha1("\n".join(zsl).encode("utf-8", "replace")).digest()
+       vps = [x for x in self.vfs.all_vols if x.startswith(prefix)]
+       vps.sort()
+       seed = [x[len(prefix) :] for x in vps]
+
+       # also consider volflags which affect indexing
+       for vp in vps:
+           vf = self.vfs.all_vols[vp].flags
+           vf = {k: v for k, v in vf.items() if k in VF_AFFECTS_INDEXING}
+           seed.append(str(sorted(vf.items())))
+
+       zb = hashlib.sha1("\n".join(seed).encode("utf-8", "replace")).digest()
        vcfg = ub64enc(zb[:18]).decode("ascii")

        c = cur.execute("select v from kv where k = 'volcfg'")
@@ -1267,7 +1282,7 @@ class Up2k(object):
        assert reg and self.pp  # !rm
        cur, db_path = reg

-       db = Dbw(cur, 0, time.time())
+       db = Dbw(cur, 0, 0, time.time())
        self.pp.n = next(db.c.execute("select count(w) from up"))[0]

        excl = [
@@ -1319,7 +1334,7 @@ class Up2k(object):
            self.hub.log_stacks()

        if db.n:
-           self.log("commit {} new files".format(db.n))
+           self.log("commit %d new files; %d updates" % (db.nf, db.n))

        if self.args.no_dhash:
            if db.c.execute("select d from dh").fetchone():
@@ -1621,12 +1636,13 @@ class Up2k(object):
                # skip upload hooks by not providing vflags
                self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, wark, "", "", ip, at)
                db.n += 1
+               db.nf += 1
                tfa += 1
                td = time.time() - db.t
                if db.n >= 4096 or td >= 60:
-                   self.log("commit {} new files".format(db.n))
+                   self.log("commit %d new files; %d updates" % (db.nf, db.n))
                    db.c.connection.commit()
-                   db.n = 0
+                   db.n = db.nf = 0
                    db.t = time.time()

        if not self.args.no_dhash:
@@ -1639,7 +1655,7 @@ class Up2k(object):
        # drop shadowed folders
        for sh_rd in unreg:
            n = 0
-           q = "select count(w) from up where (rd=? or rd like ?||'/%') and +at == 0"
+           q = "select count(w) from up where (rd=? or rd like ?||'/%')"
            for sh_erd in [sh_rd, "//" + w8b64enc(sh_rd)]:
                try:
                    erd_erd = (sh_erd, sh_erd)
@@ -1657,7 +1673,7 @@ class Up2k(object):
                q = "delete from dh where (d = ? or d like ?||'/%')"
                db.c.execute(q, erd_erd)

-               q = "delete from up where (rd=? or rd like ?||'/%') and +at == 0"
+               q = "delete from up where (rd=? or rd like ?||'/%')"
                db.c.execute(q, erd_erd)
                tfa += n
@@ -2929,7 +2945,7 @@ class Up2k(object):
                    raise Exception()
            except Exception as ex:
                if n4g:
-                   st = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
+                   st = NULLSTAT
                else:
                    lost.append((cur, dp_dir, dp_fn))
                    continue
@@ -3077,7 +3093,8 @@ class Up2k(object):
            if cur:
                dupe = (cj["prel"], cj["name"], cj["lmod"])
                try:
-                   self.dupesched[src].append(dupe)
+                   if dupe not in self.dupesched[src]:
+                       self.dupesched[src].append(dupe)
                except:
                    self.dupesched[src] = [dupe]
@@ -4660,6 +4677,13 @@ class Up2k(object):
            t = "forgetting partial upload {} ({})"
            p = self._vis_job_progress(job)
            self.log(t.format(wark, p))

+           src = djoin(ptop, vrem)
+           zi = len(self.dupesched.pop(src, []))
+           if zi:
+               t = "...and forgetting %d links in dupesched"
+               self.log(t % (zi,))
+
        assert wark
        del reg[wark]
```
```diff
@@ -2796,6 +2796,26 @@ def yieldfile(fn: str, bufsz: int) -> Generator[bytes, None, None]:
            yield buf


+def justcopy(
+    fin: Generator[bytes, None, None],
+    fout: Union[typing.BinaryIO, typing.IO[Any]],
+    hashobj: Optional["hashlib._Hash"],
+    max_sz: int,
+    slp: float,
+) -> tuple[int, str, str]:
+    tlen = 0
+    for buf in fin:
+        tlen += len(buf)
+        if max_sz and tlen > max_sz:
+            continue
+
+        fout.write(buf)
+        if slp:
+            time.sleep(slp)
+
+    return tlen, "checksum-disabled", "checksum-disabled"
+
+
 def hashcopy(
    fin: Generator[bytes, None, None],
    fout: Union[typing.BinaryIO, typing.IO[Any]],
@@ -3506,7 +3526,6 @@ def runhook(
    txt: str,
 ) -> dict[str, Any]:
    assert broker or up2k  # !rm
-   asrv = (broker or up2k).asrv
    args = (broker or up2k).args
    vp = vp.replace("\\", "/")
    ret = {"rc": 0}
```
```diff
@@ -2785,6 +2785,7 @@ html.b #u2conf a.b:hover {
    padding-left: .2em;
 }
 .fsearch_explain {
    color: var(--a-dark);
    padding-left: .7em;
+   font-size: 1.1em;
    line-height: 0;
```
```diff
@@ -206,6 +206,7 @@ var Ls = {
        "cl_uopts": "up2k switches",
        "cl_favico": "favicon",
        "cl_bigdir": "big dirs",
+       "cl_hsort": "#sort",
        "cl_keytype": "key notation",
        "cl_hiddenc": "hidden columns",
        "cl_hidec": "hide",
@@ -248,6 +249,7 @@ var Ls = {

        "cdt_lim": "max number of files to show in a folder",
        "cdt_ask": "when scrolling to the bottom,$Ninstead of loading more files,$Nask what to do",
+       "cdt_hsort": "how many sorting rules (<code>,sorthref</code>) to include in media-URLs. Setting this to 0 will also ignore sorting-rules included in media links when clicking them",

        "tt_entree": "show navpane (directory tree sidebar)$NHotkey: B",
        "tt_detree": "show breadcrumbs$NHotkey: B",
@@ -521,6 +523,7 @@ var Ls = {
        "u_pott": "<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>",
        "u_ever": "this is the basic uploader; up2k needs at least<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
        "u_su2k": 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better',
+       "u_uput": 'optimize for speed (skip checksum)',
        "u_ewrite": 'you do not have write-access to this folder',
        "u_eread": 'you do not have read-access to this folder',
        "u_enoi": 'file-search is not enabled in server config',
@@ -539,6 +542,7 @@ var Ls = {
        "u_hashdone": 'hashing done',
        "u_hashing": 'hash',
        "u_hs": 'handshaking...',
+       "u_started": "the files are now being uploaded; see [🚀]",
        "u_dupdefer": "duplicate; will be processed after all other files",
        "u_actx": "click this text to prevent loss of<br />performance when switching to other windows/tabs",
        "u_fixed": "OK! Fixed it 👍",
@@ -574,6 +578,7 @@ var Ls = {
        "ue_la": 'you are currently logged in as "{0}"',
        "ue_sr": 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the magnifying glass 🔎 (next to the big SEARCH button), and try uploading again\n\nsorry',
        "ue_ta": 'try uploading again, it should work now',
+       "ue_ab": "this file is already being uploaded into another folder, and that upload must be completed before the file can be uploaded elsewhere.\n\nYou can abort and forget the initial upload using the top-left 🧯",
        "ur_1uo": "OK: File uploaded successfully",
        "ur_auo": "OK: All {0} files uploaded successfully",
        "ur_1so": "OK: File found on server",
@@ -790,6 +795,7 @@ var Ls = {
        "cl_uopts": "up2k-brytere",
        "cl_favico": "favicon",
        "cl_bigdir": "store mapper",
+       "cl_hsort": "#sort",
        "cl_keytype": "notasjon for musikalsk dur",
        "cl_hiddenc": "skjulte kolonner",
        "cl_hidec": "skjul",
@@ -832,6 +838,7 @@ var Ls = {

        "cdt_lim": "maks antall filer å vise per mappe",
        "cdt_ask": "vis knapper for å laste flere filer nederst på siden istedenfor å gradvis laste mer av mappen når man scroller ned",
+       "cdt_hsort": "antall sorterings-regler (<code>,sorthref</code>) som skal inkluderes når media-URL'er genereres. Hvis denne er 0 så vil sorterings-regler i URL'er hverken bli generert eller lest",

        "tt_entree": "bytt til mappehierarki$NSnarvei: B",
        "tt_detree": "bytt til tradisjonell sti-visning$NSnarvei: B",
@@ -1105,6 +1112,7 @@ var Ls = {
        "u_pott": "<p>filer: <b>{0}</b> ferdig, <b>{1}</b> feilet, <b>{2}</b> behandles, <b>{3}</b> i kø</p>",
        "u_ever": "dette er den primitive opplasteren; up2k krever minst:<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
        "u_su2k": 'dette er den primitive opplasteren; <a href="#" id="u2yea">up2k</a> er bedre',
+       "u_uput": 'litt raskere (uten sha512)',
        "u_ewrite": 'du har ikke skrivetilgang i denne mappen',
        "u_eread": 'du har ikke lesetilgang i denne mappen',
        "u_enoi": 'filsøk er deaktivert i serverkonfigurasjonen',
@@ -1123,6 +1131,7 @@ var Ls = {
        "u_hashdone": 'befaring ferdig',
        "u_hashing": 'les',
        "u_hs": 'serveren tenker...',
+       "u_started": "filene blir nå lastet opp 🚀",
        "u_dupdefer": "duplikat; vil bli håndtert til slutt",
        "u_actx": "klikk her for å forhindre tap av<br />ytelse ved bytte til andre vinduer/faner",
        "u_fixed": "OK! Løste seg 👍",
@@ -1158,6 +1167,7 @@ var Ls = {
        "ue_la": 'du er logget inn som "{0}"',
        "ue_sr": 'du er i filsøk-modus\n\nbytt til opplastning ved å klikke på forstørrelsesglasset 🔎 (ved siden av den store FILSØK-knappen) og prøv igjen\n\nsorry',
        "ue_ta": 'prøv å laste opp igjen, det burde funke nå',
+       "ue_ab": "den samme filen er allerede under opplastning til en annen mappe, og den må fullføres der før filen kan lastes opp andre steder.\n\nDu kan avbryte og glemme den påbegynte opplastningen ved hjelp av 🧯 oppe til venstre",
        "ur_1uo": "OK: Filen ble lastet opp",
        "ur_auo": "OK: Alle {0} filene ble lastet opp",
        "ur_1so": "OK: Filen ble funnet på serveren",
@@ -1374,6 +1384,7 @@ var Ls = {
        "cl_uopts": "up2k 开关",
        "cl_favico": "网站图标",
        "cl_bigdir": "最大目录数",
+       "cl_hsort": "#sort", //m
        "cl_keytype": "键位符号",
        "cl_hiddenc": "隐藏列",
        "cl_hidec": "隐藏",
@@ -1416,6 +1427,7 @@ var Ls = {

        "cdt_lim": "文件夹中显示的最大文件数",
        "cdt_ask": "滚动到底部时,$N不会加载更多文件,$N而是询问你该怎么做",
+       "cdt_hsort": "包含在媒体 URL 中的排序规则 (<code>,sorthref</code>) 数量。将其设置为 0 时,点击媒体链接时也会忽略排序规则。", //m

        "tt_entree": "显示导航面板(目录树侧边栏)$N快捷键: B",
        "tt_detree": "显示面包屑导航$N快捷键: B",
@@ -1689,6 +1701,7 @@ var Ls = {
        "u_pott": "<p>个文件: <b>{0}</b> 已完成, <b>{1}</b> 失败, <b>{2}</b> 正在处理, <b>{3}</b> 排队中</p>",
        "u_ever": "这是基本的上传工具; up2k 需要至少<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
        "u_su2k": '这是基本的上传工具;<a href="#" id="u2yea">up2k</a> 更好',
+       "u_uput": '提高速度(跳过校验和)',
        "u_ewrite": '你对这个文件夹没有写入权限',
        "u_eread": '你对这个文件夹没有读取权限',
        "u_enoi": '文件搜索在服务器配置中未启用',
@@ -1707,6 +1720,7 @@ var Ls = {
        "u_hashdone": '哈希完成',
        "u_hashing": '哈希',
        "u_hs": '正在等待服务器...',
+       "u_started": "文件现在正在上传 🚀", //m
        "u_dupdefer": "这是一个重复文件。它将在所有其他文件上传后进行处理",
        "u_actx": "单击此文本以防止切换到其他窗口/选项卡时性能下降",
        "u_fixed": "好! 已修复 👍",
@@ -1742,6 +1756,7 @@ var Ls = {
        "ue_la": '你当前以 "{0}" 登录',
        "ue_sr": '你当前处于文件搜索模式\n\n通过点击大搜索按钮旁边的放大镜 🔎 切换到上传模式,然后重试上传\n\n抱歉',
        "ue_ta": '尝试再次上传,现在应该能正常工作',
+       "ue_ab": "这份文件正在上传到另一个文件夹,必须完成该上传后,才能将文件上传到其他位置。\n\n您可以通过左上角的🧯中止并忘记该上传。", //m
        "ur_1uo": "成功:文件上传成功",
        "ur_auo": "成功:所有 {0} 个文件上传成功",
        "ur_1so": "成功:文件在服务器上找到",
```
|
||||
|
||||
ebi('wrap').insertBefore(mknod('div', 'lazy'), ebi('epi'));
|
||||
|
||||
var x = ebi('bbsw');
|
||||
x.parentNode.insertBefore(mknod('div', null,
|
||||
'<input type="checkbox" id="uput" name="uput"><label for="uput">' + L.u_uput + '</label>'), x);
|
||||
|
||||
|
||||
(function () {
|
||||
var o = mknod('div');
|
||||
@@ -2014,6 +2033,13 @@ ebi('op_cfg').innerHTML = (
|
||||
' </td>\n' +
|
||||
' </div>\n' +
|
||||
'</div>\n' +
|
||||
'<div>\n' +
|
||||
' <h3>' + L.cl_hsort + '</h3>\n' +
|
||||
' <div>\n' +
|
||||
' <input type="text" id="hsortn" value="" ' + NOAC + ' style="width:3em" tt="' + L.cdt_hsort + '" />' +
|
||||
' </td>\n' +
|
||||
' </div>\n' +
|
||||
'</div>\n' +
|
||||
'<div><h3>' + L.cl_keytype + '</h3><div id="key_notation"></div></div>\n' +
|
||||
'<div><h3>' + L.cl_hiddenc + ' ' + (MOBILE ? '<a href="#" id="hcolsh">' + L.cl_hidec + '</a> / ' : '') + '<a href="#" id="hcolsr">' + L.cl_reset + '</a></h3><div id="hcols"></div></div>'
|
||||
);
|
||||
```diff
@@ -2178,6 +2204,37 @@ if (window.og_fn) {
 }

+var hsortn = ebi('hsortn').value = icfg_get('hsortn', dhsortn);
+ebi('hsortn').oninput = function (e) {
+    var n = parseInt(this.value);
+    swrite('hsortn', hsortn = (isNum(n) ? n : dhsortn));
+};
+(function() {
+    var args = ('' + hash0).split(/,sort/g);
+    if (args.length < 2)
+        return;
+
+    var ret = [];
+    for (var a = 1; a < args.length; a++) {
+        var t = '', n = 1, z = args[a].split(',')[0];
+        if (z.startsWith('-')) {
+            z = z.slice(1);
+            n = -1;
+        }
+        if (z == "sz" || z.indexOf('/.') + 1)
+            t = "int";
+        ret.push([z, n, t]);
+    }
+    n = Math.min(ret.length, hsortn);
+    if (n) {
+        var cmp = jread('fsort', []);
+        if (JSON.stringify(ret.slice(0, n) !=
+            JSON.stringify(cmp.slice(0, n))))
+            jwrite('fsort', ret);
+    }
+})();

 var mpl = (function () {
    var have_mctl = 'mediaSession' in navigator && window.MediaMetadata;

@@ -3935,7 +3992,7 @@ function play(tid, is_ev, seek) {
    var o = ebi(oid);
    o.setAttribute('id', 'thx_js');
    if (mpl.aplay)
-       sethash(oid);
+       sethash(oid + getsort());
    o.setAttribute('id', oid);
 }

@@ -4210,6 +4267,11 @@ function eval_hash() {
    }
    bcfg_bind(props, 'mcmp', 'au_compact', false, setacmp);
    setacmp();

+   // toggle bup checksums
+   ebi('uput').onchange = function() {
+       QS('#op_bup input[name="act"]').value = this.checked ? 'uput' : 'bput';
+   };
 })();
```
```diff
@@ -4239,13 +4301,28 @@ function read_dsort(txt) {
 read_dsort(dsort);


+function getsort() {
+    var ret = '',
+        sopts = jread('fsort');
+
+    sopts = sopts && sopts.length ? sopts : dsort;
+
+    for (var a = 0; a < Math.min(hsortn, sopts.length); a++)
+        ret += ',sort' + (sopts[a][1] < 0 ? '-' : '') + sopts[a][0];
+
+    return ret;
+}
+
+
 function sortfiles(nodes) {
    if (!nodes.length)
        return nodes;

-   var sopts = jread('fsort', jcp(dsort)),
+   var sopts = jread('fsort'),
        dir1st = sread('dir1st') !== '0';

+   sopts = sopts && sopts.length ? sopts : jcp(dsort);
+
    var collator = !clgot(ebi('nsort'), 'on') ? null :
        new Intl.Collator([], {numeric: true});

@@ -4264,6 +4341,8 @@ function sortfiles(nodes) {
            if (!name)
                continue;

+           name = name.toLowerCase();
+
            if (name == 'ts')
                typ = 'int';

@@ -6191,7 +6270,7 @@ var thegrid = (function () {
                    esc(uricom_dec(h.split('/').pop())) + '</a>';
            },
            onChange: function (i) {
-               sethash('g' + r.bbox[i].imageElement.getAttribute('ref'));
+               sethash('g' + r.bbox[i].imageElement.getAttribute('ref') + getsort());
            }
        });
        r.bbox = br[0][0];
```
```diff
@@ -9392,7 +9471,23 @@ var unpost = (function () {
        toast.ok(5, this.responseText);

        if (!QS('#op_unpost a[me]'))
-           ebi(goto_unpost());
+           goto_unpost();
+
+       var fi = window.up2k && up2k.st.files;
+       if (fi && fi.length < 9) {
+           for (var a = 0; a < fi.length; a++) {
+               var f = fi[a];
+               if (!f.done && (f.rechecks || f.want_recheck) &&
+                   !has(up2k.st.todo.handshake, f) &&
+                   !has(up2k.st.busy.handshake, f)
+               ) {
+                   up2k.st.todo.handshake.push(f);
+                   up2k.ui.seth(f.n, 2, L.u_hashdone);
+                   up2k.ui.seth(f.n, 1, '📦 wait');
+                   up2k.ui.move(f.n, 'bz');
+               }
+           }
+       }
    }

    ct.onclick = function (e) {
```
```diff
@@ -17,8 +17,8 @@ var chromedbg = function () { console.log(arguments); }
 var dbg = function () { };

 // replace dbg with the real deal here or in the console:
-// dbg = chromedbg
-// dbg = console.log
+// dbg = chromedbg;
+// dbg = console.log;


 // dodge browser issues
```
```diff
@@ -1556,8 +1556,10 @@ function up2k_init(subtle) {
        if (nhash) {
            st.time.hashing += td;
            t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
-           if (uc.fsearch)
+           if (uc.fsearch) {
+               st.time.busy += td;
                t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
+           }
        }

        var b_up = st.bytes.inflight + st.bytes.uploaded,
@@ -2406,6 +2408,9 @@ function up2k_init(subtle) {
            msg = 'done';

        if (t.postlist.length) {
+           if (t.rechecks && QS('#opa_del.act'))
+               toast.inf(30, L.u_started, L.u_unpt);
+
            var arr = st.todo.upload,
                sort = arr.length && arr[arr.length - 1].nfile > t.n;

@@ -2516,8 +2521,13 @@ function up2k_init(subtle) {
        if (!t.rechecks && (err_pend || err_srcb)) {
            t.rechecks = 0;
            t.want_recheck = true;
-           err = L.u_dupdefer;
-           cls = 'defer';
+           if (st.busy.upload.length || st.busy.handshake.length || st.bytes.uploaded) {
+               err = L.u_dupdefer;
+               cls = 'defer';
+           }
        }
+       if (err_pend) {
+           err += ' <a href="#" onclick="toast.inf(60, L.ue_ab);" class="fsearch_explain">(' + L.u_expl + ')</a>';
+       }
    }
```
```diff
@@ -578,7 +578,9 @@ function yscroll() {

 function showsort(tab) {
    var v, vn, v1, v2, th = tab.tHead,
-       sopts = jread('fsort', jcp(dsort));
+       sopts = jread('fsort');
+
+   sopts = sopts && sopts.length ? sopts : dsort;

    th && (th = th.rows[0]) && (th = th.cells);

@@ -615,10 +617,13 @@ function sortTable(table, col, cb) {
        tr = Array.prototype.slice.call(tb.rows, 0),
        i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;

-   var stype = th[col].getAttribute('sort');
+   var kname = th[col].getAttribute('name'),
+       stype = th[col].getAttribute('sort');
    try {
-       var nrules = [], rules = jread("fsort", []);
-       rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
+       var nrules = [],
+           rules = kname == 'href' ? [] : jread("fsort", []);
+
+       rules.unshift([kname, reverse, stype || '']);
        for (var a = 0; a < rules.length; a++) {
            var add = true;
            for (var b = 0; b < a; b++)
@@ -979,11 +984,33 @@ function apop(arr, v) {
 }


-function jcp(obj) {
+function jcp1(obj) {
    return JSON.parse(JSON.stringify(obj));
 }


+function jcp2(src) {
+    if (Array.isArray(src)) {
+        var ret = [];
+        for (var a = 0; a < src.length; ++a) {
+            var sub = src[a];
+            ret.push((sub === null) ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub);
+        }
+    } else {
+        var ret = {};
+        for (var key in src) {
+            var sub = src[key];
+            ret[key] = sub === null ? sub : (sub instanceof Date) ? new Date(sub.valueOf()) : (typeof sub === 'object') ? jcp2(sub) : sub;
+        }
+    }
+    return ret;
+};
+
+
+// jcp1 50% faster on android-chrome, jcp2 7x everywhere else
+var jcp = MOBILE && CHROME ? jcp1 : jcp2;
+
+
 function sdrop(key) {
    try {
        STG.removeItem(key);
```
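The `jcp1`/`jcp2` split above (JSON round-trip clone vs. hand-rolled recursive copy, picked per platform) is a general tradeoff, not something specific to this codebase. A small hedged Python analogue; the numbers depend entirely on the payload and runtime, so measure before choosing:

```python
# rough Python analogue of the jcp1/jcp2 tradeoff above:
# clone a nested structure via a JSON round-trip vs. a recursive copy
import copy
import json
import timeit

data = {"rows": [[i, str(i), {"ts": i * 1.5}] for i in range(1000)]}

t1 = timeit.timeit(lambda: json.loads(json.dumps(data)), number=100)
t2 = timeit.timeit(lambda: copy.deepcopy(data), number=100)
print("json round-trip: %.3fs  deepcopy: %.3fs" % (t1, t2))
```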
@@ -1,3 +1,71 @@
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-1204-0003 `v1.16.3` 120%
|
||||
|
||||
## 🧪 new features
|
||||
|
||||
* #120 add option `--srch-excl` and volflag `srch_excl` for excluding certain paths from search results 697a4fa8
|
||||
* mDNS: add workaround for https://github.com/avahi/avahi/issues/379 6c1cf68b 94d1924f
|
||||
* Avahi mDNS Reflection, sometimes used in intricate LAN setups, doesn't understand NSEC records and corrupts them
|
||||
* the workaround makes copyparty able to read the corrupted packets, but clients without a similar workaround will require either `--zm4` or `--zm6` so copyparty doesn't include the usual NSEC records
|
||||
* this is mentioned in a very loud warning in the logs when necessary
|
||||
* mDNS: option to silently ignore buggy devices instead of spamming the log with parser errors 395af051
|
||||
* webdav: support listing unmapped root with infinite recursion (Depth:0) 21a3f369
|
||||
* embed current sort config into media URLs (gallery/music) 0f257c93 4cfdc4c5 01670827
|
||||
* ensures that anyone clicking your link will see the files in the same order as you
|
||||
* can be confgured serverside (`--hsortn`, volflag `hsortn`) and clientside (`#sort` in settings)
|
||||
* URL and UI options to disable checksum calculation of PUT, bup, basic uploads c5a000d2
|
||||
* also allows [choosing either md5, sha1, sha256, or blake2](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#write) instead of the default sha512
|
||||
* can give uploads a nice speed boost when copyparty is running on a potato
|
||||
|
||||
## 🩹 bugfixes
|
||||
|
||||
* webdav: more correct login challenge 2ce82339
|
||||
* the previous behavior could make some clients reluctant to send the password
|
||||
* #120 forget metadata of all files (including uploads) when shadowed d168b2ac
|
||||
* thanks to @Gremious for all the debugging to narrow this down!
|
||||
* #120 drop volume caches if relevant config is changed (mainly indexing filters) 2f83c6c7
|
||||
* #121 couldn't access arbitrary toplevel files from accounts with `h` permission 1f5f42f2
|
||||
|
||||
## 🔧 other changes
|
||||
|
||||
* exclude thumbnails from accesslog by default 9082c470
|
||||
* filesearch: show a final summary of time-elapsed and average hashing speed 8a631f04
|
||||
* improve phrasing of debug messages during indexing at startup 127f414e
|
||||
* `--license` no longer depends on opensource.org at build time 33c4ccff
|
||||
* update deps 6cedcfbf
|
||||
* copyparty.exe: python 3.12.7 => 3.12.8
|
||||
* webdeps: hashwasm, dompurify
|
||||
|
||||
|
||||
|
||||
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
|
||||
# 2024-1123-2336 `v1.16.2` webdav upload fix
|
||||
|
||||
## 🧪 new features
|
||||
|
||||
* add `--nsort` and volflag `nsort` to default-enable natural sort of filenames with leading digits 8f7ffcf3
|
||||
* video-player: support `.mov` files which contain browser-native codecs 2d0cbdf1

## 🩹 bugfixes

* #119 v1.16.0 broke webdav uploads from rclone and possibly other clients 7dfbfc72
  * a collection of webdav unittests will be added soon to prevent similar issues in the future
* #118 ip-ranges can be mixed with `lan` when specifying the list of trusted proxies for `x-forwarded-for` with `--xff-src`
  * found and fixed by @codemicro (thx!) 0e31cfa7
* ux:
  * in the grid-view, markdown files would open in the generic text viewer 520ac8f4
  * qr-codes (create-share, view-share) didn't render on chrome db069c3d
  * qr-codes could cause layout-shifting 5afb562a
  * fix layout-shifting for ongoing downloads in controlpanel 9c8507a0
  * cosmetic eta jank b10843d0


## 🔧 other changes

* up to 7% faster folder listings due to refactoring for more ux knobs 0c43b592
* fix resource leaks (only affected tests/debug) 2ab8924e

▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2024-1115-2218 `v1.16.1` cbz thumbnails

@@ -170,10 +170,14 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| method | params | body | result |
|--|--|--|--|
| PUT | | (binary data) | upload into file at URL |
| PUT | `?ck` | (binary data) | upload without checksum gen (faster) |
| PUT | `?ck=md5` | (binary data) | return md5 instead of sha512 |
| PUT | `?gz` | (binary data) | compress with gzip and write into file at URL |
| PUT | `?xz` | (binary data) | compress with xz and write into file at URL |
| mPOST | | `f=FILE` | upload `FILE` into the folder at URL |
| mPOST | `?j` | `f=FILE` | ...and reply with json |
| mPOST | `?ck` | `f=FILE` | ...and disable checksum gen (faster) |
| mPOST | `?ck=md5` | `f=FILE` | ...and return md5 instead of sha512 |
| mPOST | `?replace` | `f=FILE` | ...and overwrite existing files |
| mPOST | `?media` | `f=FILE` | ...and return medialink (not hotlink) |
| mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL |
@@ -192,6 +196,12 @@ upload modifiers:
| `Accept: url` | `want=url` | return just the file URL |
| `Rand: 4` | `rand=4` | generate random filename with 4 characters |
| `Life: 30` | `life=30` | delete file after 30 seconds |
| `CK: no` | `ck` | disable serverside checksum (maybe faster) |
| `CK: md5` | `ck=md5` | return md5 checksum instead of sha512 |
| `CK: sha1` | `ck=sha1` | return sha1 checksum |
| `CK: sha256` | `ck=sha256` | return sha256 checksum |
| `CK: b2` | `ck=b2` | return blake2b checksum |
| `CK: b2s` | `ck=b2s` | return blake2s checksum |

* `life` only has an effect if the volume has a lifetime, and the volume lifetime must be greater than the file's
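
a quick sketch of the `CK` switches in practice (stdlib-only python; the folder name is a made-up example, and this assumes a copyparty on the default port with a volume that allows PUT):

```py
# upload with serverside checksumming disabled, then another upload
# requesting md5 instead of the default sha512; both the `CK` header
# and the `?ck` url-param from the tables above work
import urllib.request

data = b"hello world\n"

req = urllib.request.Request(
    "http://127.0.0.1:3923/inc/hello.txt", data=data, method="PUT"
)
req.add_header("CK", "no")  # skip checksum gen (faster on potatoes)
print(urllib.request.urlopen(req).read().decode("utf-8"))

req = urllib.request.Request(
    "http://127.0.0.1:3923/inc/hello2.txt?ck=md5", data=data, method="PUT"
)
print(urllib.request.urlopen(req).read().decode("utf-8"))
```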

@@ -1,11 +1,11 @@
FROM alpine:3.18
WORKDIR /z
ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
    ver_hashwasm=4.10.0 \
    ver_hashwasm=4.12.0 \
    ver_marked=4.3.0 \
    ver_dompf=3.1.7 \
    ver_dompf=3.2.2 \
    ver_mde=2.18.0 \
    ver_codemirror=5.65.16 \
    ver_codemirror=5.65.18 \
    ver_fontawesome=5.13.0 \
    ver_prism=1.29.0 \
    ver_zopfli=1.0.3
@@ -16,7 +16,7 @@ ENV ver_asmcrypto=c72492f4a66e17a0e5dd8ad7874de354f3ccdaa5 \
# https://github.com/codemirror/codemirror5/releases
# https://github.com/cure53/DOMPurify/releases
# https://github.com/Daninet/hash-wasm/releases
# https://github.com/openpgpjs/asmcrypto.js
# https://github.com/openpgpjs/asmcrypto.js/commits/main/
# https://github.com/google/zopfli/tags

||||
40
scripts/genlic.py
Executable file
40
scripts/genlic.py
Executable file
@@ -0,0 +1,40 @@
#!/usr/bin/env python3

import re, os, sys, codecs

outfile = os.path.realpath(sys.argv[1])

os.chdir(os.path.dirname(__file__))

with open("../docs/lics.txt", "rb") as f:
    s = f.read().decode("utf-8").rstrip("\n") + "\n\n\n\n"

s = re.sub("\nC: ", "\nCopyright (c) ", s)
s = re.sub("\nL: ", "\nLicense: ", s)
ret = s.split("\n")

lics = [
    "MIT License",
    "BSD 2-Clause License",
    "BSD 3-Clause License",
    "SIL Open Font License v1.1",
]

for n, lic in enumerate(lics, 1):
    with open("lics/%d.r13" % (n,), "rb") as f:
        s = f.read().decode("utf-8")
    s = codecs.decode(s, "rot_13")
    s = "\n--- %s ---\n\n%s" % (lic, s)
    ret.extend(s.split("\n"))

for n, ln in enumerate(ret):
    if not ln.startswith("--- "):
        continue
    pad = " " * ((80 - len(ln)) // 2)
    ln = "%s\033[07m%s\033[0m" % (pad, ln)
    ret[n] = ln

ret.append("")
ret.append("")

with open(outfile, "wb") as f:
    f.write(("\n".join(ret)).encode("utf-8"))
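
(the rot13 files it reads are generated by `scripts/lics/rot.py` further down; the decode itself is just the stdlib codec -- a quick round-trip to illustrate:)

```py
import codecs

enc = codecs.encode("MIT License", "rot_13")
print(enc)                           # ZVG Yvprafr
print(codecs.decode(enc, "rot_13"))  # MIT License
```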
@@ -1,54 +0,0 @@
#!/bin/bash
set -e

outfile="$($(command -v realpath || command -v grealpath) "$1")"

[ -e genlic.sh ] || cd scripts
[ -e genlic.sh ]

f=../build/mit.txt
[ -e $f ] ||
    curl https://opensource.org/licenses/MIT |
    awk '/div>/{o=0}o>1;o{o++}/;COPYRIGHT HOLDER/{o=1}' |
    awk '{gsub(/<[^>]+>/,"")};1' >$f

f=../build/isc.txt
[ -e $f ] ||
    curl https://opensource.org/licenses/ISC |
    awk '/div>/{o=0}o>2;o{o++}/;OWNER/{o=1}' |
    awk '{gsub(/<[^>]+>/,"")};/./{b=0}!/./{b++}b>1{next}1' >$f

f=../build/2bsd.txt
[ -e $f ] ||
    curl https://opensource.org/licenses/BSD-2-Clause |
    awk '/div>/{o=0}o>1;o{o++}/HOLDER/{o=1}' |
    awk '{gsub(/<[^>]+>/,"")};1' >$f

f=../build/3bsd.txt
[ -e $f ] ||
    curl https://opensource.org/licenses/BSD-3-Clause |
    awk '/div>/{o=0}o>1;o{o++}/HOLDER/{o=1}' |
    awk '{gsub(/<[^>]+>/,"")};1' >$f

f=../build/ofl.txt
[ -e $f ] ||
    curl https://opensource.org/licenses/OFL-1.1 |
    awk '/PREAMBLE/{o=1}/sil\.org/{o=0}!o{next}/./{printf "%s ",$0;next}{print"\n"}' |
    awk '{gsub(/<[^>]+>/,"");gsub(/^\s+/,"");gsub(/&/,"\\&")}/./{b=0}!/./{b++}b>1{next}1' >$f

(sed -r 's/^L: /License: /;s/^C: /Copyright (c) /' <../docs/lics.txt
printf '\n\n--- MIT License ---\n\n'; cat ../build/mit.txt
printf '\n\n--- ISC License ---\n\n'; cat ../build/isc.txt
printf '\n\n--- BSD 2-Clause License ---\n\n'; cat ../build/2bsd.txt
printf '\n\n--- BSD 3-Clause License ---\n\n'; cat ../build/3bsd.txt
printf '\n\n--- SIL Open Font License v1.1 ---\n\n'; cat ../build/ofl.txt
) |
while IFS= read -r x; do
    [ "${x:0:4}" = "--- " ] || {
        printf '%s\n' "$x"
        continue
    }
    n=${#x}
    p=$(( (80-n)/2 ))
    printf "%${p}s\033[07m%s\033[0m\n" "" "$x"
done > "$outfile"

@@ -1,6 +1,7 @@
#!/usr/bin/env python3

import re
import socket
import subprocess as sp

@@ -25,13 +26,20 @@ def readclip():
            return sp.check_output(cmd.split()).decode("utf-8")
        except:
            pass
    raise Exception("need one of these: xsel xclip pbpaste")


def cnv(src):
    hostname = str(socket.gethostname()).split(".")[0]

    yield '<html style="background:#222;color:#fff"><body>'
    skip_sfx = False
    in_sfx = 0
    in_salt = 0
    in_name = 0
    in_cores = 0
    in_hash_mt = False
    in_th_ram_max = 0

    while True:
        ln = next(src)
@@ -43,6 +51,7 @@ def cnv(src):
    for ln in src:
        ln = ln.rstrip()
        t = ln
        if re.search(r"^<font[^>]+>copyparty v[0-9]", ln):
            in_sfx = 3
        if in_sfx:
@@ -56,11 +65,39 @@ def cnv(src):
            in_salt = 3
        if in_salt:
            in_salt -= 1
            t = ln
            ln = re.sub(r">[0-9a-zA-Z/+]{24}<", ">24-character-autogenerated<", ln)
            ln = re.sub(r">[0-9a-zA-Z/+]{40}<", ">40-character-autogenerated<", ln)
            if t != ln:
                in_salt = 0
        if "--name TXT" in ln:
            in_name = 3
        if in_name:
            in_name -= 1
            ln = ln.replace(">" + hostname + "<", ">hostname<")
            if t != ln:
                in_name = 0
        if "--hash-mt CORES" in ln:
            in_cores = 3
            in_hash_mt = True
        if "--mtag-mt CORES" in ln or "--th-mt CORES" in ln:
            in_cores = 3
        if in_cores:
            in_cores -= 1
            zs = ">numCores"
            if in_hash_mt:
                zs += " if 5 or less"
            ln = re.sub(r">[0-9]{1,2}<", zs + "<", ln)
            if t != ln:
                in_cores = 0
                in_hash_mt = False
        if "--th-ram-max GB" in ln:
            in_th_ram_max = 3
        if in_th_ram_max:
            in_th_ram_max -= 1
            ln = re.sub(r">[0-9]{1,2}\.[0-9]<", ">dynamic<", ln)
            if t != ln:
                in_th_ram_max = 0

        ln = ln.replace(">/home/ed/", ">~/")
        if ln.startswith("0" * 20):
            skip_sfx = True

@@ -6,6 +6,10 @@ s`/home/ed/`~/`;
s/uuid:[0-9a-f-]{36}/autogenerated/;
s/(-salt SALT.*default: )[0-9a-zA-Z/+]{24}\)/\124-character-autogenerated)/;
s/(-salt SALT.*default: )[0-9a-zA-Z/+]{40}\)/\140-character-autogenerated)/;
s/(--name TXT.*default: )[^)]+/\1hostname/;
s/(--hash-mt CORES.*default: )[0-9]+/\1numCores if 5 or less/;
s/(--mtag-mt|th-mt)( CORES.*default: )[0-9]+/\1\2numCores/;
s/(--th-ram-max GB.*default: )[0-9\.]+/\1dynamic/;
' | awk '
/^copyparty/{a=1} !a{next}
/^0{20}/{b=1} b&&/^copyparty v[0-9]+\./{s=3}

5 scripts/lics/1.r13 Normal file
@@ -0,0 +1,5 @@
Crezvffvba vf urerol tenagrq, serr bs punetr, gb nal crefba bognvavat n pbcl bs guvf fbsgjner naq nffbpvngrq qbphzragngvba svyrf (gur "Fbsgjner"), gb qrny va gur Fbsgjner jvgubhg erfgevpgvba, vapyhqvat jvgubhg yvzvgngvba gur evtugf gb hfr, pbcl, zbqvsl, zretr, choyvfu, qvfgevohgr, fhoyvprafr, naq/be fryy pbcvrf bs gur Fbsgjner, naq gb crezvg crefbaf gb jubz gur Fbsgjner vf sheavfurq gb qb fb, fhowrpg gb gur sbyybjvat pbaqvgvbaf:

Gur nobir pbclevtug abgvpr naq guvf crezvffvba abgvpr funyy or vapyhqrq va nyy pbcvrf be fhofgnagvny cbegvbaf bs gur Fbsgjner.

GUR FBSGJNER VF CEBIVQRQ "NF VF", JVGUBHG JNEENAGL BS NAL XVAQ, RKCERFF BE VZCYVRQ, VAPYHQVAT OHG ABG YVZVGRQ GB GUR JNEENAGVRF BS ZREPUNAGNOVYVGL, SVGARFF SBE N CNEGVPHYNE CHECBFR NAQ ABAVASEVATRZRAG. VA AB RIRAG FUNYY GUR NHGUBEF BE PBCLEVTUG UBYQREF OR YVNOYR SBE NAL PYNVZ, QNZNTRF BE BGURE YVNOVYVGL, JURGURE VA NA NPGVBA BS PBAGENPG, GBEG BE BGUREJVFR, NEVFVAT SEBZ, BHG BS BE VA PBAARPGVBA JVGU GUR FBSGJNER BE GUR HFR BE BGURE QRNYVATF VA GUR FBSGJNER.
7 scripts/lics/2.r13 Normal file
@@ -0,0 +1,7 @@
Erqvfgevohgvba naq hfr va fbhepr naq ovanel sbezf, jvgu be jvgubhg zbqvsvpngvba, ner crezvggrq cebivqrq gung gur sbyybjvat pbaqvgvbaf ner zrg:

1. Erqvfgevohgvbaf bs fbhepr pbqr zhfg ergnva gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre.

2. Erqvfgevohgvbaf va ovanel sbez zhfg ercebqhpr gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre va gur qbphzragngvba naq/be bgure zngrevnyf cebivqrq jvgu gur qvfgevohgvba.

GUVF FBSGJNER VF CEBIVQRQ OL GUR PBCLEVTUG UBYQREF NAQ PBAGEVOHGBEF "NF VF" NAQ NAL RKCERFF BE VZCYVRQ JNEENAGVRF, VAPYHQVAT, OHG ABG YVZVGRQ GB, GUR VZCYVRQ JNEENAGVRF BS ZREPUNAGNOVYVGL NAQ SVGARFF SBE N CNEGVPHYNE CHECBFR NER QVFPYNVZRQ. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE BE PBAGEVOHGBEF OR YVNOYR SBE NAL QVERPG, VAQVERPG, VAPVQRAGNY, FCRPVNY, RKRZCYNEL, BE PBAFRDHRAGVNY QNZNTRF (VAPYHQVAT, OHG ABG YVZVGRQ GB, CEBPHERZRAG BS FHOFGVGHGR TBBQF BE FREIVPRF; YBFF BS HFR, QNGN, BE CEBSVGF; BE OHFVARFF VAGREEHCGVBA) UBJRIRE PNHFRQ NAQ BA NAL GURBEL BS YVNOVYVGL, JURGURE VA PBAGENPG, FGEVPG YVNOVYVGL, BE GBEG (VAPYHQVAT ARTYVTRAPR BE BGUREJVFR) NEVFVAT VA NAL JNL BHG BS GUR HFR BS GUVF FBSGJNER, RIRA VS NQIVFRQ BS GUR CBFFVOVYVGL BS FHPU QNZNTR.
9 scripts/lics/3.r13 Normal file
@@ -0,0 +1,9 @@
Erqvfgevohgvba naq hfr va fbhepr naq ovanel sbezf, jvgu be jvgubhg zbqvsvpngvba, ner crezvggrq cebivqrq gung gur sbyybjvat pbaqvgvbaf ner zrg:

1. Erqvfgevohgvbaf bs fbhepr pbqr zhfg ergnva gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre.

2. Erqvfgevohgvbaf va ovanel sbez zhfg ercebqhpr gur nobir pbclevtug abgvpr, guvf yvfg bs pbaqvgvbaf naq gur sbyybjvat qvfpynvzre va gur qbphzragngvba naq/be bgure zngrevnyf cebivqrq jvgu gur qvfgevohgvba.

3. Arvgure gur anzr bs gur pbclevtug ubyqre abe gur anzrf bs vgf pbagevohgbef znl or hfrq gb raqbefr be cebzbgr cebqhpgf qrevirq sebz guvf fbsgjner jvgubhg fcrpvsvp cevbe jevggra crezvffvba.

GUVF FBSGJNER VF CEBIVQRQ OL GUR PBCLEVTUG UBYQREF NAQ PBAGEVOHGBEF "NF VF" NAQ NAL RKCERFF BE VZCYVRQ JNEENAGVRF, VAPYHQVAT, OHG ABG YVZVGRQ GB, GUR VZCYVRQ JNEENAGVRF BS ZREPUNAGNOVYVGL NAQ SVGARFF SBE N CNEGVPHYNE CHECBFR NER QVFPYNVZRQ. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE BE PBAGEVOHGBEF OR YVNOYR SBE NAL QVERPG, VAQVERPG, VAPVQRAGNY, FCRPVNY, RKRZCYNEL, BE PBAFRDHRAGVNY QNZNTRF (VAPYHQVAT, OHG ABG YVZVGRQ GB, CEBPHERZRAG BS FHOFGVGHGR TBBQF BE FREIVPRF; YBFF BS HFR, QNGN, BE CEBSVGF; BE OHFVARFF VAGREEHCGVBA) UBJRIRE PNHFRQ NAQ BA NAL GURBEL BS YVNOVYVGL, JURGURE VA PBAGENPG, FGEVPG YVNOVYVGL, BE GBEG (VAPYHQVAT ARTYVTRAPR BE BGUREJVFR) NEVFVAT VA NAL JNL BHG BS GUR HFR BS GUVF FBSGJNER, RIRA VS NQIVFRQ BS GUR CBFFVOVYVGL BS FHPU QNZNTR.
39 scripts/lics/4.r13 Normal file
@@ -0,0 +1,39 @@
CERNZOYR

Gur tbnyf bs gur Bcra Sbag Yvprafr (BSY) ner gb fgvzhyngr jbeyqjvqr qrirybczrag bs pbyynobengvir sbag cebwrpgf, gb fhccbeg gur sbag perngvba rssbegf bs npnqrzvp naq yvathvfgvp pbzzhavgvrf, naq gb cebivqr n serr naq bcra senzrjbex va juvpu sbagf znl or funerq naq vzcebirq va cnegarefuvc jvgu bguref.

Gur BSY nyybjf gur yvprafrq sbagf gb or hfrq, fghqvrq, zbqvsvrq naq erqvfgevohgrq serryl nf ybat nf gurl ner abg fbyq ol gurzfryirf. Gur sbagf, vapyhqvat nal qrevingvir jbexf, pna or ohaqyrq, rzorqqrq, erqvfgevohgrq naq/be fbyq jvgu nal fbsgjner cebivqrq gung nal erfreirq anzrf ner abg hfrq ol qrevingvir jbexf. Gur sbagf naq qrevingvirf, ubjrire, pnaabg or eryrnfrq haqre nal bgure glcr bs yvprafr. Gur erdhverzrag sbe sbagf gb erznva haqre guvf yvprafr qbrf abg nccyl gb nal qbphzrag perngrq hfvat gur sbagf be gurve qrevingvirf.

QRSVAVGVBAF

"Sbag Fbsgjner" ersref gb gur frg bs svyrf eryrnfrq ol gur Pbclevtug Ubyqre(f) haqre guvf yvprafr naq pyrneyl znexrq nf fhpu. Guvf znl vapyhqr fbhepr svyrf, ohvyq fpevcgf naq qbphzragngvba.

"Erfreirq Sbag Anzr" ersref gb nal anzrf fcrpvsvrq nf fhpu nsgre gur pbclevtug fgngrzrag(f).

"Bevtvany Irefvba" ersref gb gur pbyyrpgvba bs Sbag Fbsgjner pbzcbaragf nf qvfgevohgrq ol gur Pbclevtug Ubyqre(f).

"Zbqvsvrq Irefvba" ersref gb nal qrevingvir znqr ol nqqvat gb, qryrgvat, be fhofgvghgvat - va cneg be va jubyr - nal bs gur pbzcbaragf bs gur Bevtvany Irefvba, ol punatvat sbezngf be ol cbegvat gur Sbag Fbsgjner gb n arj raivebazrag.

"Nhgube" ersref gb nal qrfvtare, ratvarre, cebtenzzre, grpuavpny jevgre be bgure crefba jub pbagevohgrq gb gur Sbag Fbsgjner.

CREZVFFVBA & PBAQVGVBAF

Crezvffvba vf urerol tenagrq, serr bs punetr, gb nal crefba bognvavat n pbcl bs gur Sbag Fbsgjner, gb hfr, fghql, pbcl, zretr, rzorq, zbqvsl, erqvfgevohgr, naq fryy zbqvsvrq naq hazbqvsvrq pbcvrf bs gur Sbag Fbsgjner, fhowrpg gb gur sbyybjvat pbaqvgvbaf:

1) Arvgure gur Sbag Fbsgjner abe nal bs vgf vaqvivqhny pbzcbaragf, va Bevtvany be Zbqvsvrq Irefvbaf, znl or fbyq ol vgfrys.

2) Bevtvany be Zbqvsvrq Irefvbaf bs gur Sbag Fbsgjner znl or ohaqyrq, erqvfgevohgrq naq/be fbyq jvgu nal fbsgjner, cebivqrq gung rnpu pbcl pbagnvaf gur nobir pbclevtug abgvpr naq guvf yvprafr. Gurfr pna or vapyhqrq rvgure nf fgnaq-nybar grkg svyrf, uhzna-ernqnoyr urnqref be va gur nccebcevngr znpuvar-ernqnoyr zrgnqngn svryqf jvguva grkg be ovanel svyrf nf ybat nf gubfr svryqf pna or rnfvyl ivrjrq ol gur hfre.

3) Ab Zbqvsvrq Irefvba bs gur Sbag Fbsgjner znl hfr gur Erfreirq Sbag Anzr(f) hayrff rkcyvpvg jevggra crezvffvba vf tenagrq ol gur pbeerfcbaqvat Pbclevtug Ubyqre. Guvf erfgevpgvba bayl nccyvrf gb gur cevznel sbag anzr nf cerfragrq gb gur hfref.

4) Gur anzr(f) bs gur Pbclevtug Ubyqre(f) be gur Nhgube(f) bs gur Sbag Fbsgjner funyy abg or hfrq gb cebzbgr, raqbefr be nqiregvfr nal Zbqvsvrq Irefvba, rkprcg gb npxabjyrqtr gur pbagevohgvba(f) bs gur Pbclevtug Ubyqre(f) naq gur Nhgube(f) be jvgu gurve rkcyvpvg jevggra crezvffvba.

5) Gur Sbag Fbsgjner, zbqvsvrq be hazbqvsvrq, va cneg be va jubyr, zhfg or qvfgevohgrq ragveryl haqre guvf yvprafr, naq zhfg abg or qvfgevohgrq haqre nal bgure yvprafr. Gur erdhverzrag sbe sbagf gb erznva haqre guvf yvprafr qbrf abg nccyl gb nal qbphzrag perngrq hfvat gur Sbag Fbsgjner.

GREZVANGVBA

Guvf yvprafr orpbzrf ahyy naq ibvq vs nal bs gur nobir pbaqvgvbaf ner abg zrg.

QVFPYNVZRE

GUR SBAG FBSGJNER VF CEBIVQRQ "NF VF", JVGUBHG JNEENAGL BS NAL XVAQ, RKCERFF BE VZCYVRQ, VAPYHQVAT OHG ABG YVZVGRQ GB NAL JNEENAGVRF BS ZREPUNAGNOVYVGL, SVGARFF SBE N CNEGVPHYNE CHECBFR NAQ ABAVASEVATRZRAG BS PBCLEVTUG, CNGRAG, GENQRZNEX, BE BGURE EVTUG. VA AB RIRAG FUNYY GUR PBCLEVTUG UBYQRE OR YVNOYR SBE NAL PYNVZ, QNZNTRF BE BGURE YVNOVYVGL, VAPYHQVAT NAL TRARENY, FCRPVNY, VAQVERPG, VAPVQRAGNY, BE PBAFRDHRAGVNY QNZNTRF, JURGURE VA NA NPGVBA BS PBAGENPG, GBEG BE BGUREJVFR, NEVFVAT SEBZ, BHG BS GUR HFR BE VANOVYVGL GB HFR GUR SBAG FBSGJNER BE SEBZ BGURE QRNYVATF VA GUR SBAG FBSGJNER.
3 scripts/lics/README.md Normal file
@@ -0,0 +1,3 @@
these are foss licenses in rot13 so scanners don't think copyparty isn't mit

1=mit 2=2bsd 3=3bsd 4=ofl
12 scripts/lics/rot.py Executable file
@@ -0,0 +1,12 @@
#!/usr/bin/env python3

import os, codecs

for fn in os.listdir("."):
    if not fn.endswith(".txt"):
        continue
    with open(fn, "rb") as f:
        s = f.read().decode("utf-8")
    b = codecs.encode(s, "rot_13").encode("utf-8")
    with open(fn.replace("txt", "r13"), "wb") as f:
        f.write(b)
@@ -100,14 +100,13 @@ load_env || {

# cleanup
rm -rf unt build/pypi

# grab licenses
scripts/genlic.sh copyparty/res/COPYING.txt
# generate license list
scripts/genlic.py copyparty/res/COPYING.txt

# clean-ish packaging env
rm -rf build/pypi
mkdir -p build/pypi
cp -pR pyproject.toml README.md LICENSE copyparty contrib bin scripts/strip_hints build/pypi/
tar -c docs/lics.txt scripts/genlic.sh build/*.txt | tar -xC build/pypi/
cd build/pypi

# delete junk

@@ -308,8 +308,7 @@ necho() {
    # remove type hints before build instead
    (cd copyparty; PYTHONPATH="..:$PYTHONPATH" "$pybin" ../../scripts/strip_hints/a.py; rm uh)

    licfile=$(realpath copyparty/res/COPYING.txt)
    (cd ../scripts; ./genlic.sh "$licfile")
    (cd ../scripts; ./genlic.py ../copyparty/res/COPYING.txt)
}

[ ! -e copyparty/web/deps/mini-fa.woff ] && [ $dl_wd ] && {

@@ -64,7 +64,7 @@ git archive hovudstraum | tar -xC "$rls_dir"
echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir"

scripts/genlic.sh "$rls_dir/copyparty/res/COPYING.txt"
scripts/genlic.py "$rls_dir/copyparty/res/COPYING.txt"

cd "$rls_dir"
find -type d -exec chmod 755 '{}' \+

@@ -30,4 +30,4 @@ d1420c8417fad7888766dd26b9706a87c63e8f33dceeb8e26d0056d5127b0b3ed9272e44b4b76113
2be320b4191f208cdd6af183c77ba2cf460ea52164ee45ac3ff17d6dfa57acd9deff016636c2dd42a21f4f6af977d5f72df7dacf599bebcf41757272354d14c1 pillow-10.4.0-cp312-cp312-win_amd64.whl
896ddddbd4b85e86e0600cb65eb4c07fbc7f3802d47e7f660411e20b5500831469b97ed4770f25820f4e75cbfac40308da624fd86d4f62e578149d5c276a9cde pyinstaller-6.10.0-py3-none-win_amd64.whl
873781decaeef07f6a79b0ed8b9f35f3fa534a1ea0d866991e40278a10818fa5b60c70b0d5828971b045364f1099694cd1e5d5d60d480acb93fcfbfbced4a09e pyinstaller_hooks_contrib-2024.8-py3-none-any.whl
912b710007c7b29f29c0097aff8f825412166eed7777a7cef135b14316e8fff31b5df56d26d835d8ca090468cc0e914730f201a56caa3dd6dbef2f91088942b1 python-3.12.7-amd64.exe
0f623c9ab52d050283e97a986ba626d86b04cd02fa7ffdf352740576940b142b264709abadb5d875c90f625b28103d7210b900e0d77f12c1c140108bd2a159aa python-3.12.8-amd64.exe
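
(these look like `sha512sum -c` style pins; a quick stdlib sketch of verifying one of them, assuming the file sits in the current folder:)

```py
import hashlib

fn = "python-3.12.8-amd64.exe"
want = "0f623c9ab52d050283e97a986ba626d86b04cd02fa7ffdf352740576940b142b264709abadb5d875c90f625b28103d7210b900e0d77f12c1c140108bd2a159aa"

h = hashlib.sha512()
with open(fn, "rb") as f:
    for blk in iter(lambda: f.read(1 << 20), b""):
        h.update(blk)
assert h.hexdigest() == want, "checksum mismatch!"
```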
@@ -291,6 +291,7 @@ var tl_browser = {
    "cl_uopts": "up2k switches",
    "cl_favico": "favicon",
    "cl_bigdir": "big dirs",
    "cl_hsort": "#sort",
    "cl_keytype": "key notation",
    "cl_hiddenc": "hidden columns",
    "cl_hidec": "hide",
@@ -333,6 +334,7 @@ var tl_browser = {

    "cdt_lim": "max number of files to show in a folder",
    "cdt_ask": "when scrolling to the bottom,$Ninstead of loading more files,$Nask what to do",
    "cdt_hsort": "how many sorting rules (<code>,sorthref</code>) to include in media-URLs. Setting this to 0 will also ignore sorting-rules included in media links when clicking them",

    "tt_entree": "show navpane (directory tree sidebar)$NHotkey: B",
    "tt_detree": "show breadcrumbs$NHotkey: B",
@@ -414,7 +416,7 @@ var tl_browser = {
    "fr_emore": "select at least one item to rename",
    "fd_emore": "select at least one item to delete",
    "fc_emore": "select at least one item to cut",
    "fcp_emore": "select at least one item to copy",
    "fcp_emore": "select at least one item to copy to clipboard",

    "fs_sc": "share the folder you're in",
    "fs_ss": "share the selected files",
@@ -470,9 +472,11 @@ var tl_browser = {
    "fcc_ok": "copied {0} items to clipboard",
    "fcc_warn": 'copied {0} items to clipboard\n\nbut: only <b>this</b> browser-tab can paste them\n(since the selection is so absolutely massive)',

    "fp_apply": "use these names",
    "fp_ecut": "first cut or copy some files / folders to paste / move\n\nnote: you can cut / paste across different browser tabs",
    "fp_ename": "these {0} items cannot be moved here (names already exist):",
    "fcp_ename": "these {0} items cannot be copied here (names already exist):",
    "fp_ename": "{0} items cannot be moved here because the names are already taken. Give them new names below to continue, or blank the name to skip them:",
    "fcp_ename": "{0} items cannot be copied here because the names are already taken. Give them new names below to continue, or blank the name to skip them:",
    "fp_emore": "there are still some filename collisions left to fix",
    "fp_ok": "move OK",
    "fcp_ok": "copy OK",
    "fp_busy": "moving {0} items...\n\n{1}",
@@ -604,6 +608,7 @@ var tl_browser = {
    "u_pott": "<p>files: <b>{0}</b> finished, <b>{1}</b> failed, <b>{2}</b> busy, <b>{3}</b> queued</p>",
    "u_ever": "this is the basic uploader; up2k needs at least<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
    "u_su2k": 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better',
    "u_uput": 'optimize for speed (skip checksum)',
    "u_ewrite": 'you do not have write-access to this folder',
    "u_eread": 'you do not have read-access to this folder',
    "u_enoi": 'file-search is not enabled in server config',
@@ -622,6 +627,7 @@ var tl_browser = {
    "u_hashdone": 'hashing done',
    "u_hashing": 'hash',
    "u_hs": 'handshaking...',
    "u_started": "the files are now being uploaded; see [🚀]",
    "u_dupdefer": "duplicate; will be processed after all other files",
    "u_actx": "click this text to prevent loss of<br />performance when switching to other windows/tabs",
    "u_fixed": "OK! Fixed it 👍",
@@ -657,6 +663,7 @@ var tl_browser = {
    "ue_la": 'you are currently logged in as "{0}"',
    "ue_sr": 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the magnifying glass 🔎 (next to the big SEARCH button), and try uploading again\n\nsorry',
    "ue_ta": 'try uploading again, it should work now',
    "ue_ab": "this file is already being uploaded into another folder, and that upload must be completed before the file can be uploaded elsewhere.\n\nYou can abort and forget the initial upload using the top-left 🧯",
    "ur_1uo": "OK: File uploaded successfully",
    "ur_auo": "OK: All {0} files uploaded successfully",
    "ur_1so": "OK: File found on server",

@@ -114,6 +114,7 @@ var tl_cpanel = {{
    "ab1": "disable no304",
    "ac1": "enable no304",
    "ad1": "enabling no304 will disable all caching; try this if k304 wasn't enough. This will waste a huge amount of network traffic!",
    "ae1": "active downloads:",
}},
}};

@@ -14,7 +14,7 @@ from tests import util as tu
from tests.util import Cfg


class TestDedup(unittest.TestCase):
class TestDedup(tu.TC):
    def setUp(self):
        self.td = tu.get_ramdisk()

@@ -92,7 +92,7 @@ class TestDedup(unittest.TestCase):
        HttpCli(self.conn.setbuf(buf)).run()
        ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
        print("CP <-- ", ret)
        self.assertIn(" 201 Created", ret[0])
        self.assertStart("HTTP/1.1 201 Created\r", ret[0])
        self.assertEqual("k\r\n", ret[1])
        return ret

@@ -15,7 +15,7 @@ from tests import util as tu
from tests.util import Cfg


class TestDedup(unittest.TestCase):
class TestDedup(tu.TC):
    def setUp(self):
        self.td = tu.get_ramdisk()

@@ -73,7 +73,7 @@ class TestDedup(unittest.TestCase):
        sfn, hs = self.do_post_hs(dn, fns[0], f1, True)
        for fn in fns[1:]:
            h, b = self.handshake(dn, fn, f1)
            self.assertIn(" 422 Unpro", h)
            self.assertStart("HTTP/1.1 422 Unpro", h)
            self.assertIn("a different location;", b)
        self.do_post_data(dn, fns[0], f1, True, sfn, hs)
        if not e2d:
@@ -158,10 +158,10 @@ class TestDedup(unittest.TestCase):
        rm = cms[irm]
        dn, fn, _ = rm
        h, b = self.curl("%s/%s?delete" % (dn, fn), meth="POST")
        self.assertIn(" 200 OK", h)
        self.assertStart("HTTP/1.1 200 OK", h)
        self.assertIn("deleted 1 files", b)
        h, b = self.curl("%s/%s" % (dn, fn))
        self.assertIn(" 404 Not Fo", h)
        self.assertStart("HTTP/1.1 404 Not Fo", h)
        for cm in cms:
            if cm == rm:
                continue

@@ -286,8 +286,8 @@ class TestDots(unittest.TestCase):
        self.assertIn('">folder</text>', self.curl(zs, "u2")[1])

        # fk enabled, so this should fail
        self.assertIn('">e404</text>', self.curl("dk,fk/f.t1?th=x", "u2")[1])
        self.assertIn('">e404</text>', self.curl("dk,fk/s1/f.t2?th=x", "u2")[1])
        self.assertIn('">e403</text>', self.curl("dk,fk/f.t1?th=x", "u2")[1])
        self.assertIn('">e403</text>', self.curl("dk,fk/s1/f.t2?th=x", "u2")[1])

        # but dk should return correct filekeys, so try that
        zs = "dk,fk/%s&th=x" % (zj["files"][0]["href"])
@@ -332,8 +332,8 @@ class TestDots(unittest.TestCase):
        self.assertIn('">folder</text>', self.curl(zs, "u2")[1])

        # fk enabled, so this should fail
        self.assertIn('">e404</text>', self.curl("dks,fk/f.t1?th=x", "u2")[1])
        self.assertIn('">e404</text>', self.curl("dks,fk/s1/f.t2?th=x", "u2")[1])
        self.assertIn('">e403</text>', self.curl("dks,fk/f.t1?th=x", "u2")[1])
        self.assertIn('">e403</text>', self.curl("dks,fk/s1/f.t2?th=x", "u2")[1])

        # but dk should return correct filekeys, so try that
        zs = "dks,fk/%s&th=x" % (zj["files"][0]["href"])

@@ -124,7 +124,7 @@ class TestDXML(unittest.TestCase):
        lk = parse_xml(txt)
        self.assertEqual(lk.tag, "{DAV:}lockinfo")

        if not lk.find(r"./{DAV:}depth"):
        if lk.find(r"./{DAV:}depth") is None:
            lk.append(mktnod("D:depth", "infinity"))

        lk.append(mkenod("D:timeout", mktnod("D:href", "Second-3600")))

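
(context for the fix above: ElementTree elements are falsy when they have no children, so `not lk.find(...)` was true even when the tag existed; a standalone sketch of the gotcha:)

```py
import xml.etree.ElementTree as ET

lk = ET.fromstring("<a><depth/></a>")
el = lk.find("depth")
assert el is not None  # the node exists...
assert not el          # ...but it's falsy, since it has no children
```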
@@ -23,7 +23,7 @@ def hdr(query):
    return h.format(query).encode("utf-8")


class TestHooks(unittest.TestCase):
class TestHooks(tu.TC):
    def setUp(self):
        self.conn: Optional[tu.VHttpConn] = None
        self.td = tu.get_ramdisk()
@@ -74,7 +74,7 @@ class TestHooks(unittest.TestCase):
        self.cinit()

        h, b = upfun(url_up)
        self.assertIn("201 Created", h)
        self.assertStart("HTTP/1.1 201 Created\r", h)
        h, b = self.curl(url_dl)
        self.assertEqual(b, "ok %s\n" % (url_up))

@@ -15,7 +15,7 @@ import zipfile
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
from tests.util import Cfg, eprint
from tests.util import Cfg, eprint, pfind2ls


def hdr(query):
@@ -129,6 +129,24 @@ class TestHttpCli(unittest.TestCase):
        else:
            ref = []

        h, b = self.propfind(durl, 1)
        fns = [x for x in pfind2ls(b) if not x.endswith("/")]
        if ref:
            self.assertIn("<D:propstat>", b)
        elif not rok and not wok:
            self.assertListEqual([], fns)
        else:
            self.assertIn("<D:multistatus", b)

        h, b = self.propfind(durl, 0)
        fns = [x for x in pfind2ls(b) if not x.endswith("/")]
        if ref:
            self.assertIn("<D:propstat>", b)
        elif not rok:
            self.assertListEqual([], fns)
        else:
            self.assertIn("<D:multistatus", b)

        if test_tar:
            url = durl + "?tar"
            h, b = self.curl(url, True)
@@ -225,5 +243,12 @@ class TestHttpCli(unittest.TestCase):

        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def propfind(self, url, depth=1):
        zs = "PROPFIND /%s HTTP/1.1\r\nDepth: %d\r\nPW: o\r\nConnection: close\r\n\r\n"
        buf = zs % (url, depth)
        conn = self.conn.setbuf(buf.encode("utf-8"))
        HttpCli(conn).run()
        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def log(self, src, msg, c=0):
        print(msg)

@@ -21,7 +21,7 @@ def hdr(query):
    return h.format(query).encode("utf-8")


class TestMetrics(unittest.TestCase):
class TestMetrics(tu.TC):
    def setUp(self):
        self.td = tu.get_ramdisk()
        os.chdir(self.td)
@@ -55,7 +55,7 @@ class TestMetrics(unittest.TestCase):
        self.conn = self.fstab = self.metrics = None
        self.cinit()
        h, b = self.curl(".cpr/metrics")
        self.assertIn(".1 200 OK", h)
        self.assertStart("HTTP/1.1 200 OK\r", h)
        ptns = r"""
cpp_uptime_seconds [0-9]\.[0-9]{3}$
cpp_boot_unixtime_seconds [0-9]{7,10}\.[0-9]{3}$

@@ -20,7 +20,7 @@ TODO inject tags into db and verify ls
"""


class TestDedup(unittest.TestCase):
class TestDedup(tu.TC):
    def setUp(self):
        self.td = tu.get_ramdisk()

@@ -130,7 +130,7 @@ class TestDedup(unittest.TestCase):
        HttpCli(self.conn.setbuf(buf)).run()
        ret = self.conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
        print("MV <-- ", ret)
        self.assertIn(" 201 Created", ret[0])
        self.assertStart("HTTP/1.1 201 Created\r", ret[0])
        self.assertEqual("k\r\n", ret[1])
        return ret

264 tests/test_webdav.py Normal file
@@ -0,0 +1,264 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

import os
import shutil
import tempfile
import time
import unittest

from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
from tests import util as tu
from tests.util import TC, Cfg, pfind2ls

# tcpdump of `rclone ls dav:`
RCLONE_PROPFIND = """PROPFIND /%s HTTP/1.1
Host: 127.0.0.1:3923
User-Agent: rclone/v1.67.0
Content-Length: 308
Authorization: Basic azp1
Depth: 1
Referer: http://127.0.0.1:3923/
Accept-Encoding: gzip

<?xml version="1.0"?>
<d:propfind xmlns:d="DAV:" xmlns:oc="http://owncloud.org/ns" xmlns:nc="http://nextcloud.org/ns">
<d:prop>
<d:displayname />
<d:getlastmodified />
<d:getcontentlength />
<d:resourcetype />
<d:getcontenttype />
<oc:checksums />
<oc:permissions />
</d:prop>
</d:propfind>
"""


# tcpdump of `rclone copy fa dav:/a/` (it does a mkcol first)
RCLONE_MKCOL = """MKCOL /%s HTTP/1.1
Host: 127.0.0.1:3923
User-Agent: rclone/v1.67.0
Authorization: Basic azp1
Referer: http://127.0.0.1:3923/
Accept-Encoding: gzip
\n"""


# tcpdump of `rclone copy fa dav:/a/` (the actual upload)
RCLONE_PUT = """PUT /%s HTTP/1.1
Host: 127.0.0.1:3923
User-Agent: rclone/v1.67.0
Content-Length: 6
Authorization: Basic azp1
Content-Type: application/octet-stream
Oc-Checksum: SHA1:f5e3dc3fb27af53cd0005a1184e2df06481199e8
Referer: http://127.0.0.1:3923/
X-Oc-Mtime: 1689453578
Accept-Encoding: gzip

fgsfds"""


# tcpdump of `rclone delete dav:/a/d1/` (it does propfind recursively and then this on each file)
# (note: `rclone rmdirs dav:/a/d1/` does the same thing but just each folder after asserting they're empty)
RCLONE_DELETE = """DELETE /%s HTTP/1.1
Host: 127.0.0.1:3923
User-Agent: rclone/v1.67.0
Authorization: Basic azp1
Referer: http://127.0.0.1:3923/
Accept-Encoding: gzip
\n"""


# tcpdump of `rclone move dav:/a/d1/d2 /a/d1/d3` (it does a lot of boilerplate propfinds/mkcols before)
RCLONE_MOVE = """MOVE /%s HTTP/1.1
Host: 127.0.0.1:3923
User-Agent: rclone/v1.67.0
Authorization: Basic azp1
Destination: http://127.0.0.1:3923/%s
Overwrite: T
Referer: http://127.0.0.1:3923/
Accept-Encoding: gzip
\n"""


class TestHttpCli(TC):
    def setUp(self):
        self.td = tu.get_ramdisk()
        self.maxDiff = 99999

    def tearDown(self):
        self.conn.shutdown()
        os.chdir(tempfile.gettempdir())
        shutil.rmtree(self.td)

    def test(self):
        td = os.path.join(self.td, "vfs")
        os.mkdir(td)
        os.chdir(td)

        self.fn = "g{:x}g".format(int(time.time() * 3))
        vcfg = [
            "r:r:r,u",
            "w:w:w,u",
            "a:a:A,u",
            "x:x:r,u2",
            "x/r:x/r:r,u",
            "x/x:x/x:r,u2",
        ]
        self.args = Cfg(v=vcfg, a=["u:u", "u2:u2"])
        self.asrv = AuthSrv(self.args, self.log)
        self.conn = tu.VHttpConn(self.args, self.asrv, self.log, b"", True)

        self.fns = ["%s/%s" % (zs.split(":")[0], self.fn) for zs in vcfg]
        for fp in self.fns:
            try:
                os.makedirs(os.path.dirname(fp))
            except:
                pass
            with open(fp, "wb") as f:
                f.write(("ok %s\n" % (fp,)).encode("utf-8"))

        ##
        ## depth:1 (regular listing)

        # unmapped root; should return list of volumes
        h, b = self.req(RCLONE_PROPFIND % ("",))
        fns = pfind2ls(b)
        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
        self.assertListEqual(fns, ["/", "/a/", "/r/"])

        # toplevel of a volume; has one file
        h, b = self.req(RCLONE_PROPFIND % ("a",))
        fns = pfind2ls(b)
        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
        self.assertListEqual(fns, ["/a/", "/a/" + self.fn])

        # toplevel of a volume; has one file
        h, b = self.req(RCLONE_PROPFIND % ("r",))
        fns = pfind2ls(b)
        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
        self.assertListEqual(fns, ["/r/", "/r/" + self.fn])

        # toplevel of write-only volume; has one file, will not list
        h, b = self.req(RCLONE_PROPFIND % ("w",))
        fns = pfind2ls(b)
        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
        self.assertListEqual(fns, ["/w/"])

        ##
        ## auth challenge

        bad_pfind = RCLONE_PROPFIND.replace("Authorization: Basic azp1\n", "")
        bad_put = RCLONE_PUT.replace("Authorization: Basic azp1\n", "")
        urls = ["", "r", "w", "a"]
        urls += [x + "/" + self.fn for x in urls[1:]]
        for url in urls:
            for q in (bad_pfind, bad_put):
                h, b = self.req(q % (url,))
                self.assertStart("HTTP/1.1 401 Unauthorized\r", h)
                self.assertIn('\nWWW-Authenticate: Basic realm="a"\r', h)
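
        # (fyi, not part of the original test: the "Authorization: Basic azp1"
        # in the captures above is just base64 of "k:u", a user:password
        # pair -- plain http basic-auth, nothing copyparty-specific)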

        ##
        ## depth:0 (recursion)

        # depth:0 from unmapped root should work;
        # will NOT list contents of /x/r/ due to current limitations
        # (stops descending at first non-accessible volume)
        recursive = RCLONE_PROPFIND.replace("Depth: 1\n", "")
        h, b = self.req(recursive % ("",))
        fns = pfind2ls(b)
        expect = ["/", "/a/", "/r/"]
        expect += [x + self.fn for x in expect[1:]]
        self.assertListEqual(fns, expect)

        # same thing here...
        h, b = self.req(recursive % ("/x",))
        fns = pfind2ls(b)
        self.assertListEqual(fns, [])

        # but this obviously works
        h, b = self.req(recursive % ("/x/r",))
        fns = pfind2ls(b)
        self.assertListEqual(fns, ["/x/r/", "/x/r/" + self.fn])

        ##
        ## uploading

        # rclone does a propfind on the target file first; expects 404
        h, b = self.req(RCLONE_PROPFIND % ("a/fa",))
        self.assertStart("HTTP/1.1 404 Not Found\r", h)

        # then it does a mkcol (mkdir), expecting 405 (exists)
        h, b = self.req(RCLONE_MKCOL % ("a",))
        self.assertStart("HTTP/1.1 405 Method Not Allowed\r", h)

        # then it uploads the file
        h, b = self.req(RCLONE_PUT % ("a/fa",))
        self.assertStart("HTTP/1.1 201 Created\r", h)

        # then it does a propfind to confirm
        h, b = self.req(RCLONE_PROPFIND % ("a/fa",))
        fns = pfind2ls(b)
        self.assertStart("HTTP/1.1 207 Multi-Status\r", h)
        self.assertListEqual(fns, ["/a/fa"])

        ##
        ## upload into set of subfolders that don't exist yet

        # rclone does this:
        # propfind /a/d1/d2/fa => 404
        # mkcol /a/d1/d2/ => 409
        # propfind /a/d1/d2/ => 404
        # mkcol /a/d1/ => 201
        # mkcol /a/d1/d2/ => 201
        # put /a/d1/d2/fa => 201
        # propfind /a/d1/d2/fa => 207
        # ...some of which already tested above;

        h, b = self.req(RCLONE_PROPFIND % ("/a/d1/d2/",))
        self.assertStart("HTTP/1.1 404 Not Found\r", h)

        h, b = self.req(RCLONE_PROPFIND % ("/a/d1/",))
        self.assertStart("HTTP/1.1 404 Not Found\r", h)

        h, b = self.req(RCLONE_MKCOL % ("/a/d1/d2/",))
        self.assertStart("HTTP/1.1 409 Conflict\r", h)

        h, b = self.req(RCLONE_MKCOL % ("/a/d1/",))
        self.assertStart("HTTP/1.1 201 Created\r", h)

        h, b = self.req(RCLONE_MKCOL % ("/a/d1/d2/",))
        self.assertStart("HTTP/1.1 201 Created\r", h)

        h, b = self.req(RCLONE_PUT % ("a/d1/d2/fa",))
        self.assertStart("HTTP/1.1 201 Created\r", h)

        ##
        ## rename

        h, b = self.req(RCLONE_MOVE % ("a/d1/d2/", "a/d1/d3/"))
        self.assertStart("HTTP/1.1 201 Created\r", h)
        self.assertListEqual(os.listdir("a/d1"), ["d3"])

        ##
        ## delete

        h, b = self.req(RCLONE_DELETE % ("a/d1",))
        self.assertStart("HTTP/1.1 200 OK\r", h)
        if os.path.exists("a/d1"):
            self.fail("a/d1 still exists")

    def req(self, q):
        h, b = q.split("\n\n", 1)
        q = h.replace("\n", "\r\n") + "\r\n\r\n" + b
        conn = self.conn.setbuf(q.encode("utf-8"))
        HttpCli(conn).run()
        return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

    def log(self, src, msg, c=0):
        print(msg)

@@ -12,6 +12,7 @@ import sys
import tempfile
import threading
import time
import unittest
from argparse import Namespace

import jinja2
@@ -118,20 +119,34 @@ def get_ramdisk():
    return subdir(ret)


def pfind2ls(xml):
    return [x.split("<", 1)[0] for x in xml.split("<D:href>")[1:]]
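

# (example of what pfind2ls pulls out of a PROPFIND response body:
#   pfind2ls("<D:multistatus><D:response><D:href>/a/</D:href></D:response></D:multistatus>")
#   returns ["/a/"] -- just the href paths, in document order)
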
class TC(unittest.TestCase):
    def __init__(self, *a, **ka):
        super(TC, self).__init__(*a, **ka)

    def assertStart(self, member, container, msg=None):
        if not container.startswith(member):
            standardMsg = "%s not found in %s" % (member, container)
            self.fail(self._formatMessage(msg, standardMsg))


class Cfg(Namespace):
    def __init__(self, a=None, v=None, c=None, **ka0):
        ka = {}

        ex = "chpw daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_clone no_cp no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nsort nw og og_no_head og_s_title ohead q rand re_dirsz rss smb srch_dbg stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
        ex = "chpw daw dav_auth dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_bauth no_clone no_cp no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nsort nw og og_no_head og_s_title ohead q rand re_dirsz rss smb srch_dbg srch_excl stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
        ka.update(**{k: False for k in ex.split()})

        ex = "dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash plain_ip"
        ex = "dav_inf dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash plain_ip"
        ka.update(**{k: True for k in ex.split()})

        ex = "ah_cli ah_gen css_browser hist ipu js_browser js_other mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua"
        ka.update(**{k: None for k in ex.split()})

        ex = "hash_mt safe_dedup srch_time u2abort u2j u2sz"
        ex = "hash_mt hsortn safe_dedup srch_time u2abort u2j u2sz"
        ka.update(**{k: 1 for k in ex.split()})

        ex = "au_vol dl_list mtab_age reg_cap s_thead s_tbody th_convt"