Compare commits

12 Commits

| SHA1 |
|---|
| d3ccd3f174 |
| cb6de0387d |
| abff40519d |
| 55c74ad164 |
| 673b4f7e23 |
| d11e02da49 |
| 8790f89e08 |
| 33442026b8 |
| 03193de6d0 |
| 8675ff40f3 |
| d88889d3fc |
| 6f244d4335 |

.gitignore (vendored, 1 change)

```diff
@@ -37,6 +37,7 @@ up.*.txt
 .hist/
 scripts/docker/*.out
 scripts/docker/*.err
+/perf.*

 # nix build output link
 result
```

bin/up2k.py (43 changes)

```diff
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals

-S_VERSION = "1.6"
-S_BUILD_DT = "2023-04-20"
+S_VERSION = "1.7"
+S_BUILD_DT = "2023-04-26"

 """
 up2k.py: upload to copyparty
@@ -21,6 +21,7 @@ import math
 import time
 import atexit
 import signal
+import socket
 import base64
 import hashlib
 import platform
@@ -58,6 +59,7 @@ PY2 = sys.version_info < (3,)
 if PY2:
     from Queue import Queue
     from urllib import quote, unquote
+    from urlparse import urlsplit, urlunsplit

     sys.dont_write_bytecode = True
     bytes = str
@@ -65,6 +67,7 @@ else:
     from queue import Queue
     from urllib.parse import unquote_to_bytes as unquote
     from urllib.parse import quote_from_bytes as quote
+    from urllib.parse import urlsplit, urlunsplit

     unicode = str

@@ -337,6 +340,30 @@ class CTermsize(object):
 ss = CTermsize()


+def undns(url):
+    usp = urlsplit(url)
+    hn = usp.hostname
+    gai = None
+    eprint("resolving host [{0}] ...".format(hn), end="")
+    try:
+        gai = socket.getaddrinfo(hn, None)
+        hn = gai[0][4][0]
+    except:
+        t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n"
+        eprint(t.format(repr(gai)))
+        raise
+
+    if usp.port:
+        hn = "{0}:{1}".format(hn, usp.port)
+    if usp.username or usp.password:
+        hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn)
+
+    usp = usp._replace(netloc=hn)
+    url = urlunsplit(usp)
+    eprint(" {0}".format(url))
+    return url
+
+
 def _scd(err, top):
     """non-recursive listing of directory contents, along with stat() info"""
     with os.scandir(top) as dh:
@@ -853,7 +880,7 @@ class Ctl(object):
             print("  ls ~{0}".format(srd))
             zb = self.ar.url.encode("utf-8")
             zb += quotep(rd.replace(b"\\", b"/"))
-            r = req_ses.get(zb + b"?ls&dots", headers=headers)
+            r = req_ses.get(zb + b"?ls&lt&dots", headers=headers)
             if not r:
                 raise Exception("HTTP {0}".format(r.status_code))

@@ -931,7 +958,7 @@ class Ctl(object):
         upath = file.abs.decode("utf-8", "replace")
         if not VT100:
-            upath = upath[4:]
+            upath = upath.lstrip("\\?")

         hs, sprs = handshake(self.ar, file, search)
         if search:
@@ -1073,6 +1100,7 @@ source file/folder selection uses rsync syntax, meaning that:
     ap = app.add_argument_group("compatibility")
     ap.add_argument("--cls", action="store_true", help="clear screen before start")
+    ap.add_argument("--rh", action="store_true", help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)")

     ap = app.add_argument_group("folder sync")
     ap.add_argument("--dl", action="store_true", help="delete local files after uploading")
@@ -1096,7 +1124,7 @@ source file/folder selection uses rsync syntax, meaning that:
         ar = app.parse_args()
     finally:
         if EXE and not sys.argv[1:]:
-            print("*** hit enter to exit ***")
+            eprint("*** hit enter to exit ***")
             try:
                 input()
             except:
@@ -1129,8 +1157,11 @@ source file/folder selection uses rsync syntax, meaning that:
         with open(fn, "rb") as f:
             ar.a = f.read().decode("utf-8").strip()

+    if ar.rh:
+        ar.url = undns(ar.url)
+
     if ar.cls:
-        print("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")
+        eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="")

     ctl = Ctl(ar)
```
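
For context on the new `--rh` flag: `undns()` resolves the hostname once and pins the resulting IP into the URL, so later uploads skip DNS on flaky networks. A minimal standalone sketch of the same idea (stdlib only, not the script's exact code):

```python
# resolve the hostname once and pin the IP into the URL's netloc;
# credentials are omitted for brevity, and note that TLS certificate
# validation will fail once the host is replaced by a bare IP
import socket
from urllib.parse import urlsplit, urlunsplit

def pin_dns(url):
    usp = urlsplit(url)
    ip = socket.getaddrinfo(usp.hostname, None)[0][4][0]
    netloc = "%s:%s" % (ip, usp.port) if usp.port else ip
    return urlunsplit(usp._replace(netloc=netloc))

print(pin_dns("http://example.com:3923/inc/"))  # e.g. http://93.184.216.34:3923/inc/
```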

PKGBUILD

```diff
@@ -1,6 +1,6 @@
 # Maintainer: icxes <dev.null@need.moe>
 pkgname=copyparty
-pkgver="1.6.12"
+pkgver="1.6.14"
 pkgrel=1
 pkgdesc="Portable file sharing hub"
 arch=("any")
@@ -26,7 +26,7 @@ source=("${url}/releases/download/v${pkgver}/${pkgname}-sfx.py"
         "https://raw.githubusercontent.com/9001/${pkgname}/v${pkgver}/LICENSE"
 )
 backup=("etc/${pkgname}.d/init" )
-sha256sums=("ce3702634fecf7cd6a5c79709a8e6cb1e9fe0748ec4754a278be7a60b618dd42"
+sha256sums=("f3294a22fdd086605fe8d14bfeff620c6cff45c9019fd7d4af1a0ddd9e0d3947"
             "b8565eba5e64dedba1cf6c7aac7e31c5a731ed7153d6810288a28f00a36c28b2"
-            "f65c207e0670f9d78ad2e399bda18d5502ff30d2ac79e0e7fc48e7fbdc39afdc"
+            "c4f396b083c9ec02ad50b52412c84d2a82be7f079b2d016e1c9fad22d68285ff"
```

release pin (JSON)

```diff
@@ -1,5 +1,5 @@
 {
-  "url": "https://github.com/9001/copyparty/releases/download/v1.6.12/copyparty-sfx.py",
-  "version": "1.6.12",
-  "hash": "sha256-zjcCY0/s981qXHlwmo5ssen+B0jsR1SieL56YLYY3UI="
+  "url": "https://github.com/9001/copyparty/releases/download/v1.6.14/copyparty-sfx.py",
+  "version": "1.6.14",
+  "hash": "sha256-8ylKIv3QhmBf6NFL/v9iDGz/RckBn9fUrxoN3Z4NOUc="
 }
```

```diff
@@ -279,8 +279,8 @@ def ensure_cert(al: argparse.Namespace) -> None:
     with open(al.cert, "rb") as f:
         buf = f.read()
-        o1 = buf.find(b"-BEGIN PRIVATE KEY-")
-        o2 = buf.find(b"-BEGIN CERTIFICATE-")
+        o1 = buf.find(b" PRIVATE KEY-")
+        o2 = buf.find(b" CERTIFICATE-")
         m = "unsupported certificate format: "
         if o1 < 0:
             raise Exception(m + "no private key inside pem")
```
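
The shorter needles make the check format-agnostic: a PKCS#8 key says `BEGIN PRIVATE KEY` while a traditional RSA key says `BEGIN RSA PRIVATE KEY`, and only the former matched the old search. A quick illustration with hypothetical header lines:

```python
# both common PEM header styles now match the relaxed needle
pkcs8 = b"-----BEGIN PRIVATE KEY-----"
rsa = b"-----BEGIN RSA PRIVATE KEY-----"
for buf in (pkcs8, rsa):
    assert buf.find(b" PRIVATE KEY-") > 0  # new check: hits both

print(rsa.find(b"-BEGIN PRIVATE KEY-"))  # old check: -1, RSA-style keys were rejected
```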

```diff
@@ -619,9 +619,9 @@ def get_sects():
       \033[32macid\033[0m = extremely safe but slow; the old default. Should never lose any data no matter what

-      \033[32mswal\033[0m = 2.4x faster uploads yet 99.9%% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes
+      \033[32mswal\033[0m = 2.4x faster uploads yet 99.9% as safe -- theoretical chance of losing metadata for the ~200 most recently uploaded files if there's a power-loss or your OS crashes

-      \033[32mwal\033[0m = another 21x faster on HDDs yet 90%% as safe; same pitfall as \033[33mswal\033[0m except more likely
+      \033[32mwal\033[0m = another 21x faster on HDDs yet 90% as safe; same pitfall as \033[33mswal\033[0m except more likely

       \033[32myolo\033[0m = another 1.5x faster, and removes the occasional sudden upload-pause while the disk syncs, but now you're at risk of losing the entire database in a powerloss / OS-crash

@@ -710,6 +710,8 @@ def add_network(ap):
         ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
     else:
         ap2.add_argument("--freebind", action="store_true", help="allow listening on IPs which do not yet exist, for example if the network interfaces haven't finished going up. Only makes sense for IPs other than '0.0.0.0', '127.0.0.1', '::', and '::1'. May require running as root (unless net.ipv6.ip_nonlocal_bind)")
+    ap2.add_argument("--s-thead", metavar="SEC", type=int, default=120, help="socket timeout (read request header)")
+    ap2.add_argument("--s-tbody", metavar="SEC", type=float, default=186, help="socket timeout (read/write request/response bodies). Use 60 on fast servers (default is extremely safe). Disable with 0 if reverse-proxied for a 2%% speed boost")
     ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
     ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="debug: socket write delay in seconds")
     ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="debug: response delay in seconds")
@@ -781,6 +783,7 @@ def add_webdav(ap):
     ap2.add_argument("--daw", action="store_true", help="enable full write support, even if client may not be webdav. \033[1;31mWARNING:\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
     ap2.add_argument("--dav-inf", action="store_true", help="allow depth:infinite requests (recursive file listing); extremely server-heavy but required for spec compliance -- luckily few clients rely on this")
     ap2.add_argument("--dav-mac", action="store_true", help="disable apple-garbage filter -- allow macos to create junk files (._* and .DS_Store, .Spotlight-*, .fseventsd, .Trashes, .AppleDouble, __MACOS)")
+    ap2.add_argument("--dav-rt", action="store_true", help="show symlink-destination's lastmodified instead of the link itself; always enabled for recursive listings (volflag=davrt)")


 def add_smb(ap):
```

```diff
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 6, 13)
+VERSION = (1, 6, 15)
 CODENAME = "cors k"
-BUILD_DT = (2023, 4, 23)
+BUILD_DT = (2023, 4, 26)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
```

```diff
@@ -67,9 +67,9 @@ class AXS(object):
         self.upget: set[str] = set(upget or [])

     def __repr__(self) -> str:
-        return "AXS({})".format(
+        return "AXS(%s)" % (
             ", ".join(
-                "{}={!r}".format(k, self.__dict__[k])
+                "%s=%r" % (k, self.__dict__[k])
                 for k in "uread uwrite umove udel uget upget".split()
             )
         )
@@ -304,9 +304,9 @@ class VFS(object):
         self.all_vols = {}

     def __repr__(self) -> str:
-        return "VFS({})".format(
+        return "VFS(%s)" % (
             ", ".join(
-                "{}={!r}".format(k, self.__dict__[k])
+                "%s=%r" % (k, self.__dict__[k])
                 for k in "realpath vpath axs flags".split()
             )
         )
@@ -548,6 +548,8 @@ class VFS(object):
         seen = seen[:] + [fsroot]
         rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
         rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
+        # if lstat: ignore folder symlinks since copyparty will never make those
+        # (and we definitely don't want to descend into them)

         rfiles.sort()
         rdirs.sort()
```
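
These `__repr__` rewrites are part of a release-wide swap from `str.format` to `%` formatting (it continues through httpcli, svchub, up2k and util below); on CPython the `%` operator avoids the method-dispatch and format-spec parsing overhead of `.format`, which adds up in hot paths. A rough way to measure it yourself:

```python
# micro-benchmark; exact numbers vary by interpreter and version,
# but on CPython the %-operator is typically the faster of the two
import timeit

t_format = timeit.timeit('"{}={!r}".format("k", 1)', number=1_000_000)
t_percent = timeit.timeit('"%s=%r" % ("k", 1)', number=1_000_000)
print("format: %.2fs  percent: %.2fs" % (t_format, t_percent))
```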

```diff
@@ -13,6 +13,7 @@ def vf_bmap() -> dict[str, str]:
         "no_dedup": "copydupes",
         "no_dupe": "nodupe",
         "no_forget": "noforget",
+        "dav_rt": "davrt",
     }
     for k in (
         "dotsrch",
@@ -142,7 +143,8 @@ flagcats = {
         "lg_sbf": "list of *logue-sandbox safeguards to disable",
     },
     "others": {
-        "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission'
+        "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
+        "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
     },
 }
```

```diff
@@ -113,7 +113,8 @@ class FtpFs(AbstractedFS):
     def __init__(
         self, root: str, cmd_channel: Any
     ) -> None:  # pylint: disable=super-init-not-called
-        self.h = self.cmd_channel = cmd_channel  # type: FTPHandler
+        self.h = cmd_channel  # type: FTPHandler
+        self.cmd_channel = cmd_channel  # type: FTPHandler
         self.hub: "SvcHub" = cmd_channel.hub
         self.args = cmd_channel.args
         self.uname = cmd_channel.uname
```

```diff
@@ -219,7 +219,7 @@ class HttpCli(object):
         try:
             self.s.settimeout(2)
-            headerlines = read_header(self.sr)
+            headerlines = read_header(self.sr, self.args.s_thead, self.args.s_thead)
             self.in_hdr_recv = False
             if not headerlines:
                 return False
@@ -266,7 +266,7 @@ class HttpCli(object):
         )
         self.host = self.headers.get("host") or ""
         if not self.host:
-            zs = "{}:{}".format(*list(self.s.getsockname()[:2]))
+            zs = "%s:%s" % self.s.getsockname()[:2]
             self.host = zs[7:] if zs.startswith("::ffff:") else zs

         n = self.args.rproxy
@@ -403,7 +403,11 @@ class HttpCli(object):
             self.get_pwd_cookie(self.pw)

         if self.is_rclone:
+            # dots: always include dotfiles if permitted
+            # lt: probably more important showing the correct timestamps of any dupes it just uploaded rather than the lastmod time of any non-copyparty-managed symlinks
+            # b: basic-browser if it tries to parse the html listing
             uparam["dots"] = ""
+            uparam["lt"] = ""
             uparam["b"] = ""
             cookies["b"] = ""

@@ -416,6 +420,8 @@ class HttpCli(object):
             self.can_upget,
         ) = self.asrv.vfs.can_access(self.vpath, self.uname)

+        self.s.settimeout(self.args.s_tbody or None)
+
         try:
             cors_k = self._cors()
             if self.mode in ("GET", "HEAD"):
@@ -530,7 +536,7 @@ class HttpCli(object):
         mime: Optional[str] = None,
         headers: Optional[dict[str, str]] = None,
     ) -> None:
-        response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
+        response = ["%s %s %s" % (self.http_ver, status, HTTPCODE[status])]

         if length is not None:
             response.append("Content-Length: " + unicode(length))
@@ -554,11 +560,10 @@ class HttpCli(object):
             self.out_headers["Content-Type"] = mime

         for k, zs in list(self.out_headers.items()) + self.out_headerlist:
-            response.append("{}: {}".format(k, zs))
+            response.append("%s: %s" % (k, zs))

         try:
             # best practice to separate headers and body into different packets
-            self.s.settimeout(None)
             self.s.sendall("\r\n".join(response).encode("utf-8") + b"\r\n\r\n")
         except:
             raise Pebkac(400, "client d/c while replying headers")
@@ -621,7 +626,7 @@ class HttpCli(object):
         if not kv:
             return ""

-        r = ["{}={}".format(k, quotep(zs)) if zs else k for k, zs in kv.items()]
+        r = ["%s=%s" % (k, quotep(zs)) if zs else k for k, zs in kv.items()]
         return "?" + "&".join(r)

     def redirect(
@@ -864,10 +869,11 @@ class HttpCli(object):
         props = set(props_lst)
         vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False, err=401)
+        tap = vn.canonical(rem)
         depth = self.headers.get("depth", "infinity").lower()

         try:
-            topdir = {"vp": "", "st": bos.stat(vn.canonical(rem))}
+            topdir = {"vp": "", "st": bos.stat(tap)}
         except OSError as ex:
             if ex.errno != errno.ENOENT:
                 raise
@@ -883,6 +889,9 @@ class HttpCli(object):
             self.reply(zb, 403, "application/xml; charset=utf-8")
             return True

+        # this will return symlink-target timestamps
+        # because lstat=true would not recurse into subfolders
+        # and this is a rare case where we actually want that
         fgen = vn.zipgen(
             rem,
             rem,
@@ -896,7 +905,11 @@ class HttpCli(object):
         elif depth == "1":
             _, vfs_ls, vfs_virt = vn.ls(
-                rem, self.uname, not self.args.no_scandir, [[True, False]]
+                rem,
+                self.uname,
+                not self.args.no_scandir,
+                [[True, False]],
+                lstat="davrt" not in vn.flags,
             )
             if not self.args.ed:
                 names = set(exclude_dotfiles([x[0] for x in vfs_ls]))
@@ -930,14 +943,23 @@ class HttpCli(object):
         for x in fgen:
             rp = vjoin(vtop, x["vp"])
             st: os.stat_result = x["st"]
+            mtime = st.st_mtime
+            if stat.S_ISLNK(st.st_mode):
+                try:
+                    st = bos.stat(os.path.join(tap, x["vp"]))
+                except:
+                    continue
+
             isdir = stat.S_ISDIR(st.st_mode)

-            t = "<D:response><D:href>/{}{}</D:href><D:propstat><D:prop>"
-            ret += t.format(quotep(rp), "/" if isdir and rp else "")
+            ret += "<D:response><D:href>/%s%s</D:href><D:propstat><D:prop>" % (
+                quotep(rp),
+                "/" if isdir and rp else "",
+            )

             pvs: dict[str, str] = {
                 "displayname": html_escape(rp.split("/")[-1]),
-                "getlastmodified": formatdate(st.st_mtime, usegmt=True),
+                "getlastmodified": formatdate(mtime, usegmt=True),
                 "resourcetype": '<D:collection xmlns:D="DAV:"/>' if isdir else "",
                 "supportedlock": '<D:lockentry xmlns:D="DAV:"><D:lockscope><D:exclusive/></D:lockscope><D:locktype><D:write/></D:locktype></D:lockentry>',
             }
@@ -949,13 +971,13 @@ class HttpCli(object):
             if k not in props:
                 continue
             elif v:
-                ret += "<D:{0}>{1}</D:{0}>".format(k, v)
+                ret += "<D:%s>%s</D:%s>" % (k, v, k)
             else:
-                ret += "<D:{}/>".format(k)
+                ret += "<D:%s/>" % (k,)

         ret += "</D:prop><D:status>HTTP/1.1 200 OK</D:status></D:propstat>"

-        missing = ["<D:{}/>".format(x) for x in props if x not in pvs]
+        missing = ["<D:%s/>" % (x,) for x in props if x not in pvs]
         if missing and clen:
             t = "<D:propstat><D:prop>{}</D:prop><D:status>HTTP/1.1 404 Not Found</D:status></D:propstat>"
             ret += t.format("".join(missing))
@@ -1206,7 +1228,6 @@ class HttpCli(object):
         if self.headers.get("expect", "").lower() == "100-continue":
             try:
-                self.s.settimeout(None)
                 self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
             except:
                 raise Pebkac(400, "client d/c before 100 continue")
@@ -1218,7 +1239,6 @@ class HttpCli(object):
         if self.headers.get("expect", "").lower() == "100-continue":
             try:
-                self.s.settimeout(None)
                 self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
             except:
                 raise Pebkac(400, "client d/c before 100 continue")
@@ -1623,7 +1643,7 @@ class HttpCli(object):
         spd1 = get_spd(nbytes, self.t0)
         spd2 = get_spd(self.conn.nbyte, self.conn.t0)
-        return "{} {} n{}".format(spd1, spd2, self.conn.nreq)
+        return "%s %s n%s" % (spd1, spd2, self.conn.nreq)

     def handle_post_multipart(self) -> bool:
         self.parser = MultipartParser(self.log, self.sr, self.headers)
@@ -3035,7 +3055,7 @@ class HttpCli(object):
         if self.is_vproxied:
             parents = self.args.R.split("/")
             for parent in parents[::-1]:
-                ret = {"k{}".format(parent): ret, "a": []}
+                ret = {"k%s" % (parent,): ret, "a": []}

         zs = json.dumps(ret)
         self.reply(zs.encode("utf-8"), mime="application/json")
@@ -3514,7 +3534,11 @@ class HttpCli(object):
             return self.tx_zip(k, v, self.vpath, vn, rem, [], self.args.ed)

         fsroot, vfs_ls, vfs_virt = vn.ls(
-            rem, self.uname, not self.args.no_scandir, [[True, False], [False, True]]
+            rem,
+            self.uname,
+            not self.args.no_scandir,
+            [[True, False], [False, True]],
+            lstat="lt" in self.uparam,
         )
         stats = {k: v for k, v in vfs_ls}
         ls_names = [x[0] for x in vfs_ls]
```
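
The timeout changes above move from a single hardcoded header deadline (plus ad-hoc `settimeout(None)` calls scattered through the reply paths) to per-phase budgets: `--s-thead` governs header parsing and `--s-tbody` governs body transfer, applied once before request dispatch. A toy illustration of the per-phase pattern (hypothetical handler, not copyparty's code):

```python
import socket

def serve_once(sock: socket.socket, s_thead: int = 120, s_tbody: float = 186) -> None:
    # strict budget while parsing the request header ...
    sock.settimeout(s_thead)
    hdr = sock.recv(1024)  # raises socket.timeout if the client stalls

    # ... then a looser (or disabled, when 0) budget for the body
    sock.settimeout(s_tbody or None)
    body = sock.recv(65536)
```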

```diff
@@ -3558,7 +3582,8 @@ class HttpCli(object):
             fspath = fsroot + "/" + fn

             try:
-                inf = stats.get(fn) or bos.stat(fspath)
+                linf = stats.get(fn) or bos.lstat(fspath)
+                inf = bos.stat(fspath) if stat.S_ISLNK(linf.st_mode) else linf
             except:
                 self.log("broken symlink: {}".format(repr(fspath)))
                 continue
@@ -3569,19 +3594,26 @@ class HttpCli(object):
                 if self.args.no_zip:
                     margin = "DIR"
                 else:
-                    margin = '<a href="{}?zip" rel="nofollow">zip</a>'.format(
-                        quotep(href)
-                    )
+                    margin = '<a href="%s?zip" rel="nofollow">zip</a>' % (quotep(href),)
             elif fn in hist:
-                margin = '<a href="{}.hist/{}">#{}</a>'.format(
-                    base, html_escape(hist[fn][2], quot=True, crlf=True), hist[fn][0]
-                )
+                margin = '<a href="%s.hist/%s">#%s</a>' % (
+                    base,
+                    html_escape(hist[fn][2], quot=True, crlf=True),
+                    hist[fn][0],
+                )
             else:
                 margin = "-"

             sz = inf.st_size
-            zd = datetime.utcfromtimestamp(inf.st_mtime)
-            dt = zd.strftime("%Y-%m-%d %H:%M:%S")
+            zd = datetime.utcfromtimestamp(linf.st_mtime)
+            dt = "%04d-%02d-%02d %02d:%02d:%02d" % (
+                zd.year,
+                zd.month,
+                zd.day,
+                zd.hour,
+                zd.minute,
+                zd.second,
+            )

             try:
                 ext = "---" if is_dir else fn.rsplit(".", 1)[1]
@@ -3591,7 +3623,7 @@ class HttpCli(object):
                 ext = "%"

             if add_fk:
-                href = "{}?k={}".format(
+                href = "%s?k=%s" % (
                     quotep(href),
                     self.gen_fk(
                         self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
@@ -3607,7 +3639,7 @@ class HttpCli(object):
                 "sz": sz,
                 "ext": ext,
                 "dt": dt,
-                "ts": int(inf.st_mtime),
+                "ts": int(linf.st_mtime),
             }
             if is_dir:
                 dirs.append(item)
```
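
The `linf`/`inf` split in the listing code keeps both views of a symlink: `lstat` describes the link itself (its own mtime, shown when `?lt` is requested), while `stat` follows it to the target. A self-contained demo (POSIX only):

```python
import os, tempfile

d = tempfile.mkdtemp()
tgt = os.path.join(d, "target.txt")
lnk = os.path.join(d, "link.txt")
open(tgt, "w").close()
os.symlink(tgt, lnk)
os.utime(tgt, (0, 0))  # backdate the target to 1970

linf = os.lstat(lnk)  # the link itself: created just now
inf = os.stat(lnk)    # follows the link: mtime is 0
print(int(linf.st_mtime), int(inf.st_mtime))  # e.g. 1682496547 0
```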

```diff
@@ -647,8 +647,14 @@ class SvcHub(object):
             return

         with self.log_mutex:
-            ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
-            self.logf.write("@{} [{}\033[0m] {}\n".format(ts, src, msg))
+            zd = datetime.utcnow()
+            ts = "%04d-%04d-%06d.%03d" % (
+                zd.year,
+                zd.month * 100 + zd.day,
+                (zd.hour * 100 + zd.minute) * 100 + zd.second,
+                zd.microsecond // 1000,
+            )
+            self.logf.write("@%s [%s\033[0m] %s\n" % (ts, src, msg))

             now = time.time()
             if now >= self.next_day:
@@ -678,23 +684,29 @@ class SvcHub(object):
             print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
             self._set_next_day()

-        fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
+        fmt = "\033[36m%s \033[33m%-21s \033[0m%s\n"
         if not VT100:
-            fmt = "{} {:21} {}\n"
+            fmt = "%s %-21s %s\n"
             if "\033" in msg:
                 msg = ansi_re.sub("", msg)
             if "\033" in src:
                 src = ansi_re.sub("", src)
         elif c:
             if isinstance(c, int):
-                msg = "\033[3{}m{}\033[0m".format(c, msg)
+                msg = "\033[3%sm%s\033[0m" % (c, msg)
             elif "\033" not in c:
-                msg = "\033[{}m{}\033[0m".format(c, msg)
+                msg = "\033[%sm%s\033[0m" % (c, msg)
             else:
-                msg = "{}{}\033[0m".format(c, msg)
+                msg = "%s%s\033[0m" % (c, msg)

-        ts = datetime.utcfromtimestamp(now).strftime("%H:%M:%S.%f")[:-3]
-        msg = fmt.format(ts, src, msg)
+        zd = datetime.utcfromtimestamp(now)
+        ts = "%02d:%02d:%02d.%03d" % (
+            zd.hour,
+            zd.minute,
+            zd.second,
+            zd.microsecond // 1000,
+        )
+        msg = fmt % (ts, src, msg)
         try:
             print(msg, end="")
         except UnicodeEncodeError:
```
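
The logging rewrite drops `strftime` in favor of plain integer formatting, since these strings get built on every log line and `strftime` is comparatively expensive; the packed fields reproduce the old layout exactly, which is easy to verify:

```python
from datetime import datetime

zd = datetime(2023, 4, 26, 6, 9, 7, 123456)
old = zd.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
new = "%04d-%04d-%06d.%03d" % (
    zd.year,
    zd.month * 100 + zd.day,
    (zd.hour * 100 + zd.minute) * 100 + zd.second,
    zd.microsecond // 1000,
)
assert old == new == "2023-0426-060907.123"
```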

```diff
@@ -274,6 +274,10 @@ class ThumbSrv(object):
         tdir, tfn = os.path.split(tpath)
         ttpath = os.path.join(tdir, "w", tfn)
+        try:
+            bos.unlink(ttpath)
+        except:
+            pass

         for fun in funs:
             try:
@@ -570,11 +574,15 @@ class ThumbSrv(object):
         want_caf = tpath.endswith(".caf")
         tmp_opus = tpath
         if want_caf:
-            tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
+            tmp_opus = tpath + ".opus"
+            try:
+                bos.unlink(tmp_opus)
+            except:
+                pass

         caf_src = abspath if src_opus else tmp_opus

-        if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
+        if not want_caf or not src_opus:
             # fmt: off
             cmd = [
                 b"ffmpeg",
@@ -633,6 +641,12 @@ class ThumbSrv(object):
             # fmt: on
             self._run_ff(cmd)

+        if tmp_opus != tpath:
+            try:
+                bos.unlink(tmp_opus)
+            except:
+                pass
+
     def poke(self, tdir: str) -> None:
         if not self.poke_cd.poke(tdir):
             return
```
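
Two things happen in the CAF path above: stale intermediates are now unlinked before (and after) each transcode, and the temp name changes from replacing the extension to appending one. The rename plausibly avoids a collision with a plain opus thumbnail of the same file; a sketch with hypothetical cache paths:

```python
# hypothetical thumbnail-cache paths for one audio file
tpath = "/cache/ab/cdef0123.caf"  # the CAF transcode target
old_tmp = tpath.rsplit(".", 1)[0] + ".opus"  # /cache/ab/cdef0123.opus
new_tmp = tpath + ".opus"                    # /cache/ab/cdef0123.caf.opus

# old_tmp is exactly the name an opus thumbnail of the same file would
# get, so a half-written intermediate could shadow it; new_tmp cannot
# collide with any ".caf" or ".opus" cache entry
print(old_tmp, new_tmp)
```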

```diff
@@ -380,9 +380,9 @@ class Up2k(object):
             if rd.startswith("//") or fn.startswith("//"):
                 rd, fn = s3dec(rd, fn)

-            fvp = "{}/{}".format(rd, fn).strip("/")
+            fvp = ("%s/%s" % (rd, fn)).strip("/")
             if vp:
-                fvp = "{}/{}".format(vp, fvp)
+                fvp = "%s/%s" % (vp, fvp)

             self._handle_rm(LEELOO_DALLAS, "", fvp, [])
             nrm += 1
```

```diff
@@ -537,7 +537,7 @@ class _Unrecv(object):
         self.log = log
         self.buf: bytes = b""

-    def recv(self, nbytes: int) -> bytes:
+    def recv(self, nbytes: int, spins: int = 1) -> bytes:
         if self.buf:
             ret = self.buf[:nbytes]
             self.buf = self.buf[nbytes:]
@@ -548,6 +548,10 @@ class _Unrecv(object):
                 ret = self.s.recv(nbytes)
                 break
             except socket.timeout:
+                spins -= 1
+                if spins <= 0:
+                    ret = b""
+                    break
                 continue
             except:
                 ret = b""
@@ -590,7 +594,7 @@ class _LUnrecv(object):
         self.log = log
         self.buf = b""

-    def recv(self, nbytes: int) -> bytes:
+    def recv(self, nbytes: int, spins: int) -> bytes:
         if self.buf:
             ret = self.buf[:nbytes]
             self.buf = self.buf[nbytes:]
@@ -609,7 +613,7 @@ class _LUnrecv(object):
     def recv_ex(self, nbytes: int, raise_on_trunc: bool = True) -> bytes:
         """read an exact number of bytes"""
         try:
-            ret = self.recv(nbytes)
+            ret = self.recv(nbytes, 1)
             err = False
         except:
             ret = b""
@@ -617,7 +621,7 @@ class _LUnrecv(object):
         while not err and len(ret) < nbytes:
             try:
-                ret += self.recv(nbytes - len(ret))
+                ret += self.recv(nbytes - len(ret), 1)
             except OSError:
                 err = True

@@ -1292,7 +1296,7 @@ class MultipartParser(object):
        rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
        (only the fallback non-js uploader relies on these filenames)
        """
-        for ln in read_header(self.sr):
+        for ln in read_header(self.sr, 2, 2592000):
             self.log(ln)

             m = self.re_ctype.match(ln)
@@ -1492,15 +1496,15 @@ def get_boundary(headers: dict[str, str]) -> str:
     return m.group(2)


-def read_header(sr: Unrecv) -> list[str]:
+def read_header(sr: Unrecv, t_idle: int, t_tot: int) -> list[str]:
     t0 = time.time()
     ret = b""
     while True:
-        if time.time() - t0 > 120:
+        if time.time() - t0 >= t_tot:
             return []

         try:
-            ret += sr.recv(1024)
+            ret += sr.recv(1024, t_idle // 2)
         except:
             if not ret:
                 return []
@@ -1549,7 +1553,7 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:
 def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
     return base64.urlsafe_b64encode(
         hashlib.sha512(
-            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
+            ("%s %s %s %s" % (salt, fspath, fsize, inode)).encode("utf-8", "replace")
         ).digest()
     ).decode("ascii")
@@ -1658,7 +1662,7 @@ def uncyg(path: str) -> str:
     if len(path) > 2 and path[2] != "/":
         return path

-    return "{}:\\{}".format(path[1], path[3:])
+    return "%s:\\%s" % (path[1], path[3:])


 def undot(path: str) -> str:
@@ -1701,7 +1705,7 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
     bad = ["con", "prn", "aux", "nul"]
     for n in range(1, 10):
-        bad += "com{0} lpt{0}".format(n).split(" ")
+        bad += ("com%s lpt%s" % (n, n)).split(" ")

     if fn.lower().split(".")[0] in bad:
         fn = "_" + fn
```
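
With the new signature, `read_header` enforces two separate limits: `t_tot` caps total wall-clock time for the whole header, while `t_idle` bounds silence on the socket; since the socket sits on a 2-second timeout, passing `t_idle // 2` as the spin count makes `recv` give up after roughly `t_idle` seconds without data. A condensed sketch of that loop:

```python
import time

def read_header_sketch(sr, t_idle: int, t_tot: int) -> bytes:
    # sr is assumed to be an _Unrecv-style wrapper whose recv(n, spins)
    # tolerates `spins` consecutive 2-second socket timeouts
    t0 = time.time()
    ret = b""
    while b"\r\n\r\n" not in ret:
        if time.time() - t0 >= t_tot:
            return b""  # total budget exhausted
        buf = sr.recv(1024, t_idle // 2)
        if not buf:
            return b""  # client went idle for ~t_idle seconds
        ret += buf
    return ret
```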

```diff
@@ -1482,7 +1482,7 @@ var mpl = (function () {
         ebi('np_title').textContent = np.title || '';
         ebi('np_dur').textContent = np['.dur'] || '';
         ebi('np_url').textContent = get_vpath() + np.file.split('?')[0];
-        ebi('np_img').setAttribute('src', cover); // dont give last.fm the pwd
+        ebi('np_img').setAttribute('src', cover || ''); // dont give last.fm the pwd

         navigator.mediaSession.metadata = new MediaMetadata(tags);
         navigator.mediaSession.setActionHandler('play', mplay);
@@ -1499,6 +1499,7 @@ var mpl = (function () {
         if (!r.os_ctl)
             return;

+        // dead code; left for debug
         navigator.mediaSession.metadata = null;
         navigator.mediaSession.playbackState = "paused";

@@ -1538,12 +1539,13 @@ var re_au_native = can_ogg ? /\.(aac|flac|m4a|mp3|ogg|opus|wav)$/i :


 // extract songs + add play column
+var mpo = { "au": null, "au2": null, "acs": null };
 function MPlayer() {
     var r = this;
     r.id = Date.now();
-    r.au = null;
-    r.au2 = null;
+    r.au = mpo.au;
+    r.au2 = mpo.au2;
+    r.acs = mpo.acs;
     r.tracks = {};
     r.order = [];
     r.cd_pause = 0;
@@ -2483,7 +2485,7 @@ var afilt = (function () {
         if (mp.acs)
             mp.acs.disconnect();

-        mp.acs = null;
+        mp.acs = mpo.acs = null;
     };

     r.apply = function () {
@@ -2722,7 +2724,9 @@ function play(tid, is_ev, seek) {
     if (mp.au) {
         mp.au.pause();
-        clmod(ebi('a' + mp.au.tid), 'act');
+        var el = ebi('a' + mp.au.tid);
+        if (el)
+            clmod(el, 'act');
     }
     else {
         mp.au = new Audio();
@@ -2895,7 +2899,7 @@ function autoplay_blocked(seek) {
     modal.confirm('<h6>' + L.mm_hashplay + '</h6>\n«' + esc(fn) + '»', function () {
         // chrome 91 may permanently taint on a failed play()
         // depending on win10 settings or something? idk
-        mp.au = null;
+        mp.au = mpo.au = null;

         play(tid, true, seek);
         mp.fade_in();
@@ -7280,18 +7284,25 @@ function reload_mp() {
         if (afilt)
             afilt.stop();

         mp.au.pause();
-        mp.au = null;
+        mpo.au = mp.au;
+        mpo.au2 = mp.au2;
+        mpo.acs = mp.acs;
+        mpl.unbuffer();
     }
     mpl.stop();
     var plays = QSA('tr>td:first-child>a.play');
     for (var a = plays.length - 1; a >= 0; a--)
         plays[a].parentNode.innerHTML = '-';

     mp = new MPlayer();
     if (afilt)
         afilt.acst = {};
+    if (mp.au && mp.au.tid) {
+        var el = QS('a#a' + mp.au.tid);
+        if (el)
+            clmod(el, 'act', 1);
+
+        el = el && el.closest('tr');
+        if (el)
+            clmod(el, 'play', 1);
+    }

     setTimeout(pbar.onresize, 1);
 }
```

```diff
@@ -1,3 +1,22 @@
+▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
+# 2023-0424-0609  `v1.6.14`  unsettable flags
+
+## new features
+* unset a volflag (override a global option) by negating it (setting volflag `-flagname`)
+* new argument `--cert` to specify TLS certificate location
+  * defaults to `~/.config/copyparty/cert.pem` like before
+
+## bugfixes
+* in zip/tar downloads, always use the parent-folder name as the archive root
+* more reliable ftp authentication when providing password as username
+* connect-page: fix rclone ftps example
+
+## other changes
+* stop suggesting `--http-only` and `--https-only` for performance since the difference is negligible
+* mention how some antivirus (avast, avg, mcafee) thinks that pillow's webp encoder is a virus, affecting `copyparty.exe`
+
+
 ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
 # 2023-0420-2141  `v1.6.12`  as seen on nixos
```

````diff
@@ -17,6 +17,7 @@
 * [building](#building)
   * [dev env setup](#dev-env-setup)
   * [just the sfx](#just-the-sfx)
+  * [build from release tarball](#build-from-release-tarball) - uses the included prebuilt webdeps
   * [complete release](#complete-release)
 * [todo](#todo) - roughly sorted by priority
   * [discarded ideas](#discarded-ideas)
@@ -110,6 +111,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
 | GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles |
 | GET | `?ls=t` | list files/folders at URL as plaintext |
 | GET | `?ls=v` | list files/folders at URL, terminal-formatted |
+| GET | `?lt` | in listings, use symlink timestamps rather than targets |
 | GET | `?b` | list files/folders at URL as simplified HTML |
 | GET | `?tree=.` | list one level of subdirectories inside URL |
 | GET | `?tree` | list one level of subdirectories for each level until URL |
@@ -255,10 +257,31 @@ then build the sfx using any of the following examples:
 ```


+## build from release tarball
+
+uses the included prebuilt webdeps
+
+if you downloaded a [release](https://github.com/9001/copyparty/releases) source tarball from github (for example [copyparty-1.6.15.tar.gz](https://github.com/9001/copyparty/releases/download/v1.6.15/copyparty-1.6.15.tar.gz) so not the autogenerated one) you can build it like so,
+
+```bash
+python3 setup.py install --user
+```
+
+or if you're packaging it for a linux distro (nice), maybe something like
+
+```bash
+bash scripts/run-tests.sh python3  # optional
+python3 setup.py build
+python3 setup.py install --skip-build --prefix=/usr --root=$HOME/pe/copyparty
+```
+
+
 ## complete release
+
+also builds the sfx so skip the sfx section above

 does everything completely from scratch, straight from your local repo

 in the `scripts` folder:

 * run `make -C deps-docker` to build all dependencies
````

```diff
@@ -64,6 +64,8 @@ git archive hovudstraum | tar -xC "$rls_dir"
 echo ">>> export untracked deps"
 tar -c copyparty/web/deps | tar -xC "$rls_dir"

+scripts/genlic.sh "$rls_dir/copyparty/res/COPYING.txt"
+
 cd "$rls_dir"
 find -type d -exec chmod 755 '{}' \+
 find -type f -exec chmod 644 '{}' \+
@@ -93,7 +95,7 @@ rm \
   .gitattributes \
   .gitignore

-mv LICENSE LICENSE.txt
+cp -pv LICENSE LICENSE.txt

 # the regular cleanup memes
 find -name '*.pyc' -delete
```

```diff
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e

-parallel=2
+parallel=1

 [ -e make-sfx.sh ] || cd scripts
 [ -e make-sfx.sh ] && [ -e deps-docker ] || {
```

```diff
@@ -98,7 +98,7 @@ class Cfg(Namespace):
     def __init__(self, a=None, v=None, c=None):
         ka = {}

-        ex = "daw dav_inf dav_mac dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol"
+        ex = "daw dav_inf dav_mac dav_rt dotsrch e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js getmod hardlink ih ihead magic never_symlink nid nih no_acode no_athumb no_dav no_dedup no_del no_dupe no_logues no_mv no_readme no_robots no_sb_md no_sb_lg no_scandir no_thumb no_vthumb no_zip nrand nw rand vc xdev xlink xvol"
         ka.update(**{k: False for k in ex.split()})

         ex = "dotpart no_rescan no_sendfile no_voldump plain_ip"
@@ -107,6 +107,9 @@ class Cfg(Namespace):
         ex = "css_browser hist js_browser no_forget no_hash no_idx"
         ka.update(**{k: None for k in ex.split()})

+        ex = "s_thead s_tbody"
+        ka.update(**{k: 9 for k in ex.split()})
+
         ex = "df loris re_maxage rproxy rsp_jtr rsp_slp s_wr_slp theme themes turbo"
         ka.update(**{k: 0 for k in ex.split()})
```