Compare commits


15 Commits

Author  SHA1        Message                                       Date
ed      b12131e91c  v0.10.1                                       2021-03-27 02:44:40 +01:00
ed      3b354447b0  v0.10.0                                       2021-03-27 02:08:07 +01:00
ed      d09ec6feaa  tehe                                          2021-03-27 01:49:58 +01:00
ed      21405c3fda  be nice to windows                            2021-03-27 01:43:02 +01:00
ed      13e5c96cab  finish adding zip-crc (semi-streaming)        2021-03-27 01:27:12 +01:00
ed      426687b75e  archive format selection in browser           2021-03-27 01:10:05 +01:00
ed      c8f59fb978  up2k: add folder upload                       2021-03-27 00:20:42 +01:00
ed      871dde79a9  download as tar + utf8 zip + optimize walk    2021-03-26 20:43:25 +01:00
ed      e14d81bc6f  fix utf8 content-disposition                  2021-03-26 02:54:19 +01:00
ed      514d046d1f  download folders as zip                       2021-03-26 01:51:38 +01:00
ed      4ed9528d36  5x faster reply on 1st req on new conns       2021-03-25 19:29:16 +01:00
ed      625560e642  steal from diodes                             2021-03-25 02:59:04 +01:00
ed      73ebd917d1  i know too much about zip now                 2021-03-25 02:31:25 +01:00
ed      cd3e0afad2  v0.9.13                                       2021-03-23 02:13:28 +01:00
ed      d8d1f94a86  v0.9.12                                       2021-03-23 01:24:37 +01:00
18 changed files with 710 additions and 79 deletions


@@ -72,7 +72,7 @@ you may also want these, especially on servers:
 * ☑ symlink/discard existing files (content-matching)
 * download
 * ☑ single files in browser
-* folders as zip files
+* folders as zip / tar files
 * ☑ FUSE client (read-only)
 * browser
 * ☑ tree-view


@@ -261,6 +261,7 @@ def main():
     ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
     ap.add_argument("-nih", action="store_true", help="no info hostname")
     ap.add_argument("-nid", action="store_true", help="no info disk-usage")
+    ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
     ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
     ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
     ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")


@@ -1,8 +1,8 @@
 # coding: utf-8
-VERSION = (0, 9, 11)
-CODENAME = "the strongest music server"
-BUILD_DT = (2021, 3, 23)
+VERSION = (0, 10, 1)
+CODENAME = "zip it"
+BUILD_DT = (2021, 3, 27)
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)


@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
 import re
 import os
 import sys
+import stat
 import threading

 from .__init__ import PY2, WINDOWS
@@ -53,6 +54,7 @@ class VFS(object):
                 self.uwrite,
                 self.flags,
             )
+            self._trk(vn)
             self.nodes[name] = vn
             return self._trk(vn.add(src, dst))
@@ -127,6 +129,80 @@ class VFS(object):
         return [abspath, real, virt_vis]
+    def walk(self, rel, rem, uname, dots, scandir, lstat=False):
+        """
+        recursively yields from ./rem;
+        rel is a unix-style user-defined vpath (not vfs-related)
+        """
+        fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
+        rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
+        rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
+        rfiles.sort()
+        rdirs.sort()
+
+        yield rel, fsroot, rfiles, rdirs, vfs_virt
+
+        for rdir, _ in rdirs:
+            if not dots and rdir.startswith("."):
+                continue
+
+            wrel = (rel + "/" + rdir).lstrip("/")
+            wrem = (rem + "/" + rdir).lstrip("/")
+            for x in self.walk(wrel, wrem, uname, scandir, lstat):
+                yield x
+
+        for n, vfs in sorted(vfs_virt.items()):
+            if not dots and n.startswith("."):
+                continue
+
+            wrel = (rel + "/" + n).lstrip("/")
+            for x in vfs.walk(wrel, "", uname, scandir, lstat):
+                yield x
+
+    def zipgen(self, vrem, rems, uname, dots, scandir):
+        vtops = [["", [self, vrem]]]
+        if rems:
+            # list of subfolders to zip was provided,
+            # add all the ones uname is allowed to access
+            vtops = []
+            for rem in rems:
+                try:
+                    d = rem if not vrem else vrem + "/" + rem
+                    vn = self.get(d, uname, True, False)
+                    vtops.append([rem, vn])
+                except:
+                    pass
+
+        for rel, (vn, rem) in vtops:
+            for vpath, apath, files, rd, vd in vn.walk(rel, rem, uname, dots, scandir):
+                # print(repr([vpath, apath, [x[0] for x in files]]))
+                fnames = [n[0] for n in files]
+                vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
+                apaths = [os.path.join(apath, n) for n in fnames]
+                files = list(zip(vpaths, apaths, files))
+
+                if not dots:
+                    # dotfile filtering based on vpath (intended visibility)
+                    files = [x for x in files if "/." not in "/" + x[0]]
+
+                    rm = [x for x in rd if x[0].startswith(".")]
+                    for x in rm:
+                        rd.remove(x)
+
+                    rm = [k for k in vd.keys() if k.startswith(".")]
+                    for x in rm:
+                        del vd[x]
+
+                # up2k filtering based on actual abspath
+                files = [
+                    x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]
+                ]
+
+                for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
+                    yield f
     def user_tree(self, uname, readable=False, writable=False):
         ret = []
         opt1 = readable and (uname in self.uread or "*" in self.uread)
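zipgen() above flattens the virtual filesystem into a stream of small dicts ({"vp": archive path, "ap": absolute path, "st": stat result}) which StreamTar / StreamZip then consume. A minimal standalone sketch of that contract, walking the real filesystem with os.scandir instead of the VFS (the function name and the dotfile rule here are illustrative only):

```python
import os
import stat

def fs_zipgen(top, rel=""):
    # yields {"vp", "ap", "st"} dicts, roughly like VFS.zipgen but straight off the disk
    with os.scandir(top) as it:
        ents = sorted(it, key=lambda e: e.name)

    for e in ents:
        if e.name.startswith("."):
            continue  # mimic the default dots=False filtering

        st = e.stat(follow_symlinks=False)
        vp = rel + "/" + e.name if rel else e.name
        if stat.S_ISDIR(st.st_mode):
            for f in fs_zipgen(e.path, vp):
                yield f
        else:
            yield {"vp": vp, "ap": e.path, "st": st}

for f in fs_zipgen("."):
    print(f["vp"], f["st"].st_size)
```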


@@ -7,6 +7,7 @@ import gzip
 import time
 import copy
 import json
+import string
 import socket
 import ctypes
 from datetime import datetime
@@ -14,6 +15,8 @@ import calendar
 from .__init__ import E, PY2, WINDOWS
 from .util import *  # noqa  # pylint: disable=unused-wildcard-import
+from .szip import StreamZip
+from .star import StreamTar

 if not PY2:
     unicode = str
@@ -52,6 +55,10 @@ class HttpCli(object):
         if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
             raise Exception("that was close")

+    def j2(self, name, **kwargs):
+        tpl = self.conn.hsrv.j2[name]
+        return tpl.render(**kwargs) if kwargs else tpl
+
     def run(self):
         """returns true if connection can be reused"""
         self.keepalive = False
@@ -154,7 +161,9 @@
             try:
                 # self.log("pebkac at httpcli.run #2: " + repr(ex))
                 self.keepalive = self._check_nonfatal(ex)
-                self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
+                self.log("{}\033[0m: {}".format(str(ex), self.vpath), 3)
+                msg = "<pre>{}: {}\r\n".format(str(ex), self.vpath)
+                self.reply(msg.encode("utf-8", "replace"), status=ex.code)
                 return self.keepalive
             except Pebkac:
                 return False
@@ -417,15 +426,18 @@
         if "srch" in self.uparam or "srch" in body:
             return self.handle_search(body)

-        # prefer this over undot; no reason to allow traversion
-        if "/" in body["name"]:
-            raise Pebkac(400, "folders verboten")
-
         # up2k-php compat
         for k in "chunkpit.php", "handshake.php":
             if self.vpath.endswith(k):
                 self.vpath = self.vpath[: -len(k)]

+        sub = None
+        name = undot(body["name"])
+        if "/" in name:
+            sub, name = name.rsplit("/", 1)
+            self.vpath = "/".join([self.vpath, sub]).strip("/")
+            body["name"] = name
+
         vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
         body["vtop"] = vfs.vpath
@@ -434,12 +446,22 @@
         body["addr"] = self.ip
         body["vcfg"] = vfs.flags

-        x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
-        response = x.get()
-        response = json.dumps(response)
+        if sub:
+            try:
+                dst = os.path.join(vfs.realpath, rem)
+                os.makedirs(dst)
+            except:
+                if not os.path.isdir(dst):
+                    raise Pebkac(400, "some file got your folder name")

-        self.log(response)
-        self.reply(response.encode("utf-8"), mime="application/json")
+        x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
+        ret = x.get()
+        if sub:
+            ret["name"] = "/".join([sub, ret["name"]])
+
+        ret = json.dumps(ret)
+        self.log(ret)
+        self.reply(ret.encode("utf-8"), mime="application/json")
         return True

     def handle_search(self, body):
@@ -580,7 +602,7 @@
         pwd = "x"  # nosec

         h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
-        html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
+        html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
         self.reply(html.encode("utf-8"), headers=h)
         return True
@@ -611,7 +633,8 @@
         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
         esc_paths = [quotep(vpath), html_escape(vpath)]
-        html = self.conn.tpl_msg.render(
+        html = self.j2(
+            "msg",
             h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
             pre="aight",
             click=True,
@@ -643,7 +666,8 @@
             f.write(b"`GRUNNUR`\n")

         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
-        html = self.conn.tpl_msg.render(
+        html = self.j2(
+            "msg",
             h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
                 quotep(vpath), html_escape(vpath)
             ),
@@ -749,7 +773,8 @@
             ).encode("utf-8")
         )

-        html = self.conn.tpl_msg.render(
+        html = self.j2(
+            "msg",
             h2='<a href="/{}">return to /{}</a>'.format(
                 quotep(self.vpath), html_escape(self.vpath)
             ),
@@ -1037,16 +1062,75 @@
         self.log("{}, {}".format(logmsg, spd))
         return ret

+    def tx_zip(self, fmt, uarg, vn, rem, items, dots):
+        if self.args.no_zip:
+            raise Pebkac(400, "not enabled")
+
+        logmsg = "{:4} {} ".format("", self.req)
+        self.keepalive = False
+
+        if not uarg:
+            uarg = ""
+
+        if fmt == "tar":
+            mime = "application/x-tar"
+            packer = StreamTar
+        else:
+            mime = "application/zip"
+            packer = StreamZip
+
+        fn = items[0] if items and items[0] else self.vpath
+        if fn:
+            fn = fn.rstrip("/").split("/")[-1]
+        else:
+            fn = self.headers.get("host", "hey")
+
+        afn = "".join(
+            [x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
+        )
+
+        bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
+        ufn = fn.encode("utf-8", "xmlcharrefreplace")
+        if PY2:
+            ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
+        else:
+            ufn = [
+                chr(x).encode("utf-8")
+                if x in bascii
+                else "%{:02x}".format(x).encode("ascii")
+                for x in ufn
+            ]
+        ufn = b"".join(ufn).decode("ascii")
+
+        cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
+        cdis = cdis.format(afn, fmt, ufn, fmt)
+        self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
+
+        fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
+        # for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
+        bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
+        bsent = 0
+        for buf in bgen.gen():
+            if not buf:
+                break
+
+            try:
+                self.s.sendall(buf)
+                bsent += len(buf)
+            except:
+                logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
+                break
+
+        spd = self._spd(bsent)
+        self.log("{}, {}".format(logmsg, spd))
+        return True
+
     def tx_md(self, fs_path):
         logmsg = "{:4} {} ".format("", self.req)

-        if "edit2" in self.uparam:
-            html_path = "web/mde.html"
-            template = self.conn.tpl_mde
-        else:
-            html_path = "web/md.html"
-            template = self.conn.tpl_md
-
-        html_path = os.path.join(E.mod, html_path)
+        tpl = "mde" if "edit2" in self.uparam else "md"
+        html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
+        template = self.j2(tpl)

         st = os.stat(fsenc(fs_path))
         # sz_md = st.st_size
@@ -1098,7 +1182,7 @@
     def tx_mounts(self):
         rvol = [x + "/" if x else x for x in self.rvol]
         wvol = [x + "/" if x else x for x in self.wvol]
-        html = self.conn.tpl_mounts.render(this=self, rvol=rvol, wvol=wvol)
+        html = self.j2("splash", this=self, rvol=rvol, wvol=wvol)
         self.reply(html.encode("utf-8"))
         return True
@@ -1187,6 +1271,11 @@
             return self.tx_file(abspath)

+        for k in ["zip", "tar"]:
+            v = self.uparam.get(k)
+            if v is not None:
+                return self.tx_zip(k, v, vn, rem, [], self.args.ed)
+
         fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
         stats = {k: v for k, v in vfs_ls}
         vfs_ls = [x[0] for x in vfs_ls]
@@ -1247,8 +1336,11 @@
             is_dir = stat.S_ISDIR(inf.st_mode)
             if is_dir:
-                margin = "DIR"
                 href += "/"
+                if self.args.no_zip:
+                    margin = "DIR"
+                else:
+                    margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
             elif fn in hist:
                 margin = '<a href="{}.hist/{}">#{}</a>'.format(
                     base, html_escape(hist[fn][2], quote=True), hist[fn][0]
@@ -1295,7 +1387,7 @@
                 tags = {}
                 f["tags"] = tags

                 if not r:
                     continue
@@ -1306,7 +1398,7 @@
                     tags[k] = v

             if icur:
-                taglist = [k for k in vn.flags["mte"].split(",") if k in taglist]
+                taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
                 for f in dirs:
                     f["tags"] = {}
@@ -1372,16 +1464,20 @@
         dirs.extend(files)

-        html = self.conn.tpl_browser.render(
+        html = self.j2(
+            "browser",
             vdir=quotep(self.vpath),
             vpnodes=vpnodes,
             files=dirs,
             ts=ts,
             perms=json.dumps(perms),
             taglist=taglist,
-            tag_order=json.dumps(vn.flags["mte"].split(",")),
+            tag_order=json.dumps(
+                vn.flags["mte"].split(",") if "mte" in vn.flags else []
+            ),
             have_up2k_idx=("e2d" in vn.flags),
             have_tags_idx=("e2t" in vn.flags),
+            have_zip=(not self.args.no_zip),
             logues=logues,
             title=html_escape(self.vpath),
             srv_info=srv_info,
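tx_zip above builds two download names: an ascii-only fallback (afn) and an RFC 5987 filename*=UTF-8'' value (ufn) so non-ascii folder names survive the Content-Disposition header. A rough Python-3-only sketch of that percent-encoding step (the helper name is made up):

```python
import string

def rfc5987_value(fn):
    # keep ascii letters/digits, percent-encode every other utf-8 byte
    safe = (string.ascii_letters + string.digits).encode("ascii")
    enc = fn.encode("utf-8", "xmlcharrefreplace")
    return "".join(chr(b) if b in safe else "%{:02x}".format(b) for b in enc)

print("attachment; filename=\"_.zip\"; filename*=UTF-8''" + rfc5987_value("フォルダ") + ".zip")
```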


@@ -12,23 +12,6 @@ try:
 except:
     HAVE_SSL = False

-try:
-    import jinja2
-except ImportError:
-    print(
-        """\033[1;31m
-  you do not have jinja2 installed,\033[33m
-  choose one of these:\033[0m
-   * apt install python-jinja2
-   * {} -m pip install --user jinja2
-   * (try another python version, if you have one)
-   * (try copyparty.sfx instead)
-""".format(
-            os.path.basename(sys.executable)
-        )
-    )
-    sys.exit(1)
-
 from .__init__ import E
 from .util import Unrecv
 from .httpcli import HttpCli
@@ -57,14 +40,6 @@ class HttpConn(object):
         self.log_func = hsrv.log
         self.set_rproxy()

-        env = jinja2.Environment()
-        env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
-        self.tpl_mounts = env.get_template("splash.html")
-        self.tpl_browser = env.get_template("browser.html")
-        self.tpl_msg = env.get_template("msg.html")
-        self.tpl_md = env.get_template("md.html")
-        self.tpl_mde = env.get_template("mde.html")
-
     def set_rproxy(self, ip=None):
         if ip is None:
             color = 36


@@ -2,10 +2,28 @@
 from __future__ import print_function, unicode_literals

 import os
+import sys
 import time
 import socket
 import threading

+try:
+    import jinja2
+except ImportError:
+    print(
+        """\033[1;31m
+  you do not have jinja2 installed,\033[33m
+  choose one of these:\033[0m
+   * apt install python-jinja2
+   * {} -m pip install --user jinja2
+   * (try another python version, if you have one)
+   * (try copyparty.sfx instead)
+""".format(
+            os.path.basename(sys.executable)
+        )
+    )
+    sys.exit(1)
+
 from .__init__ import E, MACOS
 from .httpconn import HttpConn
 from .authsrv import AuthSrv
@@ -30,6 +48,13 @@ class HttpSrv(object):
         self.workload_thr_alive = False
         self.auth = AuthSrv(self.args, self.log)

+        env = jinja2.Environment()
+        env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
+        self.j2 = {
+            x: env.get_template(x + ".html")
+            for x in ["splash", "browser", "msg", "md", "mde"]
+        }
+
         cert_path = os.path.join(E.cfg, "cert.pem")
         if os.path.exists(cert_path):
             self.cert_path = cert_path

copyparty/star.py (new file, 64 lines)

@@ -0,0 +1,64 @@
import tarfile
import threading

from .util import Queue, fsenc


class QFile(object):
    """file-like object which buffers writes into a queue"""

    def __init__(self):
        self.q = Queue(64)

    def write(self, buf):
        self.q.put(buf)


class StreamTar(object):
    """construct in-memory tar file from the given path"""

    def __init__(self, fgen, **kwargs):
        self.ci = 0
        self.co = 0
        self.qfile = QFile()
        self.fgen = fgen

        # python 3.8 changed to PAX_FORMAT as default,
        # waste of space and don't care about the new features
        fmt = tarfile.GNU_FORMAT
        self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)

        w = threading.Thread(target=self._gen)
        w.daemon = True
        w.start()

    def gen(self):
        while True:
            buf = self.qfile.q.get()
            if buf is None:
                break

            self.co += len(buf)
            yield buf

        yield None

    def _gen(self):
        for f in self.fgen:
            name = f["vp"]
            src = f["ap"]
            fsi = f["st"]

            inf = tarfile.TarInfo(name=name)
            inf.mode = fsi.st_mode
            inf.size = fsi.st_size
            inf.mtime = fsi.st_mtime
            inf.uid = 0
            inf.gid = 0

            self.ci += inf.size
            with open(fsenc(src), "rb", 512 * 1024) as f:
                self.tar.addfile(inf, f)

        self.tar.close()
        self.qfile.q.put(None)
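StreamTar works by handing tarfile a write-only file object whose write() pushes every chunk into a bounded queue; a daemon thread drives tarfile while gen() drains the queue towards the socket. A self-contained toy version of the same idea, using the stdlib queue and in-memory payloads (all names here are illustrative, not copyparty's API):

```python
import io
import queue
import tarfile
import threading

class QWriter(object):
    """file-like object that pushes every write into a queue"""
    def __init__(self):
        self.q = queue.Queue(64)

    def write(self, buf):
        self.q.put(buf)

def stream_tar(files):
    """files = [(arcname, payload_bytes), ...]; yields tar chunks as they are produced"""
    qf = QWriter()

    def pack():
        with tarfile.open(fileobj=qf, mode="w|", format=tarfile.GNU_FORMAT) as tar:
            for name, data in files:
                inf = tarfile.TarInfo(name=name)
                inf.size = len(data)
                tar.addfile(inf, io.BytesIO(data))
        qf.q.put(None)  # sentinel: archive complete

    threading.Thread(target=pack, daemon=True).start()
    while True:
        buf = qf.q.get()
        if buf is None:
            break
        yield buf

total = sum(len(buf) for buf in stream_tar([("a.txt", b"hi"), ("b.txt", b"yo")]))
print("streamed", total, "bytes of tar")
```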

copyparty/szip.py (new file, 244 lines)

@@ -0,0 +1,244 @@
import time
import zlib
import struct
from datetime import datetime

from .util import yieldfile, sanitize_fn


def dostime2unix(buf):
    t, d = struct.unpack("<HH", buf)

    ts = (t & 0x1F) * 2
    tm = (t >> 5) & 0x3F
    th = t >> 11

    dd = d & 0x1F
    dm = (d >> 5) & 0xF
    dy = (d >> 9) + 1980

    tt = (dy, dm, dd, th, tm, ts)
    tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
    iso = tf.format(*tt)

    dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
    return int(dt.timestamp())


def unixtime2dos(ts):
    tt = time.gmtime(ts)
    dy, dm, dd, th, tm, ts = list(tt)[:6]

    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    bt = (th << 11) + (tm << 5) + ts // 2
    return struct.pack("<HH", bt, bd)


def gen_fdesc(sz, crc32, z64):
    ret = b"\x50\x4b\x07\x08"
    fmt = "<LQQ" if z64 else "<LLL"
    ret += struct.pack(fmt, crc32, sz, sz)
    return ret


def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
    """
    does regular file headers
    and the central directory meme if h_pos is set
    (h_pos = absolute position of the regular header)
    """

    # appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
    # extinfo for values which exceed H, but that becomes an off-by-one
    # (can't tell if it was clamped or exactly maxval), make it obvious
    z64 = sz >= 0xFFFFFFFF
    z64v = [sz, sz] if z64 else []
    if h_pos and h_pos >= 0xFFFFFFFF:
        # central, also consider ptr to original header
        z64v.append(h_pos)

    # confusingly this doesn't bump if h_pos
    req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"

    if crc32:
        crc32 = struct.pack("<L", crc32)
    else:
        crc32 = b"\x00" * 4

    if h_pos is None:
        # 4b magic, 2b min-ver
        ret = b"\x50\x4b\x03\x04" + req_ver
    else:
        # 4b magic, 2b spec-ver, 2b min-ver
        ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver

    ret += b"\x00" if pre_crc else b"\x08"  # streaming
    ret += b"\x08" if utf8 else b"\x00"  # appnote 6.3.2 (2007)

    # 2b compression, 4b time, 4b crc
    ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32

    # spec says to put zeros when !crc if bit3 (streaming)
    # however infozip does actual sz and it even works on winxp
    # (same reasoning for z64 extradata later)
    vsz = 0xFFFFFFFF if z64 else sz
    ret += struct.pack("<LL", vsz, vsz)

    # windows support (the "?" replace below too)
    fn = sanitize_fn(fn, "/")
    bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")

    z64_len = len(z64v) * 8 + 4 if z64v else 0
    ret += struct.pack("<HH", len(bfn), z64_len)

    if h_pos is not None:
        # 2b comment, 2b diskno, 2b internal.attr,
        # 4b external.attr (infozip-linux: 0000(a481|ff81)) idk
        ret += b"\x00" * 10

        # 4b local-header-ofs
        ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))

    ret += bfn

    if z64v:
        ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)

    return ret


def gen_ecdr(items, cdir_pos, cdir_end):
    """
    summary of all file headers,
    usually the zipfile footer unless something clamps
    """
    ret = b"\x50\x4b\x05\x06"

    # 2b ndisk, 2b disk0
    ret += b"\x00" * 4

    cdir_sz = cdir_end - cdir_pos
    nitems = min(0xFFFF, len(items))
    csz = min(0xFFFFFFFF, cdir_sz)
    cpos = min(0xFFFFFFFF, cdir_pos)
    need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]

    # 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
    ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)

    # 2b comment length
    ret += b"\x00\x00"

    return [ret, need_64]


def gen_ecdr64(items, cdir_pos, cdir_end):
    """
    z64 end of central directory
    added when numfiles or a headerptr clamps
    """
    ret = b"\x50\x4b\x06\x06"

    # 8b own length from hereon
    ret += b"\x2c" + b"\x00" * 7

    # 2b spec-ver, 2b min-ver
    ret += b"\x1e\x03\x2d\x00"

    # 4b ndisk, 4b disk0
    ret += b"\x00" * 8

    # 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
    cdir_sz = cdir_end - cdir_pos
    ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)

    return ret


def gen_ecdr64_loc(ecdr64_pos):
    """
    z64 end of central directory locator
    points to ecdr64
    why
    """
    ret = b"\x50\x4b\x06\x07"

    # 4b cdisk, 8b start of ecdr64, 4b ndisks
    ret += struct.pack("<LQL", 0, ecdr64_pos, 1)

    return ret


class StreamZip(object):
    def __init__(self, fgen, utf8=False, pre_crc=False):
        self.fgen = fgen
        self.utf8 = utf8
        self.pre_crc = pre_crc

        self.pos = 0
        self.items = []

    def _ct(self, buf):
        self.pos += len(buf)
        return buf

    def gen(self):
        for f in self.fgen:
            name = f["vp"]
            src = f["ap"]
            st = f["st"]

            sz = st.st_size
            ts = st.st_mtime + 1

            crc = None
            if self.pre_crc:
                crc = 0
                for buf in yieldfile(src):
                    crc = zlib.crc32(buf, crc)

                crc &= 0xFFFFFFFF

            h_pos = self.pos
            buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
            yield self._ct(buf)

            crc = crc or 0
            for buf in yieldfile(src):
                if not self.pre_crc:
                    crc = zlib.crc32(buf, crc)

                yield self._ct(buf)

            crc &= 0xFFFFFFFF
            self.items.append([name, sz, ts, crc, h_pos])

            z64 = sz >= 4 * 1024 * 1024 * 1024

            if z64 or not self.pre_crc:
                buf = gen_fdesc(sz, crc, z64)
                yield self._ct(buf)

        cdir_pos = self.pos
        for name, sz, ts, crc, h_pos in self.items:
            buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
            yield self._ct(buf)
        cdir_end = self.pos

        _, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
        if need_64:
            ecdir64_pos = self.pos
            buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
            yield self._ct(buf)

            buf = gen_ecdr64_loc(ecdir64_pos)
            yield self._ct(buf)

        ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
        yield self._ct(ecdr)
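The zip headers store timestamps as two 16-bit DOS words (year-since-1980/month/day and hour/minute/half-seconds), which is exactly what unixtime2dos packs. A small worked check using a standalone copy of the function above:

```python
import struct
import time

def unixtime2dos(ts):
    dy, dm, dd, th, tm, ts = list(time.gmtime(ts))[:6]
    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    bt = (th << 11) + (tm << 5) + ts // 2
    return struct.pack("<HH", bt, bd)

# 2021-03-27 01:27:12 UTC:
#   date word = (41 << 9) + (3 << 5) + 27 = 21115
#   time word = (1 << 11) + (27 << 5) + 12 // 2 = 2918
print(unixtime2dos(1616808432).hex())  # 660b7b52 (little-endian time word, then date word)
```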


@@ -232,7 +232,8 @@
             (ft if v is True else ff if v is False else fv).format(k, str(v))
             for k, v in flags.items()
         ]
-        self.log(" ".join(sorted(a)) + "\033[0m")
+        if a:
+            self.log(" ".join(sorted(a)) + "\033[0m")

         reg = {}
         path = os.path.join(ptop, ".hist", "up2k.snap")
@@ -288,9 +289,9 @@
         self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]

         excl = [
-            vol.realpath + d.vpath[len(vol.vpath) :]
+            vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
             for d in all_vols
-            if d.vpath.startswith(vol.vpath + "/")
+            if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
         ]
         n_add = self._build_dir(dbw, top, set(excl), top)
         n_rm = self._drop_lost(dbw[0], top)
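The corrected exclusion list maps each nested volume's vpath suffix onto the parent volume's real path, so the indexer skips those subtrees instead of walking into them twice. A rough illustration with made-up volumes (plain dicts standing in for VFS nodes):

```python
vols = [
    {"vpath": "", "realpath": "/srv/pub"},
    {"vpath": "music", "realpath": "/mnt/flac"},
    {"vpath": "music/live", "realpath": "/mnt/bootlegs"},
]

for vol in vols:
    excl = [
        vol["realpath"] + "/" + d["vpath"][len(vol["vpath"]):].lstrip("/")
        for d in vols
        if d is not vol
        and (d["vpath"].startswith(vol["vpath"] + "/") or not vol["vpath"])
    ]
    print(vol["vpath"] or "/", "->", excl)

# /           -> ['/srv/pub/music', '/srv/pub/music/live']
# music       -> ['/mnt/flac/live']
# music/live  -> []
```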


@@ -576,11 +576,12 @@ def undot(path):
     return "/".join(ret)


-def sanitize_fn(fn):
-    fn = fn.replace("\\", "/").split("/")[-1]
+def sanitize_fn(fn, ok=""):
+    if "/" not in ok:
+        fn = fn.replace("\\", "/").split("/")[-1]

     if WINDOWS:
-        for bad, good in [
+        for bad, good in [x for x in [
             ["<", ""],
             [">", ""],
             [":", ""],
@@ -590,7 +591,7 @@ def sanitize_fn(fn):
             ["|", ""],
             ["?", ""],
             ["*", ""],
-        ]:
+        ] if x[0] not in ok]:
             fn = fn.replace(bad, good)

         bad = ["con", "prn", "aux", "nul"]
@@ -780,6 +781,16 @@
     sr.recv(2)  # \r\n after each chunk too


+def yieldfile(fn):
+    with open(fsenc(fn), "rb", 512 * 1024) as f:
+        while True:
+            buf = f.read(64 * 1024)
+            if not buf:
+                break
+
+            yield buf
+
+
 def hashcopy(actor, fin, fout):
     u32_lim = int((2 ** 31) * 0.9)
     hashobj = hashlib.sha512()
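sanitize_fn gained an ok parameter so zip/tar entry names can keep their "/" separators (and any other whitelisted character) while uploaded filenames are still reduced to a bare basename. A simplified sketch of the effect (the real function only strips these characters on Windows and also renames reserved device names):

```python
def sanitize_fn(fn, ok=""):
    if "/" not in ok:
        fn = fn.replace("\\", "/").split("/")[-1]

    for bad in '<>:"|?*':
        if bad not in ok:
            fn = fn.replace(bad, "")

    return fn

print(sanitize_fn("music/a:b.flac"))       # ab.flac        (upload: basename only)
print(sanitize_fn("music/a:b.flac", "/"))  # music/ab.flac  (archive entry: path kept)
```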


@@ -41,10 +41,12 @@
 <div id="op_cfg" class="opview opbox">
     <h3>key notation</h3>
     <div id="key_notation"></div>
+    {%- if have_zip %}
+    <h3>folder download</h3>
+    <div id="arc_fmt"></div>
+    {%- endif %}
     <h3>tooltips</h3>
-    <div>
-        <a id="tooltips" class="tglbtn" href="#">enable</a>
-    </div>
+    <div><a id="tooltips" class="tglbtn" href="#">enable</a></div>
 </div>

 <h1 id="path">


@@ -1149,6 +1149,7 @@ var treectl = (function () {
     filecols.set_style();
     mukey.render();
+    arcfmt.render();
     reload_tree();
     reload_browser();
 }
@@ -1559,6 +1560,70 @@ function addcrc() {
 })();

+var arcfmt = (function () {
+    if (!ebi('arc_fmt'))
+        return { "render": function () { } };
+
+    var html = [],
+        arcfmts = ["tar", "zip", "zip_dos", "zip_crc"],
+        arcv = ["tar", "zip=utf8", "zip", "zip=crc"];
+
+    for (var a = 0; a < arcfmts.length; a++) {
+        var k = arcfmts[a];
+        html.push(
+            '<span><input type="radio" name="arcfmt" value="' + k + '" id="arcfmt_' + k + '">' +
+            '<label for="arcfmt_' + k + '">' + k + '</label></span>');
+    }
+    ebi('arc_fmt').innerHTML = html.join('\n');
+
+    var fmt = sread("arc_fmt") || "zip";
+    ebi('arcfmt_' + fmt).checked = true;
+
+    function render() {
+        var arg = arcv[arcfmts.indexOf(fmt)],
+            tds = document.querySelectorAll('#files tbody td:first-child a');
+
+        for (var a = 0, aa = tds.length; a < aa; a++) {
+            var o = tds[a], txt = o.textContent, href = o.getAttribute('href');
+            if (txt != 'tar' && txt != 'zip')
+                continue;
+
+            var ofs = href.lastIndexOf('?');
+            if (ofs < 0)
+                throw 'missing arg in url';
+
+            o.setAttribute("href", href.slice(0, ofs + 1) + arg);
+            o.textContent = fmt.split('_')[0];
+        }
+    }
+
+    function try_render() {
+        try {
+            render();
+        }
+        catch (ex) {
+            console.log("arcfmt failed: " + ex);
+        }
+    }
+
+    function change_fmt(e) {
+        ev(e);
+        fmt = this.getAttribute('value');
+        swrite("arc_fmt", fmt);
+        try_render();
+    }
+
+    var o = document.querySelectorAll('#arc_fmt input');
+    for (var a = 0; a < o.length; a++) {
+        o[a].onchange = change_fmt;
+    }
+
+    return {
+        "render": try_render
+    };
+})();
+
 function ev_row_tgl(e) {
     ev(e);
     filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
@@ -1611,3 +1676,4 @@ function reload_browser(not_mp) {
 }
 reload_browser(true);
 mukey.render();
+arcfmt.render();


@@ -147,7 +147,7 @@ var md_opt = {
 </script>
 <script src="/.cpr/util.js"></script>
-<script src="/.cpr/deps/marked.full.js"></script>
+<script src="/.cpr/deps/marked.js"></script>
 <script src="/.cpr/md.js"></script>
 {%- if edit %}
 <script src="/.cpr/md2.js"></script>


@@ -278,18 +278,27 @@ function up2k_init(have_crypto) {
         }
         else files = e.target.files;

-        if (files.length == 0)
+        if (!files || files.length == 0)
             return alert('no files selected??');

         more_one_file();
         var bad_files = [];
         var good_files = [];
+        var dirs = [];
         for (var a = 0; a < files.length; a++) {
             var fobj = files[a];
             if (is_itemlist) {
                 if (fobj.kind !== 'file')
                     continue;

+                try {
+                    var wi = fobj.webkitGetAsEntry();
+                    if (wi.isDirectory) {
+                        dirs.push(wi);
+                        continue;
+                    }
+                }
+                catch (ex) { }
+
                 fobj = fobj.getAsFile();
             }

             try {
@@ -300,12 +309,69 @@ function up2k_init(have_crypto) {
                 bad_files.push(fobj.name);
                 continue;
             }

-            good_files.push(fobj);
+            good_files.push([fobj, fobj.name]);
         }
+
+        if (dirs) {
+            return read_dirs(null, [], dirs, good_files, bad_files);
+        }
+    }
+
+    function read_dirs(rd, pf, dirs, good, bad) {
+        if (!dirs.length) {
+            if (!pf.length)
+                return gotallfiles(good, bad);
+
+            console.log("retry pf, " + pf.length);
+            setTimeout(function () {
+                read_dirs(rd, pf, dirs, good, bad);
+            }, 50);
+            return;
+        }
+
+        if (!rd)
+            rd = dirs[0].createReader();
+
+        rd.readEntries(function (ents) {
+            var ngot = 0;
+            ents.forEach(function (dn) {
+                if (dn.isDirectory) {
+                    dirs.push(dn);
+                }
+                else {
+                    var name = dn.fullPath;
+                    if (name.indexOf('/') === 0)
+                        name = name.slice(1);
+
+                    pf.push(name);
+                    dn.file(function (fobj) {
+                        var idx = pf.indexOf(name);
+                        pf.splice(idx, 1);
+
+                        try {
+                            if (fobj.size > 0) {
+                                good.push([fobj, name]);
+                                return;
+                            }
+                        }
+                        catch (ex) { }
+
+                        bad.push(name);
+                    });
+                }
+                ngot += 1;
+            });
+            // console.log("ngot: " + ngot);
+            if (!ngot) {
+                dirs.shift();
+                rd = null;
+            }
+            return read_dirs(rd, pf, dirs, good, bad);
+        });
+    }
+
+    function gotallfiles(good_files, bad_files) {
         if (bad_files.length > 0) {
-            var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
-            for (var a = 0; a < bad_files.length; a++)
+            var ntot = bad_files.length + good_files.length;
+            var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
+            for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
                 msg += '-- ' + bad_files[a] + '\n';

             if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
@@ -315,21 +381,21 @@
         }

         var msg = ['upload these ' + good_files.length + ' files?'];
-        for (var a = 0; a < good_files.length; a++)
-            msg.push(good_files[a].name);
+        for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
+            msg.push(good_files[a][1]);

         if (ask_up && !fsearch && !confirm(msg.join('\n')))
             return;

         for (var a = 0; a < good_files.length; a++) {
-            var fobj = good_files[a];
+            var fobj = good_files[a][0];
             var now = new Date().getTime();
             var lmod = fobj.lastModified || now;
             var entry = {
                 "n": parseInt(st.files.length.toString()),
-                "t0": now, // TODO remove probably
+                "t0": now,
                 "fobj": fobj,
-                "name": fobj.name,
+                "name": good_files[a][1],
                 "size": fobj.size,
                 "lmod": lmod / 1000,
                 "purl": get_evpath(),


@@ -88,7 +88,7 @@
     width: 30em;
 }
 #u2conf.has_btn {
-    width: 46em;
+    width: 48em;
 }
 #u2conf * {
     text-align: center;


@@ -73,7 +73,8 @@
 <div id="u2btn_ct">
     <div id="u2btn">
         <span id="u2bm"></span><br />
-        drop files here<br />
+        drag/drop files<br />
+        and folders here<br />
         (or click me)
     </div>
 </div>


@@ -237,7 +237,10 @@ function goto(dest) {
     goto();

     var op = sread('opmode');
     if (op !== null && op !== '.')
-        goto(op);
+        try {
+            goto(op);
+        }
+        catch (ex) { }
 })();