Compare commits
18 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 389a00ce59 | |
| | 7a460de3c2 | |
| | 8ea1f4a751 | |
| | 1c69ccc6cd | |
| | 84b5bbd3b6 | |
| | 9ccd327298 | |
| | 11df36f3cf | |
| | f62dd0e3cc | |
| | ad18b6e15e | |
| | c00b80ca29 | |
| | 92ed4ba3f8 | |
| | 7de9775dd9 | |
| | 5ce9060e5c | |
| | f727d5cb5a | |
| | 4735fb1ebb | |
| | c7d05cc13d | |
| | 51c152ff4a | |
| | eeed2a840c | |
README.md (11 changes)
@@ -249,12 +249,18 @@ some improvement ideas
* Windows: if the `up2k.db` (filesystem index) is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
  * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* [the database can get stuck](https://github.com/9001/copyparty/issues/10)
  * has only happened once, but that is once too many
  * luckily not dangerous for file integrity, and it doesn't really stop uploads or anything like that
  * but would really appreciate some logs if anyone ever runs into it again
* probably more, pls let me know

## not my bugs

* [Chrome issue 1317069](https://bugs.chromium.org/p/chromium/issues/detail?id=1317069) -- if you try to upload a folder which contains symlinks by dragging it into the browser, the symlinked files will not get uploaded

* [Chrome issue 1352210](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210) -- plaintext http may be faster at filehashing than https (but also extremely CPU-intensive)

* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
  * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
  * "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
@@ -1008,6 +1014,10 @@ this is due to `crypto.subtle` [not yet](https://github.com/w3c/webcrypto/issues

as a result, the hashes are much less useful than they could have been (search the server by sha512, provide the sha512 in the response http headers, ...)

however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such)

* both the [browser uploader](#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) do this now, allowing for fast uploading even from plaintext http

hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported

* blake2 might be a better choice since xxh is non-cryptographic, but that gets ~15 MiB/s on slower androids
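for reference, a minimal sketch of how each chunk digest is derived (mirroring `hash_at` in the `bin/up2k.py` diff further down; `buf` is a hypothetical chunk of bytes):

```python
import base64
import hashlib

buf = b"one chunk of file contents"          # hypothetical chunk
dig = hashlib.sha512(buf).digest()[:33]      # sha512, truncated to 33 bytes
b64 = base64.urlsafe_b64encode(dig).decode("utf-8")
# -> 44-char urlsafe-base64 string; one of these per chunk
```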
@@ -1041,6 +1051,7 @@ when uploading files,

* if you're cpu-bottlenecked, or the browser is maxing a cpu core:
  * up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it)
    * optionally you can switch to the lightweight potato ui by clicking the `[🥔]`
    * switching to another browser-tab also works; the favicon will update every 10 seconds in that case
  * unlikely to be a problem, but can happen when uploading many small files, or if your internet is too fast, or your PC too slow

@@ -47,8 +47,8 @@ CONDITIONAL_UPLOAD = True


def main():
    fp = sys.argv[1]
    if CONDITIONAL_UPLOAD:
        fp = sys.argv[1]
        zb = sys.stdin.buffer.read()
        zs = zb.decode("utf-8", "replace")
        md = json.loads(zs)

@@ -97,7 +97,7 @@ def main():
    zs = (
        "ffmpeg -y -hide_banner -nostdin -v warning"
        + " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
        " -xerror -i"
        + " -xerror -i"
    )

    cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]

bin/up2k.py (170 changes)
@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals

"""
up2k.py: upload to copyparty
2022-06-16, v0.15, ed <irc.rizon.net>, MIT-Licensed
2022-08-13, v0.18, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py

- dependencies: requests

@@ -22,12 +22,29 @@ import atexit
import signal
import base64
import hashlib
import argparse
import platform
import threading
import datetime

import requests
try:
    import argparse
except:
    m = "\n ERROR: need 'argparse'; download it here:\n https://github.com/ThomasWaldmann/argparse/raw/master/argparse.py\n"
    print(m)
    raise

try:
    import requests
except:
    if sys.version_info > (2, 7):
        m = "\n ERROR: need 'requests'; run this:\n python -m pip install --user requests\n"
    else:
        m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7"
        m = [" https://pypi.org/project/" + x + "/#files" for x in m.split()]
        m = "\n ERROR: need these:\n" + "\n".join(m) + "\n"

    print(m)
    raise


# from copyparty/__init__.py

@@ -126,6 +143,89 @@ class FileSlice(object):
        return ret


class MTHash(object):
    def __init__(self, cores):
        self.f = None
        self.sz = 0
        self.csz = 0
        self.omutex = threading.Lock()
        self.imutex = threading.Lock()
        self.work_q = Queue()
        self.done_q = Queue()
        self.thrs = []
        for _ in range(cores):
            t = threading.Thread(target=self.worker)
            t.daemon = True
            t.start()
            self.thrs.append(t)

    def hash(self, f, fsz, chunksz, pcb=None, pcb_opaque=None):
        with self.omutex:
            self.f = f
            self.sz = fsz
            self.csz = chunksz

            chunks = {}
            nchunks = int(math.ceil(fsz / chunksz))
            for nch in range(nchunks):
                self.work_q.put(nch)

            ex = ""
            for nch in range(nchunks):
                qe = self.done_q.get()
                try:
                    nch, dig, ofs, csz = qe
                    chunks[nch] = [dig, ofs, csz]
                except:
                    ex = ex or qe

                if pcb:
                    pcb(pcb_opaque, chunksz * nch)

            if ex:
                raise Exception(ex)

            ret = []
            for n in range(nchunks):
                ret.append(chunks[n])

            self.f = None
            self.csz = 0
            self.sz = 0
            return ret

    def worker(self):
        while True:
            ofs = self.work_q.get()
            try:
                v = self.hash_at(ofs)
            except Exception as ex:
                v = str(ex)

            self.done_q.put(v)

    def hash_at(self, nch):
        f = self.f
        ofs = ofs0 = nch * self.csz
        hashobj = hashlib.sha512()
        chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
        while chunk_rem > 0:
            with self.imutex:
                f.seek(ofs)
                buf = f.read(min(chunk_rem, 1024 * 1024 * 12))

            if not buf:
                raise Exception("EOF at " + str(ofs))

            hashobj.update(buf)
            chunk_rem -= len(buf)
            ofs += len(buf)

        digest = hashobj.digest()[:33]
        digest = base64.urlsafe_b64encode(digest).decode("utf-8")
        return nch, digest, ofs0, chunk_sz

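a minimal usage sketch for the MTHash class above (not part of the diff; the filename and core count are hypothetical, and `up2k_chunksize` is the helper referenced further down):

```python
import os

mth = MTHash(3)  # 3 hasher threads, matching the new hcores default
with open("bigfile.bin", "rb", 512 * 1024) as f:
    fsz = os.path.getsize("bigfile.bin")
    # returns [[b64-digest, offset, chunk-size], ...] in chunk order
    hashlist = mth.hash(f, fsz, up2k_chunksize(fsz))
```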
_print = print


@@ -230,8 +330,8 @@ def _scd(err, top):
        abspath = os.path.join(top, fh.name)
        try:
            yield [abspath, fh.stat()]
        except:
            err.append(abspath)
        except Exception as ex:
            err.append((abspath, str(ex)))


def _lsd(err, top):

@@ -240,8 +340,8 @@ def _lsd(err, top):
        abspath = os.path.join(top, name)
        try:
            yield [abspath, os.stat(abspath)]
        except:
            err.append(abspath)
        except Exception as ex:
            err.append((abspath, str(ex)))


if hasattr(os, "scandir"):

@@ -250,15 +350,21 @@ else:
    statdir = _lsd


def walkdir(err, top):
def walkdir(err, top, seen):
    """recursive statdir"""
    atop = os.path.abspath(os.path.realpath(top))
    if atop in seen:
        err.append((top, "recursive-symlink"))
        return

    seen = seen[:] + [atop]
    for ap, inf in sorted(statdir(err, top)):
        if stat.S_ISDIR(inf.st_mode):
            try:
                for x in walkdir(err, ap):
                for x in walkdir(err, ap, seen):
                    yield x
            except:
                err.append(ap)
            except Exception as ex:
                err.append((ap, str(ex)))
        else:
            yield ap, inf

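the `seen` parameter added to `walkdir` above guards against symlink loops by resolving each directory to its real path and refusing to descend into one that is already on the current traversal path; the same technique as a standalone sketch (assuming python 3):

```python
import os

def walk_safe(top, seen=()):
    atop = os.path.abspath(os.path.realpath(top))
    if atop in seen:
        return  # recursive symlink; bail instead of looping forever
    seen = seen + (atop,)
    for name in sorted(os.listdir(top)):
        ap = os.path.join(top, name)
        if os.path.isdir(ap):
            yield from walk_safe(ap, seen)
        else:
            yield ap
```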
@@ -273,7 +379,7 @@ def walkdirs(err, tops):
        stop = os.path.dirname(top)

        if os.path.isdir(top):
            for ap, inf in walkdir(err, top):
            for ap, inf in walkdir(err, top, []):
                yield stop, ap[len(stop) :].lstrip(sep), inf
        else:
            d, n = top.rsplit(sep, 1)

@@ -322,8 +428,8 @@ def up2k_chunksize(filesize):


# mostly from copyparty/up2k.py
def get_hashlist(file, pcb):
    # type: (File, any) -> None
def get_hashlist(file, pcb, mth):
    # type: (File, any, any) -> None
    """generates the up2k hashlist from file contents, inserts it into `file`"""

    chunk_sz = up2k_chunksize(file.size)

@@ -331,7 +437,12 @@ def get_hashlist(file, pcb):
    file_ofs = 0
    ret = []
    with open(file.abs, "rb", 512 * 1024) as f:
        if mth and file.size >= 1024 * 512:
            ret = mth.hash(f, file.size, chunk_sz, pcb, file)
            file_rem = 0

        while file_rem > 0:
            # same as `hash_at` except for `imutex` / bufsz
            hashobj = hashlib.sha512()
            chunk_sz = chunk_rem = min(chunk_sz, file_rem)
            while chunk_rem > 0:

@@ -388,8 +499,9 @@ def handshake(req_ses, url, file, pw, search):
        try:
            r = req_ses.post(url, headers=headers, json=req)
            break
        except:
            eprint("handshake failed, retrying: {0}\n".format(file.name))
        except Exception as ex:
            em = str(ex).split("SSLError(")[-1]
            eprint("handshake failed, retrying: {0}\n {1}\n\n".format(file.name, em))
            time.sleep(1)

    try:

@@ -398,7 +510,7 @@ def handshake(req_ses, url, file, pw, search):
        raise Exception(r.text)

    if search:
        return r["hits"]
        return r["hits"], False

    try:
        pre, url = url.split("://")

@@ -470,12 +582,19 @@ class Ctl(object):

        if err:
            eprint("\n# failed to access {0} paths:\n".format(len(err)))
            for x in err:
                eprint(x.decode("utf-8", "replace") + "\n")
            for ap, msg in err:
                if ar.v:
                    eprint("{0}\n `-{1}\n\n".format(ap.decode("utf-8", "replace"), msg))
                else:
                    eprint(ap.decode("utf-8", "replace") + "\n")

            eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))

            if not ar.v:
                eprint("hint: set -v for detailed error messages\n")

            if not ar.ok:
                eprint("aborting because --ok is not set\n")
                eprint("hint: aborting because --ok is not set\n")
                return

        eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))

@@ -516,6 +635,8 @@ class Ctl(object):
        self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]

        self.mth = MTHash(ar.J) if ar.J > 1 else None

        self._fancy()

    def _safe(self):

@@ -526,7 +647,7 @@ class Ctl(object):
            upath = file.abs.decode("utf-8", "replace")

            print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
            get_hashlist(file, None)
            get_hashlist(file, None, None)

            burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
            while True:

@@ -679,7 +800,7 @@ class Ctl(object):

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            get_hashlist(file, self.cb_hasher, self.mth)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)

@@ -808,6 +929,9 @@ def main():
    if not VT100:
        os.system("rem") # enables colors

    cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
    hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:

@@ -818,11 +942,13 @@ source file/folder selection uses rsync syntax, meaning that:

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-v", action="store_true", help="verbose")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")

@@ -41,12 +41,14 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
        textdec = new TextDecoder('latin1'),
        md_ptn = new TextEncoder().encode('youtube.com/watch?v='),
        file_ids = [], // all IDs found for each good_files
        md_only = [], // `${id} ${fn}` where ID was only found in metadata
        mofs = 0,
        mnchk = 0,
        mfile = '';

    for (var a = 0; a < good_files.length; a++) {
        var [fobj, name] = good_files[a],
            cname = name, // will clobber
            sz = fobj.size,
            ids = [],
            id_ok = false,

@@ -57,23 +59,23 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {

        // look for ID in filename; reduce the
        // metadata-scan intensity if the id looks safe
        m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv)$/i.exec(name);
        m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
        id_ok = !!m;

        while (true) {
            // fuzzy catch-all;
            // some ytdl fork did %(title)-%(id).%(ext) ...
            m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(name);
            m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(cname);
            if (!m)
                break;

            name = name.replace(m[1], '');
            cname = cname.replace(m[1], '');
            yt_ids.add(m[1]);
            ids.push(m[1]);
        }

        // look for IDs in video metadata,
        if (/\.(mp4|webm|mkv)$/i.exec(name)) {
        if (/\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name)) {
            toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`);

            // check first and last 128 MiB;

@@ -108,8 +110,10 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {

                    console.log(`found ${m} @${bofs}, ${name} `);
                    yt_ids.add(m);
                    if (!has(ids, m))
                    if (!has(ids, m)) {
                        ids.push(m);
                        md_only.push(`${m} ${name}`);
                    }

                    // bail after next iteration
                    chunk = nchunks - 1;

@@ -128,6 +132,13 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
        }
    }

    if (md_only.length)
        console.log('recovered the following youtube-IDs by inspecting metadata:\n\n' + md_only.join('\n'));
    else if (yt_ids.size)
        console.log('did not discover any additional youtube-IDs by inspecting metadata; all the IDs also existed in the filenames');
    else
        console.log('failed to find any youtube-IDs at all, sorry');

    if (false) {
        var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`,
            mfun = function () { toast.ok(0, msg); };

@@ -138,7 +149,7 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
        return hooks[0]([], [], [], hooks.slice(1));
    }

    toast.inf(5, `running query for ${yt_ids.size} videos...`);
    toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`);

    var xhr = new XHR();
    xhr.open('POST', '/ytq', true);

@@ -162,7 +173,7 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
                if (wanted_ids.has(file_ids[a][b])) {
                    wanted_files.add(good_files[a]);

                    var m = /(.*)\.(mp4|webm|mkv)$/i.exec(name);
                    var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
                    if (m)
                        wanted_names.add(m[1]);

@@ -118,12 +118,13 @@ class BasicDodge11874(


def lprint(*a: Any, **ka: Any) -> None:
    txt: str = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
    eol = ka.pop("end", "\n")
    txt: str = " ".join(unicode(x) for x in a) + eol
    printed.append(txt)
    if not VT100:
        txt = ansi_re.sub("", txt)

    print(txt, **ka)
    print(txt, end="", **ka)


def warn(msg: str) -> None:

@@ -138,7 +139,7 @@ def ensure_locale() -> None:
    ]:
        try:
            locale.setlocale(locale.LC_ALL, x)
            lprint("Locale:", x)
            lprint("Locale: {}\n".format(x))
            break
        except:
            continue

@@ -335,6 +336,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
    fk_salt = "hunter2"

    cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
    hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U

    sects = [
        [

@@ -550,9 +552,10 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
    ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
    ap2.add_argument("--logout", metavar="H", type=float, default="8086", help="logout clients after H hours of inactivity (0.0028=10sec, 0.1=6min, 24=day, 168=week, 720=month, 8760=year)")

    ap2 = ap.add_argument_group('yolo options')
    ap2 = ap.add_argument_group('shutdown options')
    ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
    ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
    ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example 'idx' will do volume indexing + metadata analysis")

    ap2 = ap.add_argument_group('logging options')
    ap2.add_argument("-q", action="store_true", help="quiet")

@@ -608,8 +611,10 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
    ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
    ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
    ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
    ap2.add_argument("--no-dhash", action="store_true", help="disable rescan acceleration; do full database integrity check -- makes the db ~5%% smaller and bootup/rescans 3~10x slower")
    ap2.add_argument("--xdev", action="store_true", help="do not descend into other filesystems (symlink or bind-mount to another HDD, ...)")
    ap2.add_argument("--xvol", action="store_true", help="skip symlinks leaving the volume root")
    ap2.add_argument("--hash-mt", metavar="CORES", type=int, default=hcores, help="num cpu cores to use for file hashing; set 0 or 1 for single-core hashing")
    ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
    ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
    ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")

@@ -625,9 +630,9 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
    ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
    ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
    ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
        default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
        default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
    ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
        default=".vq,.aq,vc,ac,res,.fps")
        default=".vq,.aq,vc,ac,fmt,res,.fps")
    ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")

    ap2 = ap.add_argument_group('ui options')

@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (1, 3, 9)
VERSION = (1, 3, 13)
CODENAME = "god dag"
BUILD_DT = (2022, 8, 4)
BUILD_DT = (2022, 8, 15)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

@@ -17,6 +17,7 @@ from .bos import bos
from .util import (
    IMPLICATIONS,
    META_NOBOTS,
    SQLITE_VER,
    Pebkac,
    absreal,
    fsenc,

@@ -1165,7 +1166,7 @@ class AuthSrv(object):

        vfs.bubble_flags()

        e2vs = []
        have_e2d = False
        t = "volumes and permissions:\n"
        for zv in vfs.all_vols.values():
            if not self.warn_anonwrite:

@@ -1184,24 +1185,27 @@ class AuthSrv(object):
                u = u if u else "\033[36m--none--\033[0m"
                t += "\n| {}: {}".format(txt, u)

            if "e2v" in zv.flags:
                e2vs.append(zv.vpath or "/")
            if "e2d" in zv.flags:
                have_e2d = True

            t += "\n"

        if e2vs:
            t += "\n\033[33me2v enabled for the following volumes;\nuploads will be blocked until scan has finished:\n \033[0m"
            t += " ".join(e2vs) + "\n"
        if self.warn_anonwrite:
            if not self.args.no_voldump:
                self.log(t)

        if self.warn_anonwrite and not self.args.no_voldump:
            self.log(t)
        if have_e2d:
            t = self.chk_sqlite_threadsafe()
            if t:
                self.log("\n\033[{}\033[0m\n".format(t))

        try:
            zv, _ = vfs.get("/", "*", False, True)
            if self.warn_anonwrite and os.getcwd() == zv.realpath:
                self.warn_anonwrite = False
                t = "anyone can write to the current directory: {}\n"
                self.log(t.format(zv.realpath), c=1)

            self.warn_anonwrite = False
        except Pebkac:
            self.warn_anonwrite = True

@@ -1215,6 +1219,23 @@ class AuthSrv(object):
        if pwds:
            self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")

    def chk_sqlite_threadsafe(self) -> str:
        v = SQLITE_VER[-1:]

        if v == "1":
            # threadsafe (linux, windows)
            return ""

        if v == "2":
            # module safe, connections unsafe (macos)
            return "33m your sqlite3 was compiled with reduced thread-safety;\n database features (-e2d, -e2t) SHOULD be fine\n but MAY cause database-corruption and crashes"

        if v == "0":
            # everything unsafe
            return "31m your sqlite3 was compiled WITHOUT thread-safety!\n database features (-e2d, -e2t) will PROBABLY cause crashes!"

        return "36m cannot verify sqlite3 thread-safety; strange but probably fine"

    def dbg_ls(self) -> None:
        users = self.args.ls
        vol = "*"

@@ -391,7 +391,7 @@ class Ftpd(object):
        for h, lp in hs:
            FTPServer((ip, int(lp)), h, ioloop)

        thr = threading.Thread(target=ioloop.loop)
        thr = threading.Thread(target=ioloop.loop, name="ftp")
        thr.daemon = True
        thr.start()


@@ -926,7 +926,7 @@ class HttpCli(object):
            except:
                raise Pebkac(422, "you POSTed invalid json")

            # self.reply(b" DD" + b"oS Protection ", 503)
            # self.reply(b"cloudflare", 503)
            # return True

            if "srch" in self.uparam or "srch" in body:

@@ -102,7 +102,7 @@ class HttpSrv(object):
            start_log_thrs(self.log, self.args.log_thrs, nid)

        self.th_cfg: dict[str, Any] = {}
        t = threading.Thread(target=self.post_init)
        t = threading.Thread(target=self.post_init, name="hsrv-init2")
        t.daemon = True
        t.start()

@@ -171,7 +171,7 @@ class HttpSrv(object):
            def fun() -> None:
                self.broker.say("cb_httpsrv_up")

            threading.Thread(target=fun).start()
            threading.Thread(target=fun, name="sig-hsrv-up1").start()

        while not self.stopping:
            if self.args.log_conn:


@@ -15,7 +15,7 @@ class Ico(object):
    def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]:
        """placeholder to make thumbnails not break"""

        zb = hashlib.md5(ext.encode("utf-8")).digest()[:2]
        zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4]
        if PY2:
            zb = [ord(x) for x in zb]

@@ -178,7 +178,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
    ]

    if typ == "format":
        kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
        kvm = [["duration", ".dur"], ["bit_rate", ".q"], ["format_name", "fmt"]]

    for sk, rk in kvm:
        v1 = strm.get(sk)

@@ -239,6 +239,9 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
    if ".q" in ret:
        del ret[".q"]

    if "fmt" in ret:
        ret["fmt"] = ret["fmt"].split(",")[0]

    if ".resw" in ret and ".resh" in ret:
        ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])

@@ -310,6 +313,7 @@ class MTag(object):
                "tope",
            ],
            "title": ["title", "tit2", "\u00a9nam"],
            "comment": ["comment"],
            "circle": [
                "album-artist",
                "tpe2",

@@ -146,8 +146,8 @@ class SvcHub(object):
        self.args.th_dec = list(decs.keys())
        self.thumbsrv = None
        if not args.no_thumb:
            t = "decoder preference: {}".format(", ".join(self.args.th_dec))
            self.log("thumb", t)
            t = ", ".join(self.args.th_dec) or "(None available)"
            self.log("thumb", "decoder preference: {}".format(t))

            if "pil" in self.args.th_dec and not HAVE_WEBP:
                msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"

@@ -206,6 +206,9 @@ class SvcHub(object):
            self.log("root", t, 1)

            self.retcode = 1
            self.sigterm()

    def sigterm(self) -> None:
        os.kill(os.getpid(), signal.SIGTERM)

    def cb_httpsrv_up(self) -> None:

@@ -269,7 +272,7 @@ class SvcHub(object):
    def run(self) -> None:
        self.tcpsrv.run()

        thr = threading.Thread(target=self.thr_httpsrv_up)
        thr = threading.Thread(target=self.thr_httpsrv_up, name="sig-hsrv-up2")
        thr.daemon = True
        thr.start()

@@ -308,7 +311,7 @@ class SvcHub(object):
            return "cannot reload; already in progress"

        self.reloading = True
        t = threading.Thread(target=self._reload)
        t = threading.Thread(target=self._reload, name="reloading")
        t.daemon = True
        t.start()
        return "reload initiated"

@@ -28,6 +28,7 @@ from .mtag import MParser, MTag
from .util import (
    HAVE_SQLITE3,
    SYMTIME,
    MTHash,
    Pebkac,
    ProgressPrinter,
    absreal,

@@ -45,6 +46,7 @@ from .util import (
    s3enc,
    sanitize_fn,
    statdir,
    vjoin,
    vsplit,
    w8b64dec,
    w8b64enc,

@@ -155,6 +157,11 @@ class Up2k(object):

        self.fstab = Fstab(self.log_func)

        if self.args.hash_mt < 2:
            self.mth: Optional[MTHash] = None
        else:
            self.mth = MTHash(self.args.hash_mt)

        if self.args.no_fastboot:
            self.deferred_init()

@@ -176,6 +183,9 @@ class Up2k(object):
        all_vols = self.asrv.vfs.all_vols
        have_e2d = self.init_indexes(all_vols, [])

        if not self.pp and self.args.exit == "idx":
            return self.hub.sigterm()

        thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
        thr.daemon = True
        thr.start()

@@ -565,7 +575,6 @@ class Up2k(object):
                t = "online (running mtp)"
                if scan_vols:
                    thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
                    thr.daemon = True
            else:
                self.pp = None
                t = "online, idle"

@@ -574,6 +583,7 @@ class Up2k(object):
            self.volstate[vol.vpath] = t

        if thr:
            thr.daemon = True
            thr.start()

        return have_e2d

@@ -724,6 +734,13 @@ class Up2k(object):
        if db.n:
            self.log("commit {} new files".format(db.n))

        if self.args.no_dhash:
            if db.c.execute("select d from dh").fetchone():
                db.c.execute("delete from dh")
                self.log("forgetting dhashes in {}".format(top))
        elif n_add or n_rm:
            self._set_tagscan(db.c, True)

        db.c.connection.commit()

        return True, bool(n_add or n_rm or do_vac)

@@ -742,7 +759,7 @@ class Up2k(object):
        xvol: bool,
    ) -> int:
        if xvol and not rcdir.startswith(top):
            self.log("skip xvol: [{}] -> [{}]".format(top, rcdir), 6)
            self.log("skip xvol: [{}] -> [{}]".format(cdir, rcdir), 6)
            return 0

        if rcdir in seen:

@@ -750,29 +767,32 @@ class Up2k(object):
            self.log(t.format(seen[-1], rcdir, cdir), 3)
            return 0

        ret = 0
        seen = seen + [rcdir]
        unreg: list[str] = []
        files: list[tuple[int, int, str]] = []

        assert self.pp and self.mem_cur
        self.pp.msg = "a{} {}".format(self.pp.n, cdir)
        ret = 0
        unreg: list[str] = []
        seen_files = {} # != inames; files-only for dropcheck

        rd = cdir[len(top) :].strip("/")
        if WINDOWS:
            rd = rd.replace("\\", "/").strip("/")

        g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
        gl = sorted(g)
        inames = {x[0]: 1 for x in gl}
        partials = set([x[0] for x in gl if "PARTIAL" in x[0]])
        for iname, inf in gl:
            if self.stop:
                return -1

            rp = vjoin(rd, iname)
            abspath = os.path.join(cdir, iname)
            rp = abspath[len(top) :].lstrip("/")
            if WINDOWS:
                rp = rp.replace("\\", "/").strip("/")

            if rei and rei.search(abspath):
                unreg.append(rp)
                continue

            nohash = reh.search(abspath) if reh else False
            lmod = int(inf.st_mtime)
            sz = inf.st_size
            if stat.S_ISDIR(inf.st_mode):

@@ -798,19 +818,53 @@ class Up2k(object):
                self.log("skip type-{:x} file [{}]".format(inf.st_mode, abspath))
            else:
                # self.log("file: {}".format(abspath))
                seen_files[iname] = 1
                if rp.endswith(".PARTIAL") and time.time() - lmod < 60:
                    # rescan during upload
                    continue

                if not sz and (
                    "{}.PARTIAL".format(iname) in inames
                    or ".{}.PARTIAL".format(iname) in inames
                    "{}.PARTIAL".format(iname) in partials
                    or ".{}.PARTIAL".format(iname) in partials
                ):
                    # placeholder for unfinished upload
                    continue

                rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
                files.append((sz, lmod, iname))

        # folder of 1000 files = ~1 MiB RAM best-case (tiny filenames);
        # free up stuff we're done with before dhashing
        gl = []
        partials.clear()
        if not self.args.no_dhash:
            if len(files) < 9000:
                zh = hashlib.sha1(str(files).encode("utf-8", "replace"))
            else:
                zh = hashlib.sha1()
                _ = [zh.update(str(x).encode("utf-8", "replace")) for x in files]

            dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
            sql = "select d from dh where d = ? and h = ?"
            try:
                c = db.c.execute(sql, (rd, dhash))
                drd = rd
            except:
                drd = "//" + w8b64enc(rd)
                c = db.c.execute(sql, (drd, dhash))

            if c.fetchone():
                return ret

        seen_files = set([x[2] for x in files]) # for dropcheck
        for sz, lmod, fn in files:
            if self.stop:
                return -1

            rp = vjoin(rd, fn)
            abspath = os.path.join(cdir, fn)
            nohash = reh.search(abspath) if reh else False

            if fn: # diff-golf

                sql = "select w, mt, sz from up where rd = ? and fn = ?"
                try:
                    c = db.c.execute(sql, (rd, fn))
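the "dhash" logic in the hunk above is the core of the new rescan acceleration: a folder's (size, mtime, name) listing is hashed, and if the digest matches what the `dh` table remembers, the entire folder is skipped; a standalone sketch with hypothetical values:

```python
import base64
import hashlib

files = [(4096, 1660000000, "a.flac"), (123, 1660000001, "b.txt")]  # hypothetical listing
zh = hashlib.sha1(str(files).encode("utf-8", "replace"))
dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
# compare dhash against the stored value for this folder;
# a match means nothing was added, removed, or modified since last scan
```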
@@ -827,7 +881,7 @@ class Up2k(object):
                    self.log(t.format(top, rp, len(in_db), rep_db))
                    dts = -1

                if dts == lmod and dsz == sz and (nohash or dw[0] != "#"):
                if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
                    continue

                t = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(

@@ -841,7 +895,7 @@ class Up2k(object):

            self.pp.msg = "a{} {}".format(self.pp.n, abspath)

            if nohash:
            if nohash or not sz:
                wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
            else:
                if sz > 1024 * 1024:

@@ -870,6 +924,10 @@ class Up2k(object):
                db.n = 0
                db.t = time.time()

        if not self.args.no_dhash:
            db.c.execute("delete from dh where d = ?", (drd,))
            db.c.execute("insert into dh values (?,?)", (drd, dhash))

        if self.stop:
            return -1

@@ -888,15 +946,14 @@ class Up2k(object):
            t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
            self.log(t.format(n, top, rd))

            q = "delete from dh where (d = ? or d like ?||'%')"
            db.c.execute(q, (erd, erd + "/"))

            q = "delete from up where (rd = ? or rd like ?||'%') and at == 0"
            db.c.execute(q, (erd, erd + "/"))
            ret += n

        # drop missing files
        rd = cdir[len(top) + 1 :].strip("/")
        if WINDOWS:
            rd = rd.replace("\\", "/").strip("/")

        q = "select fn from up where rd = ?"
        try:
            c = db.c.execute(q, (rd,))

@@ -947,6 +1004,7 @@ class Up2k(object):

        self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
        for rd in rm:
            cur.execute("delete from dh where d = ?", (rd,))
            cur.execute("delete from up where rd = ?", (rd,))

        # then shadowed deleted files

@@ -1059,7 +1117,7 @@ class Up2k(object):
                sz2 = st.st_size
                mt2 = int(st.st_mtime)

                if nohash:
                if nohash or not sz2:
                    w2 = up2k_wark_from_metadata(self.salt, sz2, mt2, rd, fn)
                else:
                    if sz2 > 1024 * 1024 * 32:

@@ -1108,10 +1166,43 @@ class Up2k(object):
        reg = self.register_vpath(ptop, vol.flags)

        assert reg and self.pp
        cur = self.cur[ptop]

        if not self.args.no_dhash:
            with self.mutex:
                c = cur.execute("select k from kv where k = 'tagscan'")
                if not c.fetchone():
                    return 0, 0, bool(self.mtag)

        ret = self._build_tags_index_2(ptop)

        with self.mutex:
            self._set_tagscan(cur, False)
            cur.connection.commit()

        return ret

    def _set_tagscan(self, cur: "sqlite3.Cursor", need: bool) -> bool:
        if self.args.no_dhash:
            return False

        c = cur.execute("select k from kv where k = 'tagscan'")
        if bool(c.fetchone()) == need:
            return False

        if need:
            cur.execute("insert into kv values ('tagscan',1)")
        else:
            cur.execute("delete from kv where k = 'tagscan'")

        return True

    def _build_tags_index_2(self, ptop: str) -> tuple[int, int, bool]:
        entags = self.entags[ptop]
        flags = self.flags[ptop]
        cur = self.cur[ptop]

        n_add = 0
        n_rm = 0
        if "e2tsr" in flags:
            with self.mutex:

@@ -1307,6 +1398,9 @@ class Up2k(object):
            if "OFFLINE" not in self.volstate[k]:
                self.volstate[k] = "online, idle"

        if self.args.exit == "idx":
            self.hub.sigterm()

    def _run_one_mtp(self, ptop: str, gid: int) -> None:
        if gid != self.gid:
            return

@@ -1599,6 +1693,7 @@ class Up2k(object):
                write_cur.execute(q, (wark[:16], k, v))
                ret += 1

        self._set_tagscan(write_cur, True)
        return ret

    def _orz(self, db_path: str) -> "sqlite3.Cursor":

@@ -1622,6 +1717,11 @@ class Up2k(object):
                self.log("WARN: failed to upgrade from v4", 3)

        if ver == DB_VER:
            try:
                self._add_dhash_tab(cur)
            except:
                pass

        try:
            nfiles = next(cur.execute("select count(w) from up"))[0]
            self.log("OK: {} |{}|".format(db_path, nfiles))

@@ -1710,7 +1810,7 @@ class Up2k(object):
        ]:
            cur.execute(cmd)

        cur.connection.commit()
        self._add_dhash_tab(cur)
        self.log("created DB at {}".format(db_path))
        return cur

@@ -1725,6 +1825,17 @@ class Up2k(object):

        cur.connection.commit()

    def _add_dhash_tab(self, cur: "sqlite3.Cursor") -> None:
        # v5 -> v5a
        for cmd in [
            r"create table dh (d text, h text)",
            r"create index dh_d on dh(d)",
            r"insert into kv values ('tagscan',1)",
        ]:
            cur.execute(cmd)

        cur.connection.commit()

    def _job_volchk(self, cj: dict[str, Any]) -> None:
        if not self.register_vpath(cj["ptop"], cj["vcfg"]):
            if cj["ptop"] not in self.registry:

@@ -2625,14 +2736,21 @@ class Up2k(object):
        fsz = bos.path.getsize(path)
        csz = up2k_chunksize(fsz)
        ret = []
        suffix = " MB, {}".format(path)
        with open(fsenc(path), "rb", 512 * 1024) as f:
            if self.mth and fsz >= 1024 * 512:
                tlt = self.mth.hash(f, fsz, csz, self.pp, prefix, suffix)
                ret = [x[0] for x in tlt]
                fsz = 0

            while fsz > 0:
                # same as `hash_at` except for `imutex` / bufsz
                if self.stop:
                    return []

                if self.pp:
                    mb = int(fsz / 1024 / 1024)
                    self.pp.msg = "{}{} MB, {}".format(prefix, mb, path)
                    self.pp.msg = prefix + str(mb) + suffix

                hashobj = hashlib.sha512()
                rem = min(csz, fsz)

@@ -2873,11 +2991,17 @@ class Up2k(object):
        abspath = os.path.join(ptop, rd, fn)
        self.log("hashing " + abspath)
        inf = bos.stat(abspath)
        hashes = self._hashlist_from_file(abspath)
        if not hashes:
            return
        if not inf.st_size:
            wark = up2k_wark_from_metadata(
                self.salt, inf.st_size, int(inf.st_mtime), rd, fn
            )
        else:
            hashes = self._hashlist_from_file(abspath)
            if not hashes:
                return

        wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)

            wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
        with self.mutex:
            self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size, ip, at)

@@ -2893,6 +3017,9 @@ class Up2k(object):
    def shutdown(self) -> None:
        self.stop = True

        if self.mth:
            self.mth.stop = True

        for x in list(self.spools):
            self._unspool(x)

@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import base64
import contextlib
import hashlib
import math
import mimetypes
import os
import platform

@@ -21,6 +22,8 @@ import traceback
from collections import Counter
from datetime import datetime

from queue import Queue

from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, WINDOWS
from .__version__ import S_BUILD_DT, S_VERSION
from .stolen import surrogateescape

@@ -235,8 +238,26 @@ def py_desc() -> str:
    )


def _sqlite_ver() -> str:
    try:
        co = sqlite3.connect(":memory:")
        cur = co.cursor()
        try:
            vs = cur.execute("select * from pragma_compile_options").fetchall()
        except:
            vs = cur.execute("pragma compile_options").fetchall()

        v = next(x[0].split("=")[1] for x in vs if x[0].startswith("THREADSAFE="))
        cur.close()
        co.close()
    except:
        v = "W"

    return "{}*{}".format(sqlite3.sqlite_version, v)

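a sketch of what `_sqlite_ver` produces and how `chk_sqlite_threadsafe` in the authsrv hunk above consumes it (the "3.39.2" version string is hypothetical):

```python
SQLITE_VER = "3.39.2*1"  # sqlite_version + "*" + THREADSAFE compile-option
flag = SQLITE_VER[-1:]   # "1" = threadsafe, "2" = module-safe, "0" = unsafe
```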
try:
    from sqlite3 import sqlite_version as SQLITE_VER
    SQLITE_VER = _sqlite_ver()
except:
    SQLITE_VER = "(None)"

@@ -476,6 +497,104 @@ class ProgressPrinter(threading.Thread):
            sys.stdout.flush() # necessary on win10 even w/ stderr btw


class MTHash(object):
    def __init__(self, cores: int):
        self.pp: Optional[ProgressPrinter] = None
        self.f: Optional[typing.BinaryIO] = None
        self.sz = 0
        self.csz = 0
        self.stop = False
        self.omutex = threading.Lock()
        self.imutex = threading.Lock()
        self.work_q: Queue[int] = Queue()
        self.done_q: Queue[tuple[int, str, int, int]] = Queue()
        self.thrs = []
        for n in range(cores):
            t = threading.Thread(target=self.worker, name="mth-" + str(n))
            t.daemon = True
            t.start()
            self.thrs.append(t)

    def hash(
        self,
        f: typing.BinaryIO,
        fsz: int,
        chunksz: int,
        pp: Optional[ProgressPrinter] = None,
        prefix: str = "",
        suffix: str = "",
    ) -> list[tuple[str, int, int]]:
        with self.omutex:
            self.f = f
            self.sz = fsz
            self.csz = chunksz

            chunks: dict[int, tuple[str, int, int]] = {}
            nchunks = int(math.ceil(fsz / chunksz))
            for nch in range(nchunks):
                self.work_q.put(nch)

            ex = ""
            for nch in range(nchunks):
                qe = self.done_q.get()
                try:
                    nch, dig, ofs, csz = qe
                    chunks[nch] = (dig, ofs, csz)
                except:
                    ex = ex or str(qe)

                if pp:
                    mb = int((fsz - nch * chunksz) / 1024 / 1024)
                    pp.msg = prefix + str(mb) + suffix

            if ex:
                raise Exception(ex)

            ret = []
            for n in range(nchunks):
                ret.append(chunks[n])

            self.f = None
            self.csz = 0
            self.sz = 0
            return ret

    def worker(self) -> None:
        while True:
            ofs = self.work_q.get()
            try:
                v = self.hash_at(ofs)
            except Exception as ex:
                v = str(ex) # type: ignore

            self.done_q.put(v)

    def hash_at(self, nch: int) -> tuple[int, str, int, int]:
        f = self.f
        ofs = ofs0 = nch * self.csz
        chunk_sz = chunk_rem = min(self.csz, self.sz - ofs)
        if self.stop:
            return nch, "", ofs0, chunk_sz

        assert f
        hashobj = hashlib.sha512()
        while chunk_rem > 0:
            with self.imutex:
                f.seek(ofs)
                buf = f.read(min(chunk_rem, 1024 * 1024 * 12))

            if not buf:
                raise Exception("EOF at " + str(ofs))

            hashobj.update(buf)
            chunk_rem -= len(buf)
            ofs += len(buf)

        bdig = hashobj.digest()[:33]
        udig = base64.urlsafe_b64encode(bdig).decode("utf-8")
        return nch, udig, ofs0, chunk_sz


def uprint(msg: str) -> None:
    try:
        print(msg, end="")

@@ -1208,6 +1327,10 @@ def vsplit(vpath: str) -> tuple[str, str]:
    return vpath.rsplit("/", 1) # type: ignore


def vjoin(rd: str, fn: str) -> str:
    return rd + "/" + fn if rd else fn


def w8dec(txt: bytes) -> str:
    """decodes filesystem-bytes to wtf8"""
    if PY2:

@@ -1271,7 +1394,7 @@ def db_ex_chk(log: "NamedLogger", ex: Exception, db_path: str) -> bool:
    if str(ex) != "database is locked":
        return False

    thr = threading.Thread(target=lsof, args=(log, db_path))
    thr = threading.Thread(target=lsof, args=(log, db_path), name="dbex")
    thr.daemon = True
    thr.start()

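a quick sanity-check of the new `vjoin` helper (values hypothetical):

```python
assert vjoin("music/album", "01.flac") == "music/album/01.flac"
assert vjoin("", "01.flac") == "01.flac"  # no leading slash at the volume root
```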
@@ -504,7 +504,7 @@ html.dy {
    --a: #000;
    --a-b: #000;
    --a-hil: #000;
    --a-gray: #000;
    --a-gray: #bbb;
    --a-dark: #000;

    --btn-fg: #000;

@@ -544,6 +544,9 @@ html.dy {

    --tree-bg: #fff;

    --g-sel-bg: #000;
    --g-fsel-bg: #444;
    --g-fsel-ts: #000;
    --g-fg: a;
    --g-bg: a;
    --g-b1: a;

@@ -1596,9 +1599,6 @@ html.y #tree.nowrap .ntree a+a:hover {
    margin: .7em 0 .7em .5em;
    padding-left: .5em;
}
.opwide>div.fill {
    display: block;
}
.opwide>div>div>a {
    line-height: 2em;
}

@@ -11,6 +11,7 @@ var Ls = {
        "q": "quality / bitrate",
        "Ac": "audio codec",
        "Vc": "video codec",
        "Fmt": "format / container",
        "Ahash": "audio checksum",
        "Vhash": "video checksum",
        "Res": "resolution",

@@ -117,6 +118,8 @@ var Ls = {

        "cut_az": "upload files in alphabetical order, rather than smallest-file-first$N$Nalphabetical order can make it easier to eyeball if something went wrong on the server, but it makes uploading slightly slower on fiber / LAN",

        "cut_mt": "use multithreading to accelerate file hashing$N$Nthis uses web-workers and requires$Nmore RAM (up to 512 MiB extra)$N$N30% faster https, 4.5x faster http,$Nand 5.3x faster on android phones",

        "cft_text": "favicon text (blank and refresh to disable)",
        "cft_fg": "foreground color",
        "cft_bg": "background color",

@@ -287,8 +290,9 @@ var Ls = {

        "u_https1": "you should",
        "u_https2": "switch to https",
        "u_https3": "for much better performance",
        "u_https3": "for better performance",
        "u_ancient": 'your browser is impressively ancient -- maybe you should <a href="#" onclick="goto(\'bup\')">use bup instead</a>',
        "u_nowork": "need firefox 53+ or chrome 57+ or iOS 11+",
        "u_enpot": 'switch to <a href="#">potato UI</a> (may improve upload speed)',
        "u_depot": 'switch to <a href="#">fancy UI</a> (may reduce upload speed)',
        "u_gotpot": 'switching to the potato UI for improved upload speed,\n\nfeel free to disagree and switch back!',

@@ -345,6 +349,7 @@ var Ls = {
        "q": "kvalitet / bitrate",
        "Ac": "lyd-format",
        "Vc": "video-format",
        "Fmt": "format / innpakning",
        "Ahash": "lyd-kontrollsum",
        "Vhash": "video-kontrollsum",
        "Res": "oppløsning",

@@ -451,6 +456,8 @@ var Ls = {

        "cut_az": "last opp filer i alfabetisk rekkefølge, istedenfor minste-fil-først$N$Nalfabetisk kan gjøre det lettere å anslå om alt gikk bra, men er bittelitt tregere på fiber / LAN",

        "cut_mt": "raskere befaring ved å bruke hele CPU'en$N$Ndenne funksjonen anvender web-workers$Nog krever mer RAM (opptil 512 MiB ekstra)$N$N30% raskere https, 4.5x raskere http,$Nog 5.3x raskere på android-telefoner",

        "cft_text": "ikontekst (blank ut og last siden på nytt for å deaktivere)",
        "cft_fg": "farge",
        "cft_bg": "bakgrunnsfarge",

@@ -621,8 +628,9 @@ var Ls = {

        "u_https1": "du burde",
        "u_https2": "bytte til https",
        "u_https3": "for mye høyere hastighet",
        "u_https3": "for høyere hastighet",
        "u_ancient": 'nettleseren din er prehistorisk -- mulig du burde <a href="#" onclick="goto(\'bup\')">bruke bup istedenfor</a>',
        "u_nowork": "krever firefox 53+, chrome 57+, eller iOS 11+",
        "u_enpot": 'bytt til <a href="#">enkelt UI</a> (gir sannsynlig raskere opplastning)',
        "u_depot": 'bytt til <a href="#">snæsent UI</a> (gir sannsynlig tregere opplastning)',
        "u_gotpot": 'byttet til et enklere UI for å laste opp raskere,\n\ndu kan gjerne bytte tilbake altså!',

@@ -844,6 +852,7 @@ ebi('op_cfg').innerHTML = (
    '<div>\n' +
    ' <h3>' + L.cl_uopts + '</h3>\n' +
    ' <div>\n' +
    ' <a id="hashw" class="tgl btn" href="#" tt="' + L.cut_mt + '">mt</a>\n' +
    ' <a id="u2turbo" class="tgl btn ttb" href="#" tt="' + L.cut_turbo + '">turbo</a>\n' +
    ' <a id="u2tdate" class="tgl btn ttb" href="#" tt="' + L.cut_datechk + '">date-chk</a>\n' +
    ' <a id="flag_en" class="tgl btn" href="#" tt="' + L.cut_flag + '">💤</a>\n' +

@@ -861,7 +870,7 @@ ebi('op_cfg').innerHTML = (
    ' </div>\n' +
    '</div>\n' +
    '<div><h3>' + L.cl_keytype + '</h3><div id="key_notation"></div></div>\n' +
    '<div class="fill"><h3>' + L.cl_hiddenc + ' <a href="#" id="hcolsr">' + L.cl_reset + '</h3><div id="hcols"></div></div>'
    '<div><h3>' + L.cl_hiddenc + ' <a href="#" id="hcolsr">' + L.cl_reset + '</h3><div id="hcols"></div></div>'
);

@@ -914,7 +923,7 @@ function opclick(e) {
    goto(dest);

    var input = QS('.opview.act input:not([type="hidden"])')
    if (input && !is_touch) {
    if (input && !TOUCH) {
        tt.skip = true;
        input.focus();
    }

@@ -1684,7 +1693,7 @@ var vbar = (function () {
        if (e.button === 0)
            can.onmousemove = null;
    };
    if (is_touch) {
    if (TOUCH) {
        can.ontouchstart = mousedown;
        can.ontouchmove = mousemove;
    }

@@ -1789,7 +1798,7 @@ function playpause(e) {
        seek_au_mul(x * 1.0 / rect.width);
    };

    if (!is_touch)
    if (!TOUCH)
        bar.onwheel = function (e) {
            var dist = Math.sign(e.deltaY) * 10;
            if (Math.abs(e.deltaY) < 30 && !e.deltaMode)

@@ -1829,7 +1838,7 @@ var mpui = (function () {
        if (++nth > 69) {
            // android-chrome breaks aspect ratio with unannounced viewport changes
            nth = 0;
            if (is_touch) {
            if (MOBILE) {
                nth = 1;
                pbar.onresize();
                vbar.onresize();

@@ -4207,7 +4216,7 @@ document.onkeydown = function (e) {
            clearTimeout(defer_timeout);
            clearTimeout(search_timeout);
            search_timeout = setTimeout(do_search,
                v && v.length < (is_touch ? 4 : 3) ? 1000 : 500);
                v && v.length < (MOBILE ? 4 : 3) ? 1000 : 500);
        }
    }

@@ -4645,9 +4654,9 @@ var treectl = (function () {
        return ta[a];
    };

    r.goto = function (url, push) {
    r.goto = function (url, push, back) {
        get_tree("", url, true);
        r.reqls(url, push, true);
        r.reqls(url, push, true, back);
    };

    function get_tree(top, dst, rst) {

@@ -4816,9 +4825,10 @@ var treectl = (function () {
        thegrid.setvis(true);
    }

    r.reqls = function (url, hpush, no_tree) {
    r.reqls = function (url, hpush, no_tree, back) {
        var xhr = new XHR();
        xhr.top = url;
        xhr.back = back
        xhr.hpush = hpush;
        xhr.ts = Date.now();
        xhr.open('GET', xhr.top + '?ls' + (r.dots ? '&dots' : ''), true);

@@ -4886,6 +4896,12 @@ var treectl = (function () {
        if (res.readme)
            show_readme(res.readme);

        if (this.hpush && !this.back) {
            var ofs = ebi('wrap').offsetTop;
            if (document.documentElement.scrollTop > ofs)
                document.documentElement.scrollTop = ofs;
        }

        wintitle();
        var fun = r.ls_cb;
        if (fun) {

@@ -5061,7 +5077,7 @@ var treectl = (function () {
        if (url.search.indexOf('doc=') + 1 && hbase == cbase)
            return showfile.show(hbase + showfile.sname(url.search), true);

        r.goto(url.pathname);
        r.goto(url.pathname, false, true);
    };

    hist_replace(get_evpath() + window.location.hash);

@@ -16,6 +16,7 @@ function goto_up2k() {
|
||||
// usually it's undefined but some chromes throw on invoke
|
||||
var up2k = null,
|
||||
up2k_hooks = [],
|
||||
hws = [],
|
||||
sha_js = window.WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
|
||||
m = 'will use ' + sha_js + ' instead of native sha512 due to';
|
||||
|
||||
@@ -718,6 +719,13 @@ function up2k_init(subtle) {
|
||||
"gotallfiles": [gotallfiles] // hooks
|
||||
};
|
||||
|
||||
if (window.WebAssembly) {
|
||||
for (var a = 0; a < Math.min(navigator.hardwareConcurrency || 4, 16); a++)
|
||||
hws.push(new Worker('/.cpr/w.hash.js'));
|
||||
|
||||
console.log(hws.length + " hashers ready");
|
||||
}
|
||||
|
||||
function showmodal(msg) {
|
||||
ebi('u2notbtn').innerHTML = msg;
|
||||
ebi('u2btn').style.display = 'none';
|
||||
@@ -747,7 +755,7 @@ function up2k_init(subtle) {
 		showmodal('<h1>loading ' + fn + '</h1>');
 		import_js('/.cpr/deps/' + fn, unmodal);

-	if (is_https) {
+	if (HTTPS) {
 		// chrome<37 firefox<34 edge<12 opera<24 safari<7
 		m = L.u_ancient;
 		setmsg('');

@@ -790,7 +798,6 @@ function up2k_init(subtle) {
 	var parallel_uploads = icfg_get('nthread'),
 		uc = {},
 		fdom_ctr = 0,
-		min_filebuf = 0,
 		biggest_file = 0;

 	bcfg_bind(uc, 'multitask', 'multitask', true, null, false);

@@ -801,6 +808,7 @@ function up2k_init(subtle) {
 	bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo, false);
 	bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null, false);
 	bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort, false);
+	bcfg_bind(uc, 'hashw', 'hashw', !!window.WebAssembly, set_hashw, false);

 	var st = {
 		"files": [],

@@ -838,6 +846,7 @@ function up2k_init(subtle) {
 			"t": ""
 		},
 		"car": 0,
+		"slow_io": null,
 		"modn": 0,
 		"modv": 0,
 		"mod0": null

@@ -1288,8 +1297,13 @@ function up2k_init(subtle) {

 		if (!nhash) {
 			var h = L.u_etadone.format(humansize(st.bytes.hashed), pvis.ctr.ok + pvis.ctr.ng);
-			if (st.eta.h !== h)
+			if (st.eta.h !== h) {
 				st.eta.h = ebi('u2etah').innerHTML = h;
+				console.log('{0} hash, {1} up, {2} busy'.format(
+					f2f(st.time.hashing, 1),
+					f2f(st.time.uploading, 1),
+					f2f(st.time.busy, 1)));
+			}
 		}

 		if (!nsend && !nhash) {

@@ -1665,6 +1679,7 @@ function up2k_init(subtle) {
 		var t = st.todo.hash.shift();
 		st.busy.hash.push(t);
+		st.nfile.hash = t.n;
 		t.t_hashing = Date.now();

 		var bpend = 0,
 			nchunk = 0,

@@ -1675,30 +1690,23 @@ function up2k_init(subtle) {
 		pvis.setab(t.n, nchunks);
 		pvis.move(t.n, 'bz');

+		if (nchunks > 1 && hws.length && uc.hashw)
+			return wexec_hash(t, chunksize, nchunks);
+
 		var segm_next = function () {
-			if (nchunk >= nchunks || (bpend > chunksize && bpend >= min_filebuf))
+			if (nchunk >= nchunks || bpend)
 				return false;

 			var reader = new FileReader(),
 				nch = nchunk++,
 				car = nch * chunksize,
-				cdr = car + chunksize,
-				t0 = Date.now();
-			if (cdr >= t.size)
-				cdr = t.size;
-
-			bpend += cdr - car;
+				cdr = Math.min(chunksize + car, t.size);
+
 			st.bytes.hashed += cdr - car;

 			function orz(e) {
-				if (!min_filebuf && nch == 1) {
-					min_filebuf = 1;
-					var td = Date.now() - t0;
-					if (td > 50) {
-						min_filebuf = 32 * 1024 * 1024;
-					}
-				}
+				bpend--;
 				segm_next();
 				hash_calc(nch, e.target.result);
 			}
 			reader.onload = function (e) {

@@ -1726,6 +1734,7 @@ function up2k_init(subtle) {

 				toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
 			};
+			bpend++;
 			reader.readAsArrayBuffer(
 				bobslice.call(t.fobj, car, cdr));

@@ -1733,8 +1742,6 @@ function up2k_init(subtle) {
 		};

 		var hash_calc = function (nch, buf) {
-			while (segm_next());
-
 			var orz = function (hashbuf) {
 				var hslice = new Uint8Array(hashbuf).subarray(0, 33),
 					b64str = buf2b64(hslice);

@@ -1742,15 +1749,12 @@ function up2k_init(subtle) {
 				hashtab[nch] = b64str;
 				t.hash.push(nch);
 				pvis.hashed(t);

-				bpend -= buf.byteLength;
-				if (t.hash.length < nchunks) {
+				if (t.hash.length < nchunks)
 					return segm_next();
-				}

 				t.hash = [];
-				for (var a = 0; a < nchunks; a++) {
+				for (var a = 0; a < nchunks; a++)
 					t.hash.push(hashtab[a]);
-				}

 				t.t_hashed = Date.now();
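Each chunk is thus identified by the first 33 bytes (264 bits) of its SHA-512. `buf2b64` itself is defined elsewhere in the codebase; a hedged sketch of an equivalent encoder, assuming the URL-safe base64 alphabet with padding stripped:

```js
// base64-encode a small Uint8Array such as the 33-byte digest slice;
// fromCharCode.apply is fine at this size but would blow the argument
// limit on buffers that are megabytes large
function buf2b64_sketch(u8) {
	var s = String.fromCharCode.apply(null, u8);
	return btoa(s).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '');
}
```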
@@ -1782,11 +1786,117 @@ function up2k_init(subtle) {
 				}
 			}, 1);
 		};

 		t.t_hashing = Date.now();
 		segm_next();
 	}

+	function wexec_hash(t, chunksize, nchunks) {
+		var nchunk = 0,
+			reading = 0,
+			max_readers = 1,
+			opt_readers = 2,
+			free = [],
+			busy = {},
+			nbusy = 0,
+			hashtab = {},
+			mem = (MOBILE ? 128 : 256) * 1024 * 1024;
+
+		for (var a = 0; a < hws.length; a++) {
+			var w = hws[a];
+			free.push(w);
+			w.onmessage = onmsg;
+			mem -= chunksize;
+			if (mem <= 0)
+				break;
+		}
+
+		function go_next() {
+			if (st.slow_io && uc.multitask)
+				// android-chrome filereader latency is ridiculous but scales linearly
+				// (unlike every other platform which instead suffers on parallel reads...)
+				max_readers = opt_readers = free.length;
+
+			if (reading >= max_readers || !free.length || nchunk >= nchunks)
+				return;
+
+			var w = free.pop(),
+				car = nchunk * chunksize,
+				cdr = Math.min(chunksize + car, t.size);
+
+			//console.log('[P ] %d read bgin (%d reading, %d busy)', nchunk, reading + 1, nbusy + 1);
+			w.postMessage([nchunk, t.fobj, car, cdr]);
+			busy[nchunk] = w;
+			nbusy++;
+			reading++;
+			nchunk++;
+		}
+
+		function onmsg(d) {
+			d = d.data;
+			var k = d[0];
+
+			if (k == "panic")
+				return vis_exh(d[1], 'up2k.js', '', '', d[1]);
+
+			if (k == "fail") {
+				pvis.seth(t.n, 1, d[1]);
+				pvis.seth(t.n, 2, d[2]);
+				console.log(d[1], d[2]);
+
+				pvis.move(t.n, 'ng');
+				apop(st.busy.hash, t);
+				st.bytes.finished += t.size;
+				return;
+			}
+
+			if (k == "ferr")
+				return toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + d[1]);
+
+			if (k == "read") {
+				reading--;
+				if (MOBILE && CHROME && st.slow_io === null && d[1] == 1 && d[2] > 1024 * 512) {
+					var spd = Math.floor(d[2] / d[3]);
+					st.slow_io = spd < 40 * 1024;
+					console.log('spd {0}, slow: {1}'.format(spd, st.slow_io));
+				}
+				//console.log('[P ] %d read DONE (%d reading, %d busy)', d[1], reading, nbusy);
+				return go_next();
+			}
+
+			if (k == "done") {
+				var nchunk = d[1],
+					hslice = d[2],
+					sz = d[3];
+
+				free.push(busy[nchunk]);
+				delete busy[nchunk];
+				nbusy--;
+
+				//console.log('[P ] %d HASH DONE (%d reading, %d busy)', nchunk, reading, nbusy);
+
+				hashtab[nchunk] = buf2b64(hslice);
+				st.bytes.hashed += sz;
+				t.hash.push(nchunk);
+				pvis.hashed(t);
+
+				if (t.hash.length < nchunks)
+					return nbusy < opt_readers && go_next();
+
+				t.hash = [];
+				for (var a = 0; a < nchunks; a++)
+					t.hash.push(hashtab[a]);
+
+				t.t_hashed = Date.now();
+
+				pvis.seth(t.n, 2, L.u_hashdone);
+				pvis.seth(t.n, 1, '📦 wait');
+				apop(st.busy.hash, t);
+				st.todo.handshake.push(t);
+				tasker();
+			}
+		}
+		go_next();
+	}
+
 	/////
 	////
 	/// head
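The protocol between `up2k.js` and `w.hash.js` (the worker source is further down): the main thread posts `[nchunk, fobj, car, cdr]`, and the worker replies `["read", nchunk, nbytes, ms]` once its FileReader finishes, then `["done", nchunk, hslice, nbytes]`, or one of `"fail"` / `"ferr"` / `"panic"` on errors. A hedged sketch of that round-trip wrapped in a promise; the real code stays callback-based so the next read can start while the previous chunk is still hashing:

```js
// hash one chunk on one worker; resolves with the 33-byte digest slice
// (sketch only -- not how up2k.js actually schedules the workers)
function hashChunk(w, nchunk, fobj, car, cdr) {
	return new Promise(function (resolve, reject) {
		w.onmessage = function (e) {
			var d = e.data;
			if (d[0] == "done")
				resolve(d[2]);
			else if (d[0] != "read") // "fail", "ferr", "panic"
				reject(d[1]);
		};
		w.postMessage([nchunk, fobj, car, cdr]);
	});
}
```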
@@ -2212,7 +2322,7 @@ function up2k_init(subtle) {
 	window.addEventListener('resize', onresize);
 	onresize();

-	if (is_touch) {
+	if (MOBILE) {
 		// android-chrome wobbles for a bit; firefox / iOS-safari are OK
 		setTimeout(onresize, 20);
 		setTimeout(onresize, 100);

@@ -2366,6 +2476,13 @@ function up2k_init(subtle) {
 		localStorage.removeItem('u2sort');
 	}

+	function set_hashw() {
+		if (!window.WebAssembly) {
+			bcfg_set('hashw', false);
+			toast.err(10, L.u_nowork);
+		}
+	}
+
 	ebi('nthread_add').onclick = function (e) {
 		ev(e);
 		bumpthread(1);
@@ -8,12 +8,27 @@ if (!window['console'])

 var wah = '',
 	HALFMAX = 8192 * 8192 * 8192 * 8192,
-	is_touch = 'ontouchstart' in window,
-	is_https = (window.location + '').indexOf('https:') === 0,
-	IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
+	HTTPS = (window.location + '').indexOf('https:') === 0,
+	TOUCH = 'ontouchstart' in window,
+	MOBILE = TOUCH,
+	CHROME = !!window.chrome,
+	IPHONE = TOUCH && /iPhone|iPad|iPod/i.test(navigator.userAgent),
 	WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);


+try {
+	if (navigator.userAgentData.mobile)
+		MOBILE = true;
+
+	if (navigator.userAgentData.platform == 'Windows')
+		WINDOWS = true;
+
+	if (navigator.userAgentData.brands.some(function (d) { return d.brand == 'Chromium' }))
+		CHROME = true;
+}
+catch (ex) { }
+
+
 var ebi = document.getElementById.bind(document),
 	QS = document.querySelector.bind(document),
 	QSA = document.querySelectorAll.bind(document),

@@ -946,7 +961,7 @@ var tt = (function () {
 			return r.show.bind(this)();

 		tev = setTimeout(r.show.bind(this), 800);
-		if (is_touch)
+		if (TOUCH)
 			return;

 		this.addEventListener('mousemove', r.move);

@@ -1533,13 +1548,13 @@ function xhrchk(xhr, prefix, e404) {
 	var errtxt = (xhr.response && xhr.response.err) || xhr.responseText,
 		fun = toast.err;

-	if (xhr.status == 503 && /\bDD(?:wah){0}[o]S [Pp]rote[c]tion|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
+	if (xhr.status == 503 && /[Cc]loud[f]lare|>Just a mo[m]ent|#cf-b[u]bbles|Chec[k]ing your br[o]wser/.test(errtxt)) {
 		var now = Date.now(), td = now - cf_cha_t;
 		if (td < 15000)
 			return;

 		cf_cha_t = now;
-		errtxt = 'Cloudflare DD' + wah + 'oS protection kicked in\n\n<strong>trying to fix it...</strong>';
+		errtxt = 'Clou' + wah + 'dflare protection kicked in\n\n<strong>trying to fix it...</strong>';
 		fun = toast.warn;

 		qsr('#cf_frame');
77
copyparty/web/w.hash.js
Normal file

@@ -0,0 +1,77 @@
"use strict";


function hex2u8(txt) {
	return new Uint8Array(txt.match(/.{2}/g).map(function (b) { return parseInt(b, 16); }));
}


var subtle = null;
try {
	subtle = crypto.subtle || crypto.webkitSubtle;
	subtle.digest('SHA-512', new Uint8Array(1)).then(
		function (x) { },
		function (x) { load_fb(); }
	);
}
catch (ex) {
	load_fb();
}
function load_fb() {
	subtle = null;
	importScripts('/.cpr/deps/sha512.hw.js');
}


onmessage = (d) => {
	var [nchunk, fobj, car, cdr] = d.data,
		t0 = Date.now(),
		reader = new FileReader();

	reader.onload = function (e) {
		try {
			//console.log('[ w] %d HASH bgin', nchunk);
			postMessage(["read", nchunk, cdr - car, Date.now() - t0]);
			hash_calc(e.target.result);
		}
		catch (ex) {
			postMessage(["panic", ex + '']);
		}
	};
	reader.onerror = function () {
		var err = reader.error + '';

		if (err.indexOf('NotReadableError') !== -1 || // win10-chrome defender
			err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
		)
			return postMessage(["fail", 'OS-error', err + ' @ ' + car]);

		postMessage(["ferr", err]);
	};
	//console.log('[ w] %d read bgin', nchunk);
	reader.readAsArrayBuffer(
		File.prototype.slice.call(fobj, car, cdr));


	var hash_calc = function (buf) {
		var hash_done = function (hashbuf) {
			try {
				var hslice = new Uint8Array(hashbuf).subarray(0, 33);
				//console.log('[ w] %d HASH DONE', nchunk);
				postMessage(["done", nchunk, hslice, cdr - car]);
			}
			catch (ex) {
				postMessage(["panic", ex + '']);
			}
		};

		if (subtle)
			subtle.digest('SHA-512', buf).then(hash_done);
		else {
			var u8buf = new Uint8Array(buf);
			hashwasm.sha512(u8buf).then(function (v) {
				hash_done(hex2u8(v))
			});
		}
	};
}
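Note how the fallback is chosen: instead of only checking that `crypto.subtle` exists, the worker runs one throwaway digest over a 1-byte buffer and calls `load_fb()` only if that call rejects or throws -- `crypto.subtle` can be present yet unusable (plaintext http being the usual case), and this probe catches that at startup instead of on the first real chunk.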
@@ -1,3 +1,87 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0812-2258 `v1.3.12` quickboot

* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)

## new features
*but wait, there's more!* not only do you get the [multithreaded file hashing](https://github.com/9001/copyparty/releases/tag/v1.3.11) but also --
* faster bootup and volume reindexing when `-e2ds` (file indexing) is enabled
  * `3x` faster is probably the average on most instances; more files per folder = faster
  * `9x` faster on a 36 TiB zfs music/media nas with `-e2ts` (metadata indexing), dropping from 46sec to 5sec
  * and `34x` on another zfs box, 63sec -> 1.8sec
  * new arg `--no-dhash` disables the speedhax in case it's buggy (skipping files or audio tags)
* add option `--exit idx` to abort and shutdown after volume indexing has finished (see the example below)
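*(an illustrative invocation combining the two: `python copyparty-sfx.py -e2ds --exit idx` should reindex all volumes and then shut down; add `--no-dhash` on top if the speedhax is suspected of misbehaving)*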
## bugfixes
* [u2cli](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy): detect and skip uploading from recursive symlinks
* stop reindexing empty files on startup
* support fips-compliant cpython builds
  * replaces md5 with sha1, changing the filetype-associated colors in the gallery view



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0810-2135 `v1.3.11` webworkers

* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)

## new features
* multithreaded file hashing! **300%** average speed increase
  * when uploading files through the browser client, based on web-workers
    * `4.5x` faster on http from a laptop -- `146` -> `670` MiB/s
    * ` 30%` faster on https from a laptop -- `552` -> `716` MiB/s
    * `4.2x` faster on http from android -- `13.5` -> `57.1` MiB/s
    * `5.3x` faster on https from android -- `13.8` -> `73.3` MiB/s
    * can be disabled using the `mt` togglebtn in the settings pane, for example if your phone runs out of memory (it eats ~250 MiB extra RAM; see the note after this list)
  * `2.3x` faster [u2cli](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy) (cmd-line client) -- `398` -> `930` MiB/s
  * `2.4x` faster filesystem indexing on the server
* thx to @kipukun for the webworker suggestion!
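*(the ~250 MiB figure lines up with the budget in `wexec_hash` above: the pool stops recruiting workers once their combined chunk buffers would exceed 256 MiB -- 128 MiB on mobile -- so memory use stays bounded regardless of core count)*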
## bugfixes
* ux: reset scroll when navigating into a new folder
* u2cli: better errormsg if the server's tls certificate got rejected
* js: more futureproof cloudflare-challenge detection (they got a new one recently)

## other changes
* print warning if the python interpreter was built with an unsafe sqlite
* u2cli: add helpful messages on how to make it run on python 2.6

**trivia:** due to a [chrome bug](https://bugs.chromium.org/p/chromium/issues/detail?id=1352210), http can sometimes be faster than https now ¯\\\_(ツ)\_/¯



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0803-2340 `v1.3.10` folders first

* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)

## new features
* faster
  * tag scanner
  * on windows: uploading to fat32 or smb
* toggle-button to sort folders before files (default-on)
  * almost the same as before, but now also when sorting by size / date
* repeatedly hit `ctrl-c` to force-quit if everything dies
* new file-indexing guards (see the example below)
  * `--xdev` / volflag `:c,xdev` stops if it hits another filesystem (bindmount/symlink)
  * `--xvol` / volflag `:c,xvol` does not follow symlinks pointing outside the volume
  * only affects file indexing -- does NOT prevent access!
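*(illustrative volume spec: `-v /mnt/nas/music:music:r:c,xdev` -- the path is hypothetical -- would share the folder read-only while the indexer refuses to cross into other filesystems)*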
## bugfixes
* forget uploads that failed to initialize (allows retry in another folder)
* wrong filekeys in upload response if volume path contained a symlink
* faster shutdown on `ctrl-c` while hashing huge files
* ux: fix navpane covering files on horizontal scroll

## other changes
* include version info in the base64 crash-message
* ux: make upload errors more visible on mobile



▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0727-1407 `v1.3.8` more async
@@ -14,10 +14,6 @@ gtar=$(command -v gtar || command -v gnutar) || true
 	realpath() { grealpath "$@"; }
 }

-which md5sum 2>/dev/null >/dev/null &&
-	md5sum=md5sum ||
-	md5sum="md5 -r"
-
 mode="$1"

 [ -z "$mode" ] &&

@@ -69,6 +69,9 @@ pybin=$(command -v python3 || command -v python) || {
 	exit 1
 }

+[ $CSN ] ||
+	CSN=sfx
+
 langs=
 use_gz=
 zopf=2560

@@ -99,9 +102,9 @@ stamp=$(
 	done | sort | tail -n 1 | sha1sum | cut -c-16
 )

-rm -rf sfx/*
-mkdir -p sfx build
-cd sfx
+rm -rf $CSN/*
+mkdir -p $CSN build
+cd $CSN

 tmpdir="$(
 	printf '%s\n' "$TMPDIR" /tmp |

@@ -237,7 +240,7 @@ ts=$(date -u +%s)
 hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)

 mkdir -p ../dist
-sfx_out=../dist/copyparty-sfx
+sfx_out=../dist/copyparty-$CSN

 echo cleanup
 find -name '*.pyc' -delete

@@ -371,7 +374,7 @@ gzres() {
 }


-zdir="$tmpdir/cpp-mksfx"
+zdir="$tmpdir/cpp-mk$CSN"
 [ -e "$zdir/$stamp" ] || rm -rf "$zdir"
 mkdir -p "$zdir"
 echo a > "$zdir/$stamp"

@@ -402,8 +405,8 @@ sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
 sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1

 for n in {1..50}; do
-	(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
-	s=$(md5sum list | cut -c-16)
+	(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | (shuf||gshuf) ) >list || true
+	s=$( (sha1sum||shasum) < list | cut -c-16)
 	grep -q $s "$zdir/h" && continue
 	echo $s >> "$zdir/h"
 	break

@@ -423,7 +426,7 @@ pe=bz2

 echo compressing tar
 # detect best level; bzip2 -7 is usually better than -9
-for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
+for n in {2..9}; do cp tar t.$n; nice $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
 rm t.* || true
 exts=()
@@ -1,6 +1,8 @@
 #!/bin/bash
 set -e

+parallel=2
+
 cd ~/dev/copyparty/scripts

 v=$1

@@ -21,16 +23,31 @@ v=$1
 	./make-tgz-release.sh $v
 }

-rm -f ../dist/copyparty-sfx.*
+rm -f ../dist/copyparty-sfx*
 shift
 ./make-sfx.sh "$@"
-f=../dist/copyparty-sfx.py
-[ -e $f ] ||
-	f=../dist/copyparty-sfx-gz.py
+f=../dist/copyparty-sfx
+[ -e $f.py ] ||
+	f=../dist/copyparty-sfx-gz

-$f -h
+$f.py -h >/dev/null
+
+[ $parallel -gt 1 ] && {
+	printf '\033[%s' s 2r H "0;1;37;44mbruteforcing sfx size -- press enter to terminate" K u "7m $* " K $'27m\n'
+	trap "rm -f .sfx-run; printf '\033[%s' s r u" INT TERM EXIT
+	touch .sfx-run
+	for ((a=0; a<$parallel; a++)); do
+		while [ -e .sfx-run ]; do
+			CSN=sfx$a ./make-sfx.sh re "$@"
+			mv $f$a.py $f.$(wc -c <$f$a.py | awk '{print$1}').py
+		done &
+	done
+	read
+	exit
+}
+
 while true; do
-	mv $f $f.$(wc -c <$f | awk '{print$1}')
+	mv $f.py $f.$(wc -c <$f.py | awk '{print$1}').py
 	./make-sfx.sh re "$@"
 done
@@ -77,3 +77,4 @@ copyparty/web/splash.js,
 copyparty/web/ui.css,
 copyparty/web/up2k.js,
 copyparty/web/util.js,
+copyparty/web/w.hash.js,
@@ -213,11 +213,11 @@ def yieldfile(fn):


 def hashfile(fn):
-    h = hashlib.md5()
+    h = hashlib.sha1()
     for block in yieldfile(fn):
         h.update(block)

-    return h.hexdigest()
+    return h.hexdigest()[:24]


 def unpack():
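(this appears to be the fips-compliance fix from the v1.3.12 notes above: md5 is unavailable on fips-compliant cpython builds, so the digest switches to sha1, truncated to 24 hex characters, presumably to keep the strings compact)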