Compare commits

...

55 Commits

Author  SHA1  Message  Date
ed  dac2fad48e  v1.3.8  2022-07-27 16:07:26 +02:00
ed  77f624b01e  improve shumantime + use it everywhere  2022-07-27 15:07:04 +02:00
ed  e24ffebfc8  indicate write-activity on splashpage  2022-07-27 14:53:15 +02:00
ed  70d07d1609  perf  2022-07-27 14:01:30 +02:00
ed  bfb3303d87  include client total ETA in upload logs  2022-07-27 12:07:51 +02:00
ed  660705a436  defer volume reindexing on db activity  2022-07-27 11:48:47 +02:00
ed  74a3f97671  cleanup + bump deps  2022-07-27 00:15:49 +02:00
ed  b3e35bb494  async lsof w/ timeout  2022-07-26 22:38:13 +02:00
ed  76adac7c72  up2k-hook-ytid: add mp4/webm/mkv metadata scanner  2022-07-26 22:09:18 +02:00
ed  5dc75ebb67  async e2ts / e2v + forget deleted shadowed  2022-07-26 12:47:40 +02:00
ed  d686ce12b6  lsof db on stuck transaction  2022-07-25 02:07:59 +02:00
ed  d3c40a423e  mutagen: support nullduration tags  2022-07-25 01:21:34 +02:00
ed  2fb1e6dab8  mute exception on zip abort  2022-07-25 01:20:38 +02:00
ed  10430b347f  fix dumb prisonparty bug  2022-07-22 20:49:35 +02:00
ed  e0e3f6ac3e  up2k-hook-ytid: add override  2022-07-22 10:47:10 +02:00
ed  c694cbffdc  a11y: improve skip-to-files  2022-07-20 23:44:57 +02:00
ed  bdd0e5d771  a11y: enter = onclick  2022-07-20 23:32:02 +02:00
ed  aa98e427f0  audio-eq: add crossfeed  2022-07-20 01:54:59 +02:00
ed  daa6f4c94c  add video hotkeys for digit-seeking  2022-07-17 23:45:02 +02:00
ed  4a76663fb2  ensure free disk space  2022-07-17 22:33:08 +02:00
ed  cebda5028a  v1.3.7  2022-07-16 20:48:23 +02:00
ed  3fa377a580  sqlite diag  2022-07-16 20:43:26 +02:00
ed  a11c1005a8  v1.3.6  2022-07-16 03:58:58 +02:00
ed  4a6aea9328  hopefully got this right  2022-07-16 02:24:53 +02:00
ed  4ca041e93e  improve autopotato accuracy  2022-07-16 02:23:50 +02:00
ed  52a866a405  batch progress writes  2022-07-16 02:12:56 +02:00
ed  8b6bd0e6ac  rescue some exceptions from the promise maelstroms  2022-07-15 23:42:37 +02:00
ed  780fc4639a  bbox: chrome doesnt override video onclick  2022-07-15 22:36:35 +02:00
ed  3692fc9d83  bbox: doubletap pic for fullscreen  2022-07-15 22:29:44 +02:00
ed  c2a0b1b4c6  autopotato  2022-07-15 02:39:32 +02:00
ed  21bbdb5419  fix audio-eq on recent chromes  2022-07-15 02:07:48 +02:00
ed  aa1c08962c  golf  2022-07-15 02:07:13 +02:00
ed  8a5d0399dd  sfx: dont hang supervisors  2022-07-15 02:04:00 +02:00
ed  f2cd0b0c4a  sfx: avoid name collisions across reboots  2022-07-15 02:03:41 +02:00
ed  c2b66bbe73  add potato mode  2022-07-14 02:33:35 +02:00
ed  48b957f1d5  add -e2v (file integrity checker)  2022-07-13 00:48:39 +02:00
ed  3683984c8d  abort volume indexing on ^C  2022-07-12 21:46:07 +02:00
ed  a3431512d8  push queue/status info to server  2022-07-12 21:22:02 +02:00
ed  d832b787e7  upload smallest-file-first by default  2022-07-12 20:48:38 +02:00
ed  6f75b02723  misc  2022-07-12 03:16:30 +02:00
ed  b8241710bd  md-editor fixes  2022-07-12 02:53:33 +02:00
ed  d638404b6a  better runahead strategy for 100 GiB+ files  2022-07-12 02:30:49 +02:00
ed  9362ca3ed9  py2 fixes  2022-07-11 23:53:18 +02:00
ed  d1a03c6d17  zerobyte semantics  2022-07-11 23:17:33 +02:00
ed  c6c31702c2  cheaper file deletion  2022-07-11 01:50:18 +02:00
ed  bd2d88c96e  add another up2k-hook example  2022-07-11 00:52:59 +02:00
ed  76b1857e4e  add support for up2k hooks  2022-07-09 14:02:35 +02:00
ed  095bd17d10  mtp/vidchk: grab some frames at the start too  2022-07-09 13:10:00 +02:00
ed  204bfac3fa  mtp/vidchk: write ffprobe metadata to file  2022-07-09 04:33:19 +02:00
ed  ac49b0ca93  mtp: add rclone uploader  2022-07-08 23:47:27 +02:00
ed  c5b04f6fef  mtp daisychaining  2022-07-08 22:29:05 +02:00
ed  5c58fda46d  only clean thumbs if there are thumbs to clean  2022-07-08 21:13:10 +02:00
ed  062730c70c  cleanup  2022-07-06 11:12:36 +02:00
ed  cade1990ce  v1.3.5  2022-07-06 02:29:11 +02:00
ed  59b6e61816  build fstab from relabels when mtab is unreadable  2022-07-06 02:28:34 +02:00
47 changed files with 2015 additions and 555 deletions

.gitignore (vendored), 3 lines changed
View File

@@ -8,8 +8,9 @@ copyparty.egg-info/
buildenv/
build/
dist/
sfx/
py2/
sfx/
unt/
.venv/
# ide

View File

@@ -57,6 +57,8 @@ try the **[read-only demo server](https://a.ocv.me/pub/demo/)** 👀 running fro
* [server config](#server-config) - using arguments or config files, or a mix of both
* [ftp-server](#ftp-server) - an FTP server can be started using `--ftp 3921`
* [file indexing](#file-indexing)
* [exclude-patterns](#exclude-patterns)
* [periodic rescan](#periodic-rescan) - filesystem monitoring;
* [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
@@ -373,6 +375,7 @@ the browser has the following hotkeys (always qwerty)
* `Esc` close viewer
* videos:
* `U/O` skip 10sec back/forward
* `0..9` jump to 0%..90%
* `P/K/Space` play/pause
* `M` mute
* `C` continue playing next video
@@ -481,6 +484,7 @@ see [up2k](#up2k) for details on how it works, or watch a [demo video](https://a
the up2k UI is the epitome of polished intuitive experiences:
* "parallel uploads" specifies how many chunks to upload at the same time
* `[🏃]` analysis of other files should continue while one is uploading
* `[🥔]` shows a simpler UI for faster uploads from slow devices
* `[💭]` ask for confirmation before files are added to the queue
* `[🔎]` switch between upload and [file-search](#file-search) mode
* ignore `[🔎]` if you add files by dragging them into the browser
@@ -663,8 +667,11 @@ through arguments:
* `-e2t` enables metadata indexing on upload
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex
* `-e2v` verifies file integrity at startup, comparing hashes from the db
* `-e2vu` patches the database with the new hashes from the filesystem
* `-e2vp` panics and kills copyparty instead
the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts` for disabling:
the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts`, `d2v` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
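as a minimal sketch of how the verification switches above might be combined on the command line (the music path is just a placeholder):

```
python3 copyparty-sfx.py -e2dsa -e2ts -e2v -e2vu -v ~/music::r
```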
@@ -676,6 +683,8 @@ note:
* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
### exclude-patterns
to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`; this has the following consequences:
* initial indexing is way faster, especially when the volume is on a network disk
* makes it impossible to [file-search](#file-search)
@@ -685,12 +694,21 @@ similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noi
if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=`
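a rough illustration of combining the global switch with the per-volume override (paths here are hypothetical):

```
# skip content-hashing for *.iso everywhere, but hash the music volume anyway
python3 copyparty-sfx.py -e2dsa --no-hash '\.iso$' \
  -v /mnt/nas/isos:isos:r \
  -v /mnt/nas/music:music:r:c,nohash=
```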
### periodic rescan
filesystem monitoring; if copyparty is not the only software doing stuff on your filesystem, you may want to enable periodic rescans to keep the index up to date
argument `--re-maxage 60` will rescan all volumes every 60 sec, same as volflag `:c,scan=60` to specify it per-volume
uploads are disabled while a rescan is happening, so rescans will be delayed by `--db-act` (default 10 sec) when there is write-activity going on (uploads, renames, ...)
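a hedged example wiring the two together (interval and path are arbitrary):

```
# rescan every 10 minutes, but hold off until 30 sec after the last db write
python3 copyparty-sfx.py -e2dsa --re-maxage 600 --db-act 30 -v /mnt/nas/music::r
```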
## upload rules
set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,df=4g` block uploads if there would be less than 4 GiB free disk space afterwards
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
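putting a few of the rules above together, a write-only dropbox volume might look like this (sketch only; path and limits are made up):

```
python3 copyparty-sfx.py -v /mnt/nas/inc:inc:w:c,sz=1k-3m:c,df=4g:c,nosub:c,rotf=%Y/%m/%d
```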
@@ -964,6 +982,12 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
regarding the frequent server log message during uploads:
`6.0M 106M/s 2.77G 102.9M/s n948 thank 4/0/3/1 10042/7198 00:01:09`
* this chunk was `6 MiB`, uploaded at `106 MiB/s`
* on this http connection, `2.77 GiB` transferred, `102.9 MiB/s` average, `948` chunks handled
* client says `4` uploads OK, `0` failed, `3` busy, `1` queued, `10042 MiB` total size, `7198 MiB` and `00:01:09` left
## why chunk-hashes
@@ -1228,11 +1252,15 @@ if you want thumbnails, `apt -y install ffmpeg`
ideas for context to include in bug reports
in general, commandline arguments (and config file if any)
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
```
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
```
if there's a wall of base64 in the log (thread stacks) then please include that, especially if you run into something freezing up or getting stuck, for example `OperationalError('database is locked')` -- alternatively you can visit `/?stack` to see the stacks live, for example http://127.0.0.1:3923/?stack
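if it is easier, the live stack dump can also be grabbed from a terminal (assuming the default port 3923):

```
curl -s 'http://127.0.0.1:3923/?stack' | tee stack.txt
```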
# building

View File

@@ -89,4 +89,7 @@ def main():
if __name__ == "__main__":
main()
try:
main()
except:
pass

bin/mtag/rclone-upload.py (new file), 76 lines added
View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python
import json
import os
import subprocess as sp
import sys
import time
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p.encode("utf-8")
_ = r"""
first checks the tag "vidchk" which must be "ok" to continue,
then uploads all files to some cloud storage (RCLONE_REMOTE)
and DELETES THE ORIGINAL FILES if rclone returns 0 ("success")
deps:
rclone
usage:
-mtp x2=t43200,ay,p2,bin/mtag/rclone-upload.py
explained:
t43200: timeout 12h
ay: only process files which contain audio (including video with audio)
p2: set priority 2 (after vidchk's suggested priority of 1),
so the output of vidchk will be passed in here
complete usage example as vflags along with vidchk:
-vsrv/vidchk:vidchk:r:rw,ed:c,e2dsa,e2ts,mtp=vidchk=t600,p,bin/mtag/vidchk.py:c,mtp=rupload=t43200,ay,p2,bin/mtag/rclone-upload.py:c,mte=+vidchk,rupload
setup: see https://rclone.org/drive/
if you wanna use this script standalone / separately from copyparty,
either set CONDITIONAL_UPLOAD False or provide the following stdin:
{"vidchk":"ok"}
"""
RCLONE_REMOTE = "notmybox"
CONDITIONAL_UPLOAD = True
def main():
if CONDITIONAL_UPLOAD:
fp = sys.argv[1]
zb = sys.stdin.buffer.read()
zs = zb.decode("utf-8", "replace")
md = json.loads(zs)
chk = md.get("vidchk", None)
if chk != "ok":
print(f"vidchk={chk}", file=sys.stderr)
sys.exit(1)
dst = f"{RCLONE_REMOTE}:".encode("utf-8")
cmd = [b"rclone", b"copy", b"--", fsenc(fp), dst]
t0 = time.time()
try:
sp.check_call(cmd)
except:
print("rclone failed", file=sys.stderr)
sys.exit(1)
print(f"{time.time() - t0:.1f} sec")
os.unlink(fsenc(fp))
if __name__ == "__main__":
main()
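to run the script standalone as its docstring suggests, an invocation would look roughly like this (the filename is a placeholder, and note that a successful rclone copy deletes the local file):

```
echo '{"vidchk":"ok"}' | python3 bin/mtag/rclone-upload.py /path/to/some-video.mkv
```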

View File

@@ -1,15 +1,33 @@
#!/usr/bin/env python3
import json
import re
import sys
import subprocess as sp
from copyparty.util import fsenc
from copyparty.mtag import ffprobe
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p.encode("utf-8")
"""
_ = r"""
inspects video files for errors and such
usage: -mtp vidchk=t600,ay,bin/mtag/vidchk.py
plus stores a bunch of metadata to filename.ff.json
usage:
-mtp vidchk=t600,ay,p,bin/mtag/vidchk.py
explained:
t600: timeout 10min
ay: only process files which contain audio (including video with audio)
p: set priority 1 (lowest priority after initial ffprobe/mutagen for base tags),
makes copyparty feed base tags into this script as json
if you wanna use this script standalone / separately from copyparty,
provide the video resolution on stdin as json: {"res":"1920x1080"}
"""
@@ -17,19 +35,63 @@ FAST = True # parse entire file at container level
# FAST = False # fully decode audio and video streams
# warnings to ignore
harmless = re.compile("^Unsupported codec with id ")
def wfilter(lines):
return [x for x in lines if not harmless.search(x)]
def errchk(so, se, rc):
if rc:
err = (so + se).decode("utf-8", "replace").split("\n", 1)
err = wfilter(err) or err
return f"ERROR {rc}: {err[0]}"
if se:
err = se.decode("utf-8", "replace").split("\n", 1)
err = wfilter(err)
if err:
return f"Warning: {err[0]}"
return None
def main():
fp = sys.argv[1]
md, _ = ffprobe(fp)
zb = sys.stdin.buffer.read()
zs = zb.decode("utf-8", "replace")
md = json.loads(zs)
try:
w = int(md[".resw"][1])
h = int(md[".resh"][1])
w, h = [int(x) for x in md["res"].split("x")]
if not w + h:
raise Exception()
except:
return "could not determine resolution"
if min(w, h) < 720:
# grab streams/format metadata + 2 seconds of frames at the start and end
zs = "ffprobe -hide_banner -v warning -of json -show_streams -show_format -show_packets -show_data_hash crc32 -read_intervals %+2,999999%+2"
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
so, se = p.communicate()
# spaces to tabs, drops filesize from 69k to 48k
so = b"\n".join(
[
b"\t" * int((len(x) - len(x.lstrip())) / 4) + x.lstrip()
for x in (so or b"").split(b"\n")
]
)
with open(fsenc(f"{fp}.ff.json"), "wb") as f:
f.write(so)
err = errchk(so, se, p.returncode)
if err:
return err
if min(w, h) < 1080:
return "resolution too small"
zs = (
@@ -49,16 +111,7 @@ def main():
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
so, se = p.communicate()
rc = p.returncode
if rc:
err = (so + se).decode("utf-8", "replace").split("\n", 1)[0]
return f"ERROR {rc}: {err}"
if se:
err = se.decode("utf-8", "replace").split("\n", 1)[0]
return f"Warning: {err}"
return None
return errchk(so, se, p.returncode)
if __name__ == "__main__":

View File

@@ -11,13 +11,13 @@ sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
help() { cat <<'EOF'
usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]"
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd"
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
example for running straight from source (instead of using an sfx):
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd"
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
note that if you have python modules installed as --user (such as bpm/key detectors),
you should add /home/foo/.local as a VOLDIR

View File

@@ -15,6 +15,8 @@ save one of these as `.epilogue.html` inside a folder to customize it:
point `--js-browser` to one of these by URL:
* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API

View File

@@ -18,7 +18,7 @@ var u2min = `
<style>
#ops, #path, #tree, #files, #epi+div+h2,
#u2conf td.c+.c, #u2cards, #u2foot, #srch_dz, #srch_zd {
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
display: none !important;
}
#u2conf {margin:5em auto 0 auto !important}

View File

@@ -0,0 +1,211 @@
// way more specific example --
// assumes all files dropped into the uploader have a youtube-id somewhere in the filename,
// locates the youtube-ids and passes them to an API which returns a list of IDs which should be uploaded
//
// also tries to find the youtube-id in the embedded metadata
//
// assumes copyparty is behind nginx as /ytq is a standalone service which must be rproxied in place
function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
var passthru = up2k.uc.fsearch;
if (passthru)
return hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { });
}
function bstrpos(buf, ptn) {
var ofs = 0,
ch0 = ptn[0],
sz = buf.byteLength;
while (true) {
ofs = buf.indexOf(ch0, ofs);
if (ofs < 0 || ofs >= sz)
return -1;
for (var a = 1; a < ptn.length; a++)
if (buf[ofs + a] !== ptn[a])
break;
if (a === ptn.length)
return ofs;
++ofs;
}
}
async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
var t0 = Date.now(),
yt_ids = new Set(),
textdec = new TextDecoder('latin1'),
md_ptn = new TextEncoder().encode('youtube.com/watch?v='),
file_ids = [], // all IDs found for each good_files
mofs = 0,
mnchk = 0,
mfile = '';
for (var a = 0; a < good_files.length; a++) {
var [fobj, name] = good_files[a],
sz = fobj.size,
ids = [],
id_ok = false,
m;
// all IDs found in this file
file_ids.push(ids);
// look for ID in filename; reduce the
// metadata-scan intensity if the id looks safe
m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv)$/i.exec(name);
id_ok = !!m;
while (true) {
// fuzzy catch-all;
// some ytdl fork did %(title)-%(id).%(ext) ...
m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(name);
if (!m)
break;
name = name.replace(m[1], '');
yt_ids.add(m[1]);
ids.push(m[1]);
}
// look for IDs in video metadata,
if (/\.(mp4|webm|mkv)$/i.exec(name)) {
toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`);
// check first and last 128 MiB;
// pWxOroN5WCo.mkv @ 6edb98 (6.92M)
// Nf-nN1wF5Xo.mp4 @ 4a98034 (74.6M)
var chunksz = 1024 * 1024 * 2, // byte
aspan = id_ok ? 128 : 512; // MiB
aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz;
for (var side = 0; side < 2; side++) {
var ofs = side ? Math.max(0, sz - aspan) : 0,
nchunks = aspan / chunksz;
for (var chunk = 0; chunk < nchunks; chunk++) {
var bchunk = await fobj.slice(ofs, ofs + chunksz + 16).arrayBuffer(),
uchunk = new Uint8Array(bchunk, 0, bchunk.byteLength),
bofs = bstrpos(uchunk, md_ptn),
absofs = Math.min(ofs + bofs, (sz - ofs) + bofs),
txt = bofs < 0 ? '' : textdec.decode(uchunk.subarray(bofs)),
m;
//console.log(`side ${ side }, chunk ${ chunk }, ofs ${ ofs }, bchunk ${ bchunk.byteLength }, txt ${ txt.length }`);
while (true) {
// mkv/webm have [a-z] immediately after url
m = /(youtube\.com\/watch\?v=[\w-]{11})/.exec(txt);
if (!m)
break;
txt = txt.replace(m[1], '');
m = m[1].slice(-11);
console.log(`found ${m} @${bofs}, ${name} `);
yt_ids.add(m);
if (!has(ids, m))
ids.push(m);
// bail after next iteration
chunk = nchunks - 1;
side = 9;
if (mofs < absofs) {
mofs = absofs;
mfile = name;
}
}
ofs += chunksz;
if (ofs >= sz)
break;
}
}
}
}
if (false) {
var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`,
mfun = function () { toast.ok(0, msg); };
mfun();
setTimeout(mfun, 200);
return hooks[0]([], [], [], hooks.slice(1));
}
toast.inf(5, `running query for ${yt_ids.size} videos...`);
var xhr = new XHR();
xhr.open('POST', '/ytq', true);
xhr.setRequestHeader('Content-Type', 'text/plain');
xhr.onload = xhr.onerror = function () {
if (this.status != 200)
return toast.err(0, `sorry, database query failed; _; \n\nplease let us know so we can look at it, thx!!\n\nerror ${this.status}: ${(this.response && this.response.err) || this.responseText} `);
process_id_list(this.responseText);
};
xhr.send(Array.from(yt_ids).join('\n'));
setTimeout(function () { process_id_list('Nf-nN1wF5Xo\n'); }, 500);
function process_id_list(txt) {
var wanted_ids = new Set(txt.trim().split('\n')),
wanted_names = new Set(), // basenames with a wanted ID
wanted_files = new Set(); // filedrops
for (var a = 0; a < good_files.length; a++) {
var name = good_files[a][1];
for (var b = 0; b < file_ids[a].length; b++)
if (wanted_ids.has(file_ids[a][b])) {
wanted_files.add(good_files[a]);
var m = /(.*)\.(mp4|webm|mkv)$/i.exec(name);
if (m)
wanted_names.add(m[1]);
break;
}
}
// add all files with the same basename as each explicitly wanted file
// (infojson/chatlog/etc when ID was discovered from metadata)
for (var a = 0; a < good_files.length; a++) {
var name = good_files[a][1];
for (var b = 0; b < 3; b++) {
name = name.replace(/\.[^\.]+$/, '');
if (wanted_names.has(name)) {
wanted_files.add(good_files[a]);
break;
}
}
}
function upload_filtered() {
if (!wanted_files.size)
return modal.alert('Good news -- turns out we already have all those.\n\nBut thank you for checking in!');
hooks[0](Array.from(wanted_files), nil_files, bad_files, hooks.slice(1));
}
function upload_all() {
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
}
var n_skip = good_files.length - wanted_files.size,
msg = `you added ${good_files.length} files; ${n_skip} of them were skipped --\neither because we already have them,\nor because there is no youtube-ID in your filename.\n\n<code>OK</code> / <code>Enter</code> = continue uploading just the ${wanted_files.size} files we definitely need\n\n<code>Cancel</code> / <code>ESC</code> = override the filter; upload ALL the files you added`;
if (!n_skip)
upload_filtered();
else
modal.confirm(msg, upload_filtered, upload_all);
};
}
up2k_hooks.push(function () {
up2k.gotallfiles.unshift(up2k_namefilter);
});

View File

@@ -0,0 +1,45 @@
// hooks into up2k
function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
// is called when stuff is dropped into the browser,
// after iterating through the directory tree and discovering all files,
// before the upload confirmation dialogue is shown
// good_files will successfully upload
// nil_files are empty files and will show an alert in the final hook
// bad_files are unreadable and cannot be uploaded
var file_lists = [good_files, nil_files, bad_files];
// build a list of filenames
var filenames = [];
for (var lst of file_lists)
for (var ent of lst)
filenames.push(ent[1]);
toast.inf(5, "running database query...");
// simulate delay while passing the list to some api for checking
setTimeout(function () {
// only keep webm files as an example
var new_lists = [];
for (var lst of file_lists) {
var keep = [];
new_lists.push(keep);
for (var ent of lst)
if (/\.webm$/.test(ent[1]))
keep.push(ent);
}
// finally, call the next hook in the chain
[good_files, nil_files, bad_files] = new_lists;
hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
}, 1000);
}
// register
up2k_hooks.push(function () {
up2k.gotallfiles.unshift(up2k_namefilter);
});

View File

@@ -275,7 +275,7 @@ def disable_quickedit() -> None:
raise ctypes.WinError(err) # type: ignore
return args
k32 = ctypes.WinDLL("kernel32", use_last_error=True) # type: ignore
k32 = ctypes.WinDLL(str("kernel32"), use_last_error=True) # type: ignore
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
@@ -382,6 +382,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
@@ -394,6 +395,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2v\033[35m disables file verification, overrides -e2v*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
@@ -481,8 +483,10 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (within same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--df", metavar="GiB", type=float, default=0, help="ensure GiB free disk space by rejecting upload requests")
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="windows-only: minimum size of incoming uploads through up2k before they are made into sparse files")
ap2.add_argument("--turbo", metavar="LVL", type=int, default=0, help="configure turbo-mode in up2k client; 0 = off and warn if enabled, 1 = off, 2 = on, 3 = on and disable datecheck")
ap2.add_argument("--u2sort", metavar="TXT", type=u, default="s", help="upload order; s=smallest-first, n=alphabetical, fs=force-s, fn=force-n -- alphabetical is a bit slower on fiber/LAN but makes it easier to eyeball if everything went fine")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -585,10 +589,14 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("-e2d", action="store_true", help="enable up2k database, making files searchable + enables upload deduplocation")
ap2.add_argument("-e2ds", action="store_true", help="scan writable folders for new files on startup; sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scans all folders on startup; sets -e2ds")
ap2.add_argument("-e2v", action="store_true", help="verify file integrity; rehash all files and compare with db")
ap2.add_argument("-e2vu", action="store_true", help="on hash mismatch: update the database with the new hash")
ap2.add_argument("-e2vp", action="store_true", help="on hash mismatch: panic and quit copyparty")
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--db-act", metavar="SEC", type=float, default=10, help="defer any scheduled volume reindexing until SEC seconds after last db write (uploads, renames, ...)")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline -- terminate searches running for more than SEC seconds")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=7999, help="max search results to allow clients to fetch; 125 results will be shown initially")
@@ -755,6 +763,12 @@ def main(argv: Optional[list[str]] = None) -> None:
except:
raise Exception("invalid value for -p")
for arg, kname, okays in [["--u2sort", "u2sort", "s n fs fn"]]:
val = unicode(getattr(al, kname))
if val not in okays.split():
zs = "argument {} cannot be '{}'; try one of these: {}"
raise Exception(zs.format(arg, val, okays))
if HAVE_SSL:
if al.ssl_ver:
configure_ssl_ver(al)

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 3, 4)
VERSION = (1, 3, 8)
CODENAME = "god dag"
BUILD_DT = (2022, 7, 6)
BUILD_DT = (2022, 7, 27)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -20,6 +20,8 @@ from .util import (
Pebkac,
absreal,
fsenc,
get_df,
humansize,
relchk,
statdir,
uncyg,
@@ -72,15 +74,23 @@ class AXS(object):
class Lim(object):
def __init__(self) -> None:
def __init__(self, log_func: Optional["RootLogger"]) -> None:
self.log_func = log_func
self.reg: Optional[dict[str, dict[str, Any]]] = None # up2k registry
self.nups: dict[str, list[float]] = {} # num tracker
self.bups: dict[str, list[tuple[float, int]]] = {} # byte tracker list
self.bupc: dict[str, int] = {} # byte tracker cache
self.nosub = False # disallow subdirectories
self.smin = -1 # filesize min
self.smax = -1 # filesize max
self.dfl = 0 # free disk space limit
self.dft = 0 # last-measured time
self.dfv = 0 # currently free
self.smin = 0 # filesize min
self.smax = 0 # filesize max
self.bwin = 0 # bytes window
self.bmax = 0 # bytes max
@@ -92,18 +102,34 @@ class Lim(object):
self.rotf = "" # rot datefmt
self.rot_re = re.compile("") # rotf check
def log(self, msg: str, c: Union[int, str] = 0) -> None:
if self.log_func:
self.log_func("up-lim", msg, c)
def set_rotf(self, fmt: str) -> None:
self.rotf = fmt
r = re.escape(fmt).replace("%Y", "[0-9]{4}").replace("%j", "[0-9]{3}")
r = re.sub("%[mdHMSWU]", "[0-9]{2}", r)
self.rot_re = re.compile("(^|/)" + r + "$")
def all(self, ip: str, rem: str, sz: float, abspath: str) -> tuple[str, str]:
def all(
self,
ip: str,
rem: str,
sz: int,
abspath: str,
reg: Optional[dict[str, dict[str, Any]]] = None,
) -> tuple[str, str]:
if reg is not None and self.reg is None:
self.reg = reg
self.dft = 0
self.chk_nup(ip)
self.chk_bup(ip)
self.chk_rem(rem)
if sz != -1:
self.chk_sz(sz)
self.chk_df(abspath, sz) # side effects; keep last-ish
ap2, vp2 = self.rot(abspath)
if abspath == ap2:
@@ -111,13 +137,33 @@ class Lim(object):
return ap2, ("{}/{}".format(rem, vp2) if rem else vp2)
def chk_sz(self, sz: float) -> None:
if self.smin != -1 and sz < self.smin:
def chk_sz(self, sz: int) -> None:
if sz < self.smin:
raise Pebkac(400, "file too small")
if self.smax != -1 and sz > self.smax:
if self.smax and sz > self.smax:
raise Pebkac(400, "file too big")
def chk_df(self, abspath: str, sz: int, already_written: bool = False) -> None:
if not self.dfl:
return
if self.dft < time.time():
self.dft = int(time.time()) + 300
self.dfv = get_df(abspath)[0] or 0
for j in list(self.reg.values()) if self.reg else []:
self.dfv -= int(j["size"] / len(j["hash"]) * len(j["need"]))
if already_written:
sz = 0
if self.dfv - sz < self.dfl:
self.dft = min(self.dft, int(time.time()) + 10)
t = "server HDD is full; {} free, need {}"
raise Pebkac(500, t.format(humansize(self.dfv - self.dfl), humansize(sz)))
self.dfv -= int(sz)
def chk_rem(self, rem: str) -> None:
if self.nosub and rem:
raise Pebkac(500, "no subdirectories allowed")
@@ -226,7 +272,7 @@ class VFS(object):
def __init__(
self,
log: Optional[RootLogger],
log: Optional["RootLogger"],
realpath: str,
vpath: str,
axs: AXS,
@@ -569,7 +615,7 @@ class AuthSrv(object):
def __init__(
self,
args: argparse.Namespace,
log_func: Optional[RootLogger],
log_func: Optional["RootLogger"],
warn_anonwrite: bool = True,
) -> None:
self.args = args
@@ -917,13 +963,20 @@ class AuthSrv(object):
vfs.histtab = {zv.realpath: zv.histpath for zv in vfs.all_vols.values()}
for vol in vfs.all_vols.values():
lim = Lim()
lim = Lim(self.log_func)
use = False
if vol.flags.get("nosub"):
use = True
lim.nosub = True
zs = vol.flags.get("df") or (
"{}g".format(self.args.df) if self.args.df else ""
)
if zs:
use = True
lim.dfl = unhumanize(zs)
zs = vol.flags.get("sz")
if zs:
use = True
@@ -1008,7 +1061,7 @@ class AuthSrv(object):
if ptn:
vol.flags[vf] = re.compile(ptn)
for k in ["e2t", "e2ts", "e2tsr"]:
for k in ["e2t", "e2ts", "e2tsr", "e2v", "e2vu", "e2vp"]:
if getattr(self.args, k):
vol.flags[k] = True
@@ -1030,7 +1083,7 @@ class AuthSrv(object):
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
# d2d drops all database features for a volume
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]:
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"], ["d2d", "e2v"]]:
if not vol.flags.get(grp, False):
continue
@@ -1052,6 +1105,12 @@ class AuthSrv(object):
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
for grp, rm in [["d2v", "e2v"]]:
if not vol.flags.get(grp, False):
continue
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# verify tags mentioned by -mt[mp] are used by -mte
local_mtp = {}
local_only_mtp = {}
@@ -1101,6 +1160,7 @@ class AuthSrv(object):
vfs.bubble_flags()
e2vs = []
t = "volumes and permissions:\n"
for zv in vfs.all_vols.values():
if not self.warn_anonwrite:
@@ -1118,8 +1178,16 @@ class AuthSrv(object):
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
u = u if u else "\033[36m--none--\033[0m"
t += "\n| {}: {}".format(txt, u)
if "e2v" in zv.flags:
e2vs.append(zv.vpath or "/")
t += "\n"
if e2vs:
t += "\n\033[33me2v enabled for the following volumes;\nuploads will be blocked until scan has finished:\n \033[0m"
t += " ".join(e2vs) + "\n"
if self.warn_anonwrite and not self.args.no_voldump:
self.log(t)
@@ -1127,7 +1195,7 @@ class AuthSrv(object):
zv, _ = vfs.get("/", "*", False, True)
if self.warn_anonwrite and os.getcwd() == zv.realpath:
self.warn_anonwrite = False
t = "anyone can read/write the current directory: {}\n"
t = "anyone can write to the current directory: {}\n"
self.log(t.format(zv.realpath), c=1)
except Pebkac:
self.warn_anonwrite = True

View File

@@ -42,7 +42,7 @@ class BrokerCli(object):
"""
def __init__(self) -> None:
self.log: RootLogger = None
self.log: "RootLogger" = None
self.args: argparse.Namespace = None
self.asrv: AuthSrv = None
self.httpsrv: "HttpSrv" = None

View File

@@ -1,7 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import ctypes
try:
import ctypes
except:
pass
import os
import re
import time
@@ -18,9 +23,10 @@ except:
class Fstab(object):
def __init__(self, log: RootLogger):
def __init__(self, log: "RootLogger"):
self.log_func = log
self.trusted = False
self.tab: Optional[VFS] = None
self.cache: dict[str, str] = {}
self.age = 0.0
@@ -49,6 +55,7 @@ class Fstab(object):
self.log(msg.format(path, fs, min_ex()), 3)
return fs
path = path.lstrip("/")
try:
return self.cache[path]
except:
@@ -92,25 +99,48 @@ class Fstab(object):
def relabel(self, path: str, nval: str) -> None:
assert self.tab
self.cache = {}
path = path.lstrip("/")
ptn = re.compile(r"^[^\\/]*")
vn, _ = self.tab._find(path)
vn, rem = self.tab._find(path)
if not self.trusted:
# no mtab access; have to build as we go
if "/" in rem:
self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0]))
if rem:
self.tab.add(nval, path)
else:
vn.realpath = nval
return
visit = [vn]
while visit:
vn = visit.pop()
vn.realpath = ptn.sub(nval, vn.realpath)
visit.extend(list(vn.nodes.values()))
self.cache = {}
def get_unix(self, path: str) -> str:
if not self.tab:
self.build_tab()
try:
self.build_tab()
self.trusted = True
except:
# prisonparty or other restrictive environment
self.log("failed to build tab:\n{}".format(min_ex()), 3)
self.tab = VFS(self.log_func, "idk", "/", AXS(), {})
self.trusted = False
assert self.tab
return self.tab._find(path)[0].realpath.split("/")[0]
ret = self.tab._find(path)[0]
if self.trusted or path == ret.vpath:
return ret.realpath.split("/")[0]
else:
return "idk"
def get_w32(self, path: str) -> str:
# list mountpoints: fsutil fsinfo drives
assert ctypes
from ctypes.wintypes import BOOL, DWORD, LPCWSTR, LPDWORD, LPWSTR, MAX_PATH
def echk(rc: int, fun: Any, args: Any) -> None:

View File

@@ -24,12 +24,7 @@ try:
except:
pass
try:
import ctypes
except:
pass
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, WINDOWS, E, unicode
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E, unicode
from .authsrv import VFS # typechk
from .bos import bos
from .star import StreamTar
@@ -48,6 +43,7 @@ from .util import (
fsenc,
gen_filekey,
gencookie,
get_df,
get_spd,
guess_mime,
gzip_orig_sz,
@@ -380,13 +376,21 @@ class HttpCli(object):
if not self._check_nonfatal(pex, post):
self.keepalive = False
msg = str(ex) if pex == ex else min_ex()
self.log("{}\033[0m, {}".format(msg, self.vpath), 3)
em = str(ex)
msg = em if pex == ex else min_ex()
self.log(
"{}\033[0m, {}".format(msg, self.vpath),
6 if em.startswith("client d/c ") else 3,
)
msg = "{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
msg = "{}\r\nURL: {}\r\n".format(em, self.vpath)
if self.hint:
msg += "hint: {}\r\n".format(self.hint)
if "database is locked" in em:
self.conn.hsrv.broker.say("log_stacks")
msg += "hint: important info in the server log\r\n"
msg = "<pre>" + html_escape(msg)
self.reply(msg.encode("utf-8", "replace"), status=pex.code, volsan=True)
return self.keepalive
@@ -1127,8 +1131,10 @@ class HttpCli(object):
except:
self.log("failed to utime ({}, {})".format(fin_path, times))
cinf = self.headers.get("x-up2k-stat", "")
spd = self._spd(post_sz)
self.log("{} thank".format(spd))
self.log("{:70} thank {}".format(spd, cinf))
self.reply(b"thank")
return True
@@ -1287,7 +1293,12 @@ class HttpCli(object):
lim.chk_nup(self.ip)
try:
max_sz = lim.smax if lim else 0
max_sz = 0
if lim:
v1 = lim.smax
v2 = lim.dfv - lim.dfl
max_sz = min(v1, v2) if v1 and v2 else v1 or v2
with ren_open(tnam, "wb", 512 * 1024, **open_args) as zfw:
f, tnam = zfw["orz"]
tabspath = os.path.join(fdir, tnam)
@@ -1302,6 +1313,7 @@ class HttpCli(object):
lim.nup(self.ip)
lim.bup(self.ip, sz)
try:
lim.chk_df(tabspath, sz, True)
lim.chk_sz(sz)
lim.chk_bup(self.ip)
lim.chk_nup(self.ip)
@@ -1915,7 +1927,13 @@ class HttpCli(object):
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
else:
vstate = {}
vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
vs = {
"scanning": None,
"hashq": None,
"tagq": None,
"mtpq": None,
"dbwt": None,
}
if self.uparam.get("ls") in ["v", "t", "txt"]:
if self.uname == "*":
@@ -1925,7 +1943,7 @@ class HttpCli(object):
if vstate:
txt += "\nstatus:"
for k in ["scanning", "hashq", "tagq", "mtpq"]:
for k in ["scanning", "hashq", "tagq", "mtpq", "dbwt"]:
txt += " {}({})".format(k, vs[k])
if rvol:
@@ -1954,6 +1972,7 @@ class HttpCli(object):
hashq=vs["hashq"],
tagq=vs["tagq"],
mtpq=vs["mtpq"],
dbwt=vs["dbwt"],
url_suf=suf,
k304=self.k304(),
)
@@ -2315,26 +2334,14 @@ class HttpCli(object):
except:
self.log("#wow #whoa")
try:
# some fuses misbehave
if not self.args.nid:
if WINDOWS:
try:
bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
)
srv_info.append(humansize(bfree.value) + " free")
except:
pass
else:
sv = os.statvfs(fsenc(abspath))
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)
srv_info.append("{} free of {}".format(free, total))
except:
pass
if not self.args.nid:
free, total = get_df(abspath)
if total is not None:
h1 = humansize(free or 0)
h2 = humansize(total)
srv_info.append("{} free of {}".format(h1, h2))
elif free is not None:
srv_info.append(humansize(free, True) + " free")
srv_infot = "</span> // <span>".join(srv_info)
@@ -2413,6 +2420,7 @@ class HttpCli(object):
"dtheme": self.args.theme,
"themes": self.args.themes,
"turbolvl": self.args.turbo,
"u2sort": self.args.u2sort,
}
if self.args.js_browser:

View File

@@ -62,7 +62,7 @@ class HttpConn(object):
self.nreq: int = 0 # mypy404
self.nbyte: int = 0 # mypy404
self.u2idx: Optional[U2idx] = None
self.log_func: Util.RootLogger = hsrv.log # mypy404
self.log_func: "Util.RootLogger" = hsrv.log # mypy404
self.log_src: str = "httpconn" # mypy404
self.lf_url: Optional[Pattern[str]] = (
re.compile(self.args.lf_url) if self.args.lf_url else None

View File

@@ -261,8 +261,11 @@ class HttpSrv(object):
)
self.thr_client(sck, addr)
me.name = self.name + "-poolw"
except:
self.log(self.name, "thr_client: " + min_ex(), 3)
except Exception as ex:
if str(ex).startswith("client d/c "):
self.log(self.name, "thr_client: " + str(ex), 6)
else:
self.log(self.name, "thr_client: " + min_ex(), 3)
def shutdown(self) -> None:
self.stopping = True

View File

@@ -46,6 +46,7 @@ class MParser(object):
self.force = False
self.kill = "t" # tree; all children recursively
self.audio = "y"
self.pri = 0 # priority; higher = later
self.ext = []
while True:
@@ -83,6 +84,10 @@ class MParser(object):
self.ext.append(arg[1:])
continue
if arg.startswith("p"):
self.pri = int(arg[1:] or "1")
continue
raise Exception()
@@ -243,7 +248,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
class MTag(object):
def __init__(self, log_func: RootLogger, args: argparse.Namespace) -> None:
def __init__(self, log_func: "RootLogger", args: argparse.Namespace) -> None:
self.log_func = log_func
self.args = args
self.usable = True
@@ -432,6 +437,8 @@ class MTag(object):
return r1
def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]:
ret: dict[str, tuple[int, Any]] = {}
if not bos.path.isfile(abspath):
return {}
@@ -445,7 +452,10 @@ class MTag(object):
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
sz = bos.path.getsize(abspath)
ret = {".q": (0, int((sz / md.info.length) / 128))}
try:
ret[".q"] = (0, int((sz / md.info.length) / 128))
except:
pass
for attr, k, norm in [
["codec", "ac", unicode],
@@ -487,7 +497,9 @@ class MTag(object):
ret, md = ffprobe(abspath)
return self.normalize_tags(ret, md)
def get_bin(self, parsers: dict[str, MParser], abspath: str) -> dict[str, Any]:
def get_bin(
self, parsers: dict[str, MParser], abspath: str, oth_tags: dict[str, Any]
) -> dict[str, Any]:
if not bos.path.isfile(abspath):
return {}
@@ -497,8 +509,8 @@ class MTag(object):
env = os.environ.copy()
env["PYTHONPATH"] = pypath
ret = {}
for tagname, parser in parsers.items():
ret: dict[str, Any] = {}
for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])):
try:
cmd = [parser.bin, abspath]
if parser.bin.endswith(".py"):
@@ -506,6 +518,11 @@ class MTag(object):
args = {"env": env, "timeout": parser.timeout, "kill": parser.kill}
if parser.pri:
zd = oth_tags.copy()
zd.update(ret)
args["sin"] = json.dumps(zd).encode("utf-8", "replace")
if WINDOWS:
args["creationflags"] = 0x4000
else:

View File

@@ -44,7 +44,7 @@ class StreamTar(StreamArc):
def __init__(
self,
log: NamedLogger,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):
@@ -65,17 +65,19 @@ class StreamTar(StreamArc):
w.start()
def gen(self) -> Generator[Optional[bytes], None, None]:
while True:
buf = self.qfile.q.get()
if not buf:
break
try:
while True:
buf = self.qfile.q.get()
if not buf:
break
self.co += len(buf)
yield buf
self.co += len(buf)
yield buf
yield None
if self.errf:
bos.unlink(self.errf["ap"])
yield None
finally:
if self.errf:
bos.unlink(self.errf["ap"])
def ser(self, f: dict[str, Any]) -> None:
name = f["vp"]

View File

@@ -17,7 +17,7 @@ except:
class StreamArc(object):
def __init__(
self,
log: NamedLogger,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
**kwargs: Any
):

View File

@@ -2,7 +2,9 @@
from __future__ import print_function, unicode_literals
import argparse
import base64
import calendar
import gzip
import os
import shlex
import signal
@@ -27,7 +29,7 @@ from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
from .th_srv import HAVE_PIL, HAVE_VIPS, HAVE_WEBP, ThumbSrv
from .up2k import Up2k
from .util import ansi_re, min_ex, mp, start_log_thrs, start_stackmon
from .util import ansi_re, min_ex, mp, start_log_thrs, start_stackmon, alltrace
class SvcHub(object):
@@ -56,6 +58,7 @@ class SvcHub(object):
self.log_mutex = threading.Lock()
self.next_day = 0
self.tstack = 0.0
if args.sss or args.s >= 3:
args.ss = True
@@ -358,6 +361,9 @@ class SvcHub(object):
print("nailed it", end="")
ret = self.retcode
except:
print("\033[31m[ error during shutdown ]\n{}\033[0m".format(min_ex()))
raise
finally:
if self.args.wintitle:
print("\033]0;\033\\", file=sys.stderr, end="")
@@ -497,3 +503,15 @@ class SvcHub(object):
sck.sendall(b"READY=1")
except:
self.log("sd_notify", min_ex())
def log_stacks(self) -> None:
td = time.time() - self.tstack
if td < 300:
self.log("stacks", "cooldown {}".format(td))
return
self.tstack = time.time()
zb = alltrace().encode("utf-8", "replace")
zb = gzip.compress(zb)
zs = base64.b64encode(zb).decode("ascii")
self.log("stacks", zs)

View File

@@ -218,7 +218,7 @@ def gen_ecdr64_loc(ecdr64_pos: int) -> bytes:
class StreamZip(StreamArc):
def __init__(
self,
log: NamedLogger,
log: "NamedLogger",
fgen: Generator[dict[str, Any], None, None],
utf8: bool = False,
pre_crc: bool = False,
@@ -272,41 +272,44 @@ class StreamZip(StreamArc):
def gen(self) -> Generator[bytes, None, None]:
errors = []
for f in self.fgen:
if "err" in f:
errors.append((f["vp"], f["err"]))
continue
try:
for f in self.fgen:
if "err" in f:
errors.append((f["vp"], f["err"]))
continue
try:
for x in self.ser(f):
try:
for x in self.ser(f):
yield x
except GeneratorExit:
raise
except:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append((f["vp"], ex))
if errors:
errf, txt = errdesc(errors)
self.log("\n".join(([repr(errf)] + txt[1:])))
for x in self.ser(errf):
yield x
except:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append((f["vp"], ex))
if errors:
errf, txt = errdesc(errors)
self.log("\n".join(([repr(errf)] + txt[1:])))
for x in self.ser(errf):
yield x
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
cdir_end = self.pos
cdir_pos = self.pos
for name, sz, ts, crc, h_pos in self.items:
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
yield self._ct(buf)
cdir_end = self.pos
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
if need_64:
ecdir64_pos = self.pos
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
yield self._ct(buf)
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
if need_64:
ecdir64_pos = self.pos
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
yield self._ct(buf)
buf = gen_ecdr64_loc(ecdir64_pos)
yield self._ct(buf)
buf = gen_ecdr64_loc(ecdir64_pos)
yield self._ct(buf)
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
if errors:
bos.unlink(errf["ap"])
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
yield self._ct(ecdr)
finally:
if errors:
bos.unlink(errf["ap"])

View File

@@ -559,14 +559,15 @@ class ThumbSrv(object):
def clean(self, histpath: str) -> int:
ret = 0
for cat in ["th", "ac"]:
ret += self._clean(histpath, cat, "")
top = os.path.join(histpath, cat)
if not bos.path.isdir(top):
continue
ret += self._clean(cat, top)
return ret
def _clean(self, histpath: str, cat: str, thumbpath: str) -> int:
if not thumbpath:
thumbpath = os.path.join(histpath, cat)
def _clean(self, cat: str, thumbpath: str) -> int:
# self.log("cln {}".format(thumbpath))
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
maxage = getattr(self.args, cat + "_maxage")
@@ -600,7 +601,7 @@ class ThumbSrv(object):
self.log("rm -rf [{}]".format(fp))
shutil.rmtree(fp, ignore_errors=True)
else:
self._clean(histpath, cat, fp)
ndirs += self._clean(cat, fp)
continue

File diff suppressed because it is too large

View File

@@ -24,6 +24,11 @@ from datetime import datetime
from .__init__ import ANYWIN, PY2, TYPE_CHECKING, VT100, WINDOWS
from .stolen import surrogateescape
try:
import ctypes
except:
pass
try:
HAVE_SQLITE3 = True
import sqlite3 # pylint: disable=unused-import # typechk
@@ -137,6 +142,9 @@ IMPLICATIONS = [
["e2tsr", "e2ts"],
["e2ts", "e2t"],
["e2t", "e2d"],
["e2vu", "e2v"],
["e2vp", "e2v"],
["e2v", "e2d"],
]
@@ -240,7 +248,7 @@ class _Unrecv(object):
undo any number of socket recv ops
"""
def __init__(self, s: socket.socket, log: Optional[NamedLogger]) -> None:
def __init__(self, s: socket.socket, log: Optional["NamedLogger"]) -> None:
self.s = s
self.log = log
self.buf: bytes = b""
@@ -284,7 +292,7 @@ class _LUnrecv(object):
with expensive debug logging
"""
def __init__(self, s: socket.socket, log: Optional[NamedLogger]) -> None:
def __init__(self, s: socket.socket, log: Optional["NamedLogger"]) -> None:
self.s = s
self.log = log
self.buf = b""
@@ -659,7 +667,9 @@ def ren_open(
class MultipartParser(object):
def __init__(self, log_func: NamedLogger, sr: Unrecv, http_headers: dict[str, str]):
def __init__(
self, log_func: "NamedLogger", sr: Unrecv, http_headers: dict[str, str]
):
self.sr = sr
self.log = log_func
self.headers = http_headers
@@ -979,6 +989,11 @@ def s2hms(s: float, optional_h: bool = False) -> str:
return "{}:{:02}:{:02}".format(h, m, s)
def djoin(*paths: str) -> str:
"""joins without adding a trailing slash on blank args"""
return os.path.join(*[x for x in paths if x])
def uncyg(path: str) -> str:
if len(path) < 2 or not path.startswith("/"):
return path
@@ -1181,15 +1196,30 @@ def s3enc(mem_cur: "sqlite3.Cursor", rd: str, fn: str) -> tuple[str, str]:
def s3dec(rd: str, fn: str) -> tuple[str, str]:
ret = []
for v in [rd, fn]:
if v.startswith("//"):
ret.append(w8b64dec(v[2:]))
# self.log("mojide [{}] {}".format(ret[-1], v[2:]))
else:
ret.append(v)
return (
w8b64dec(rd[2:]) if rd.startswith("//") else rd,
w8b64dec(fn[2:]) if fn.startswith("//") else fn,
)
return ret[0], ret[1]
def db_ex_chk(log: "NamedLogger", ex: Exception, db_path: str) -> bool:
if str(ex) != "database is locked":
return False
thr = threading.Thread(target=lsof, args=(log, db_path))
thr.daemon = True
thr.start()
return True
def lsof(log: "NamedLogger", abspath: str) -> None:
try:
rc, so, se = runcmd([b"lsof", b"-R", fsenc(abspath)], timeout=5)
zs = (so.strip() + "\n" + se.strip()).strip()
log("lsof {} = {}\n{}".format(abspath, rc, zs), 3)
except:
log("lsof failed; " + min_ex(), 3)
def atomic_move(usrc: str, udst: str) -> None:
@@ -1204,6 +1234,24 @@ def atomic_move(usrc: str, udst: str) -> None:
os.rename(src, dst)
def get_df(abspath: str) -> tuple[Optional[int], Optional[int]]:
try:
# some fuses misbehave
if ANYWIN:
bfree = ctypes.c_ulonglong(0)
ctypes.windll.kernel32.GetDiskFreeSpaceExW( # type: ignore
ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree)
)
return (bfree.value, None)
else:
sv = os.statvfs(fsenc(abspath))
free = sv.f_frsize * sv.f_bfree
total = sv.f_frsize * sv.f_blocks
return (free, total)
except:
return (None, None)
def read_socket(sr: Unrecv, total_size: int) -> Generator[bytes, None, None]:
remains = total_size
while remains > 0:
@@ -1230,7 +1278,7 @@ def read_socket_unbounded(sr: Unrecv) -> Generator[bytes, None, None]:
def read_socket_chunked(
sr: Unrecv, log: Optional[NamedLogger] = None
sr: Unrecv, log: Optional["NamedLogger"] = None
) -> Generator[bytes, None, None]:
err = "upload aborted: expected chunk length, got [{}] |{}| instead"
while True:
@@ -1308,7 +1356,7 @@ def hashcopy(
def sendfile_py(
log: NamedLogger,
log: "NamedLogger",
lower: int,
upper: int,
f: typing.BinaryIO,
@@ -1336,7 +1384,7 @@ def sendfile_py(
def sendfile_kern(
log: NamedLogger,
log: "NamedLogger",
lower: int,
upper: int,
f: typing.BinaryIO,
@@ -1377,7 +1425,7 @@ def sendfile_kern(
def statdir(
logger: Optional[RootLogger], scandir: bool, lstat: bool, top: str
logger: Optional["RootLogger"], scandir: bool, lstat: bool, top: str
) -> Generator[tuple[str, os.stat_result], None, None]:
if lstat and ANYWIN:
lstat = False
@@ -1420,9 +1468,10 @@ def statdir(
def rmdirs(
logger: RootLogger, scandir: bool, lstat: bool, top: str, depth: int
logger: "RootLogger", scandir: bool, lstat: bool, top: str, depth: int
) -> tuple[list[str], list[str]]:
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
"""rmdir all descendants, then self"""
if not os.path.isdir(fsenc(top)):
top = os.path.dirname(top)
depth -= 1
@@ -1446,6 +1495,21 @@ def rmdirs(
return ok, ng
def rmdirs_up(top: str) -> tuple[list[str], list[str]]:
"""rmdir on self, then all parents"""
try:
os.rmdir(fsenc(top))
except:
return [], [top]
par = os.path.dirname(top)
if not par:
return [top], []
ok, ng = rmdirs_up(par)
return [top] + ok, ng
def unescape_cookie(orig: str) -> str:
# mw=idk; doot=qwe%2Crty%3Basd+fgh%2Bjkl%25zxc%26vbn # qwe,rty;asd fgh+jkl%zxc&vbn
ret = ""
@@ -1566,12 +1630,17 @@ def runcmd(
argv: Union[list[bytes], list[str]], timeout: Optional[int] = None, **ka: Any
) -> tuple[int, str, str]:
kill = ka.pop("kill", "t") # [t]ree [m]ain [n]one
sin = ka.pop("sin", None)
if sin:
ka["stdin"] = sp.PIPE
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, **ka)
if not timeout or PY2:
stdout, stderr = p.communicate()
stdout, stderr = p.communicate(sin)
else:
try:
stdout, stderr = p.communicate(timeout=timeout)
stdout, stderr = p.communicate(sin, timeout=timeout)
except sp.TimeoutExpired:
if kill == "n":
return -18, "", "" # SIGCONT; leave it be
@@ -1620,7 +1689,7 @@ def retchk(
rc: int,
cmd: Union[list[bytes], list[str]],
serr: str,
logger: Optional[NamedLogger] = None,
logger: Optional["NamedLogger"] = None,
color: Union[int, str] = 0,
verbose: bool = False,
) -> None:
@@ -1797,7 +1866,6 @@ def termsize() -> tuple[int, int]:
def ioctl_GWINSZ(fd: int) -> Optional[tuple[int, int]]:
try:
import fcntl
import struct
import termios
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"1234"))

View File

@@ -224,12 +224,13 @@ window.baguetteBox = (function () {
['space, P, K', 'video: play / pause'],
['U', 'video: seek 10sec back'],
['P', 'video: seek 10sec ahead'],
['0..9', 'video: seek 0%..90%'],
['M', 'video: toggle mute'],
['V', 'video: toggle loop'],
['C', 'video: toggle auto-next'],
['<code>[</code>, <code>]</code>', 'video: loop start / end'],
],
d = mknod('table'),
d = mknod('table', 'bbox-halp'),
html = ['<tbody>'];
for (var a = 0; a < list.length; a++)
@@ -238,7 +239,6 @@ window.baguetteBox = (function () {
html.push('<tr><td colspan="2">tap middle of img to hide btns</td></tr>');
html.push('<tr><td colspan="2">tap left/right sides for prev/next</td></tr>');
d.innerHTML = html.join('\n') + '</tbody>';
d.setAttribute('id', 'bbox-halp');
d.onclick = function () {
overlay.removeChild(d);
};
@@ -249,7 +249,7 @@ window.baguetteBox = (function () {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
return;
var k = e.code + '', v = vid();
var k = e.code + '', v = vid(), pos = -1;
if (k == "ArrowLeft" || k == "KeyJ")
showPreviousImage();
@@ -265,6 +265,8 @@ window.baguetteBox = (function () {
playpause();
else if (k == "KeyU" || k == "KeyO")
relseek(k == "KeyU" ? -10 : 10);
else if (k.indexOf('Digit') === 0)
vid().currentTime = vid().duration * parseInt(k.slice(-1)) * 0.1;
else if (k == "KeyM" && v) {
v.muted = vmute = !vmute;
mp_ctl();
@@ -487,9 +489,8 @@ window.baguetteBox = (function () {
var imagesFiguresIds = [];
var imagesCaptionsIds = [];
for (var i = 0, fullImage; i < gallery.length; i++) {
fullImage = mknod('div');
fullImage = mknod('div', 'baguette-img-' + i);
fullImage.className = 'full-image';
fullImage.id = 'baguette-img-' + i;
imagesElements.push(fullImage);
imagesFiguresIds.push('bbox-figure-' + i);
@@ -631,16 +632,14 @@ window.baguetteBox = (function () {
if (is_vid && index != currentIndex)
return; // no preload
var figure = mknod('figure');
figure.id = 'bbox-figure-' + index;
var figure = mknod('figure', 'bbox-figure-' + index);
figure.innerHTML = '<div class="bbox-spinner">' +
'<div class="bbox-double-bounce1"></div>' +
'<div class="bbox-double-bounce2"></div>' +
'</div>';
if (options.captions && imageCaption) {
var figcaption = mknod('figcaption');
figcaption.id = 'bbox-figcaption-' + index;
var figcaption = mknod('figcaption', 'bbox-figcaption-' + index);
figcaption.innerHTML = imageCaption;
figure.appendChild(figcaption);
}
@@ -943,7 +942,8 @@ window.baguetteBox = (function () {
else
timer.rm(rotn);
el.onclick = function (e) {
var ctime = 0;
el.onclick = v ? null : function (e) {
var rc = e.target.getBoundingClientRect(),
x = e.clientX - rc.left,
fx = x / (rc.right - rc.left);
@@ -957,6 +957,11 @@ window.baguetteBox = (function () {
clmod(ebi('bbox-btns'), 'off', 't');
clmod(btnPrev, 'off', 't');
clmod(btnNext, 'off', 't');
if (Date.now() - ctime <= 500)
tglfull();
ctime = Date.now();
};
var prev = QS('.full-image.vis');

View File

@@ -527,6 +527,11 @@ html.dy {
--u2-tab-1-bg: a;
--u2-b1-bg: #000;
--u2-b2-bg: #000;
--u2-o-h-bg: #999;
--u2-o-1h-bg: #999;
--u2-o-bg: #eee;
--u2-o-1-bg: #000;
--ud-b1: a;
--sort-1: a;
@@ -2216,6 +2221,10 @@ html.y #bbox-overlay figcaption a {
#u2notbtn * {
line-height: 1.3em;
}
#u2mu div {
height: 1.2em;
overflow: hidden;
}
#u2tabw {
min-height: 0;
transition: min-height .2s;
@@ -2225,6 +2234,7 @@ html.y #bbox-overlay figcaption a {
display: none;
}
#u2tab {
table-layout: fixed;
border-collapse: collapse;
width: calc(100% - 2em);
max-width: 100em;
@@ -2234,6 +2244,7 @@ html.y #bbox-overlay figcaption a {
max-width: none;
}
#u2tab td {
word-wrap: break-word;
border: 1px solid rgba(128,128,128,0.8);
border-width: 0 0px 1px 0;
padding: .2em .3em;
@@ -2248,7 +2259,19 @@ html.y #bbox-overlay figcaption a {
#u2tab.up.ok td:nth-child(3),
#u2tab.up.bz td:nth-child(3),
#u2tab.up.q td:nth-child(3) {
width: 19em;
width: 18em;
}
@media (max-width: 65em) {
#u2tab {
font-size: .9em;
}
}
@media (max-width: 50em) {
#u2tab.up.ok td:nth-child(3),
#u2tab.up.bz td:nth-child(3),
#u2tab.up.q td:nth-child(3) {
width: 16em;
}
}
#op_up2k.srch td.prog {
font-family: sans-serif;
@@ -2348,7 +2371,7 @@ html.y #bbox-overlay figcaption a {
width: 48em;
}
#u2conf.ww {
width: 74em;
width: 78em;
}
#u2conf.ww #u2c3w {
width: 29em;
@@ -2449,11 +2472,11 @@ html.b #u2conf a.b:hover {
color: var(--fg-max);
font-style: italic;
text-align: center;
font-size: .9em;
margin: 1em 0;
font-size: 1.2em;
margin: .8em 0;
}
#u2foot .warn {
font-size: 1.3em;
font-size: 1.2em;
padding: .5em .8em;
margin: 1em -.6em;
border-width: .1em 0;
@@ -2467,6 +2490,9 @@ html.b #u2conf a.b:hover {
font-size: .9em;
font-weight: normal;
}
#u2foot>*+* {
margin-top: 1.5em;
}
.prog {
font-family: 'scp', monospace, monospace;
}

View File

@@ -148,7 +148,8 @@
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }},
turbolvl = {{ turbolvl|tojson }},
turbolvl = {{ turbolvl }},
u2sort = "{{ u2sort }}",
have_emp = {{ have_emp|tojson }},
txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}

View File

@@ -63,6 +63,7 @@ var Ls = {
"ul_par": "parallel uploads:",
"ut_mt": "continue hashing other files while uploading$N$Nmaybe disable if your CPU or HDD is a bottleneck",
"ut_ask": "ask for confirmation before upload starts",
"ut_pot": "improve upload speed on slow devices$Nby making the UI less complex",
"ut_srch": "don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)",
"ut_par": "pause uploads by setting it to 0$N$Nincrease if your connection is slow / high latency$N$Nkeep it 1 on LAN or if the server HDD is a bottleneck",
"ul_btn": "drop files / folders<br>here (or click me)",
@@ -113,6 +114,8 @@ var Ls = {
"cut_flag": "ensure only one tab is uploading at a time $N -- other tabs must have this enabled too $N -- only affects tabs on the same domain",
"cut_az": "upload files in alphabetical order, rather than smallest-file-first$N$Nalphabetical order can make it easier to eyeball if something went wrong on the server, but it makes uploading slightly slower on fiber / LAN",
"cft_text": "favicon text (blank and refresh to disable)",
"cft_fg": "foreground color",
"cft_bg": "background color",
@@ -143,7 +146,7 @@ var Ls = {
"mt_caac": "convert aac / m4a to opus\">aac",
"mt_coth": "convert all others (not mp3) to opus\">oth",
"mt_tint": "background level (0-100) on the seekbar$Nto make buffering less distracting",
"mt_eq": "enables the equalizer and gain control;$Nboost 0 = unmodified 100% volume$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero if you care about that",
"mt_eq": "enables the equalizer and gain control;$N$Nboost &lt;code&gt;0&lt;/code&gt; = standard 100% volume (unmodified)$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = standard stereo (unmodified)$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% left-right crossfeed$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = vocal removal :^)$N$Nenabling the equalizer makes gapless albums fully gapless, so leave it on with all the values at zero (except width = 1) if you care about that",
"mb_play": "play",
"mm_hashplay": "play this audio file?",
@@ -285,6 +288,9 @@ var Ls = {
"u_https2": "switch to https",
"u_https3": "for much better performance",
"u_ancient": 'your browser is impressively ancient -- maybe you should <a href="#" onclick="goto(\'bup\')">use bup instead</a>',
"u_enpot": 'switch to <a href="#">potato UI</a> (may improve upload speed)',
"u_depot": 'switch to <a href="#">fancy UI</a> (may reduce upload speed)',
"u_gotpot": 'switching to the potato UI for improved upload speed,\n\nfeel free to disagree and switch back!',
"u_ever": "this is the basic uploader; up2k needs at least<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
"u_su2k": 'this is the basic uploader; <a href="#" id="u2yea">up2k</a> is better',
"u_ewrite": 'you do not have write-access to this folder',
@@ -305,6 +311,7 @@ var Ls = {
"u_ehsfin": "server rejected the request to finalize upload",
"u_ehssrch": "server rejected the request to perform search",
"u_ehsinit": "server rejected the request to initiate upload",
"u_ehsdf": "server ran out of disk space!\n\nwill keep retrying, in case someone\nfrees up enough space to continue",
"u_s404": "not found on server",
"u_expl": "explain",
"u_tu": '<p class="warn">WARNING: turbo enabled, <span>&nbsp;client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
@@ -388,6 +395,7 @@ var Ls = {
"ul_par": "samtidige handl.:",
"ut_mt": "fortsett å befare køen mens opplastning foregår$N$Nskru denne av dersom du har en$Ntreg prosessor eller harddisk",
"ut_ask": "bekreft filutvalg før opplastning starter",
"ut_pot": "forbedre ytelsen på trege enheter ved å$Nforenkle brukergrensesnittet",
"ut_srch": "utfør søk istedenfor å laste opp --$Nleter igjennom alle mappene du har lov til å se",
"ut_par": "sett til 0 for å midlertidig stanse opplastning$N$Nhøye verdier (4 eller 8) kan gi bedre ytelse,$Nspesielt på trege internettlinjer$N$Nbør ikke være høyere enn 1 på LAN$Neller hvis serveren sin harddisk er treg",
"ul_btn": "slipp filer / mapper<br>her (eller klikk meg)",
@@ -438,6 +446,8 @@ var Ls = {
"cut_flag": "samkjører nettleserfaner slik at bare én $N kan holde på med befaring / opplastning $N -- andre faner må også ha denne skrudd på $N -- fungerer kun innenfor samme domene",
"cut_az": "last opp filer i alfabetisk rekkefølge, istedenfor minste-fil-først$N$Nalfabetisk kan gjøre det lettere å anslå om alt gikk bra, men er bittelitt tregere på fiber / LAN",
"cft_text": "ikontekst (blank ut og last siden på nytt for å deaktivere)",
"cft_fg": "farge",
"cft_bg": "bakgrunnsfarge",
@@ -468,7 +478,7 @@ var Ls = {
"mt_caac": "konverter aac / m4a-filer til to opus\">aac",
"mt_coth": "konverter alt annet (men ikke mp3) til opus\">andre",
"mt_tint": "nivå av bakgrunnsfarge på søkestripa (0-100),$Ngjør oppdateringer mindre distraherende",
"mt_eq": "aktiver tonekontroll og forsterker;$Nboost 0 = normal volumskala$N$Nreduserer også dødtid imellom sangfiler",
"mt_eq": "aktiver tonekontroll og forsterker;$N$Nboost &lt;code&gt;0&lt;/code&gt; = normal volumskala$N$Nwidth &lt;code&gt;1 &nbsp;&lt;/code&gt; = normal stereo$Nwidth &lt;code&gt;0.5&lt;/code&gt; = 50% blanding venstre-høyre$Nwidth &lt;code&gt;0 &nbsp;&lt;/code&gt; = mono$N$Nboost &lt;code&gt;-0.8&lt;/code&gt; &amp; width &lt;code&gt;10&lt;/code&gt; = instrumental :^)$N$Nreduserer også dødtid imellom sangfiler",
"mb_play": "lytt",
"mm_hashplay": "spill denne sangen?",
@@ -610,6 +620,9 @@ var Ls = {
"u_https2": "bytte til https",
"u_https3": "for mye høyere hastighet",
"u_ancient": 'nettleseren din er prehistorisk -- mulig du burde <a href="#" onclick="goto(\'bup\')">bruke bup istedenfor</a>',
"u_enpot": 'bytt til <a href="#">enkelt UI</a> (gir sannsynlig raskere opplastning)',
"u_depot": 'bytt til <a href="#">snæsent UI</a> (gir sannsynlig tregere opplastning)',
"u_gotpot": 'byttet til et enklere UI for å laste opp raskere,\n\ndu kan gjerne bytte tilbake altså!',
"u_ever": "dette er den primitive opplasteren; up2k krever minst:<br>chrome 21 // firefox 13 // edge 12 // opera 12 // safari 5.1",
"u_su2k": 'dette er den primitive opplasteren; <a href="#" id="u2yea">up2k</a> er bedre',
"u_ewrite": 'du har ikke skrivetilgang i denne mappen',
@@ -630,6 +643,7 @@ var Ls = {
"u_ehsfin": "server nektet forespørselen om å ferdigstille filen",
"u_ehssrch": "server nektet forespørselen om å utføre søk",
"u_ehsinit": "server nektet forespørselen om å begynne en ny opplastning",
"u_ehsdf": "serveren er full!\n\nprøver igjen regelmessig,\ni tilfelle noen rydder litt...",
"u_s404": "ikke funnet på serveren",
"u_expl": "forklar",
"u_tu": '<p class="warn">ADVARSEL: turbo er på, <span>&nbsp;avbrutte opplastninger vil muligens ikke oppdages og gjenopptas; hold musepekeren over turbo-knappen for mer info</span></p>',
@@ -717,6 +731,10 @@ ebi('op_up2k').innerHTML = (
' <label for="multitask" tt="' + L.ut_mt + '">🏃</label>\n' +
' </td>\n' +
' <td class="c" rowspan="2">\n' +
' <input type="checkbox" id="potato" />\n' +
' <label for="potato" tt="' + L.ut_pot + '">🥔</label>\n' +
' </td>\n' +
' <td class="c" rowspan="2">\n' +
' <input type="checkbox" id="ask_up" />\n' +
' <label for="ask_up" tt="' + L.ut_ask + '">💭</label>\n' +
' </td>\n' +
@@ -772,10 +790,10 @@ ebi('op_up2k').innerHTML = (
' </tr>\n' +
' </thead>\n' +
' <tbody></tbody>\n' +
'</table></div>\n' +
'</table><div id="u2mu"></div></div>\n' +
'<p id="u2flagblock"><b>' + L.ul_flagblk + '</p>\n' +
'<p id="u2foot"></p>'
'<div id="u2foot"></div>'
);
@@ -824,6 +842,7 @@ ebi('op_cfg').innerHTML = (
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="' + L.cut_turbo + '">turbo</a>\n' +
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="' + L.cut_datechk + '">date-chk</a>\n' +
' <a id="flag_en" class="tgl btn" href="#" tt="' + L.cut_flag + '">💤</a>\n' +
' <a id="u2sort" class="tgl btn" href="#" tt="' + L.cut_az + '">az</a>\n' +
' </td>\n' +
' </div>\n' +
'</div>\n' +
@@ -1873,6 +1892,7 @@ var audio_eq = (function () {
"gains": [4, 3, 2, 1, 0, 0, 1, 2, 3, 4],
"filters": [],
"amp": 0,
"chw": 1,
"last_au": null,
"acst": {}
};
@@ -1924,6 +1944,7 @@ var audio_eq = (function () {
try {
r.amp = fcfg_get('au_eq_amp', r.amp);
r.chw = fcfg_get('au_eq_chw', r.chw);
var gains = jread('au_eq_gain', r.gains);
if (r.gains.length == gains.length)
r.gains = gains;
@@ -1933,12 +1954,14 @@ var audio_eq = (function () {
r.draw = function () {
jwrite('au_eq_gain', r.gains);
swrite('au_eq_amp', r.amp);
swrite('au_eq_chw', r.chw);
var txt = QSA('input.eq_gain');
for (var a = 0; a < r.bands.length; a++)
txt[a].value = r.gains[a];
QS('input.eq_gain[band="amp"]').value = r.amp;
QS('input.eq_gain[band="chw"]').value = r.chw;
};
r.stop = function () {
@@ -2008,16 +2031,47 @@ var audio_eq = (function () {
for (var a = r.filters.length - 1; a >= 0; a--)
r.filters[a].connect(a > 0 ? r.filters[a - 1] : actx.destination);
if (Math.round(r.chw * 25) != 25) {
var split = actx.createChannelSplitter(2),
merge = actx.createChannelMerger(2),
lg1 = actx.createGain(),
lg2 = actx.createGain(),
rg1 = actx.createGain(),
rg2 = actx.createGain(),
vg1 = 1 - (1 - r.chw) / 2,
vg2 = 1 - vg1;
console.log('chw', vg1, vg2);
merge.connect(r.filters[r.filters.length - 1]);
lg1.gain.value = rg2.gain.value = vg1;
lg2.gain.value = rg1.gain.value = vg2;
lg1.connect(merge, 0, 0);
rg1.connect(merge, 0, 0);
lg2.connect(merge, 0, 1);
rg2.connect(merge, 0, 1);
split.connect(lg1, 0);
split.connect(lg2, 0);
split.connect(rg1, 1);
split.connect(rg2, 1);
r.filters.push(split);
mp.acs.channelCountMode = 'explicit';
}
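// worked example of the crossfeed gains above (illustrative, not from the source):
// each output channel becomes vg1 * its own input + vg2 * the other input,
// with vg1 = 1 - (1 - chw) / 2 and vg2 = 1 - vg1;
// chw 1   -> vg1 1.0,  vg2 0.0   (plain stereo)
// chw 0.5 -> vg1 0.75, vg2 0.25  (partial crossfeed)
// chw 0   -> vg1 0.5,  vg2 0.5   (mono)
// chw 10  -> vg1 5.5,  vg2 -4.5  (center cancellation; matches the vocal-removal hint in the mt_eq tooltip)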
mp.acs.connect(r.filters[r.filters.length - 1]);
}
function eq_step(e) {
ev(e);
var band = parseInt(this.getAttribute('band')),
var sb = this.getAttribute('band'),
band = parseInt(sb),
step = parseFloat(this.getAttribute('step'));
if (isNaN(band))
if (sb == 'amp')
r.amp = Math.round((r.amp + step * 0.2) * 100) / 100;
else if (sb == 'chw')
r.chw = Math.round((r.chw + step * 0.2) * 100) / 100;
else
r.gains[band] += step;
@@ -2027,15 +2081,18 @@ var audio_eq = (function () {
function adj_band(that, step) {
var err = false;
try {
var band = parseInt(that.getAttribute('band')),
var sb = that.getAttribute('band'),
band = parseInt(sb),
vs = that.value,
v = parseFloat(vs);
if (isNaN(v) || v + '' != vs)
throw new Error('inval band');
if (isNaN(band))
if (sb == 'amp')
r.amp = Math.round((v + step * 0.2) * 100) / 100;
else if (sb == 'chw')
r.chw = Math.round((v + step * 0.2) * 100) / 100;
else
r.gains[band] = v + step;
@@ -2072,6 +2129,7 @@ var audio_eq = (function () {
vs.push([a, hz, r.gains[a]]);
}
vs.push(["amp", "boost", r.amp]);
vs.push(["chw", "width", r.chw]);
for (var a = 0; a < vs.length; a++) {
var b = vs[a][0];
@@ -2194,6 +2252,12 @@ function play(tid, is_ev, seek) {
if (window.thegrid)
thegrid.loadsel();
try {
if (actx.state == 'suspended')
actx.resume();
}
catch (ex) { }
try {
mp.au.play();
if (mp.au.paused)
@@ -2400,12 +2464,11 @@ function eval_hash() {
if (a)
QS(treectl.hidden ? '#path a:nth-last-child(2)' : '#treeul a.hl').focus();
else
QS(thegrid.en ? '#ggrid a' : '#files tbody a').focus();
QS(thegrid.en ? '#ggrid a' : '#files tbody tr[tabindex]').focus();
};
})(a);
var d = mknod('div');
d.setAttribute('id', 'acc_info');
var d = mknod('div', 'acc_info');
document.body.insertBefore(d, ebi('ops'));
})();
@@ -2670,8 +2733,7 @@ var fileman = (function () {
var rui = ebi('rui');
if (!rui) {
rui = mknod('div');
rui.setAttribute('id', 'rui');
rui = mknod('div', 'rui');
document.body.appendChild(rui);
}
@@ -3154,10 +3216,9 @@ var showfile = (function () {
return;
qsr('#prism_css');
var el = mknod('link');
var el = mknod('link', 'prism_css');
el.rel = 'stylesheet';
el.href = '/.cpr/deps/prism' + (light ? '' : 'd') + '.css';
el.setAttribute('id', 'prism_css');
document.head.appendChild(el);
};
@@ -3271,8 +3332,7 @@ var showfile = (function () {
fun = function (el) { };
qsr('#doc');
var el = mknod('pre');
el.setAttribute('id', 'doc');
var el = mknod('pre', 'doc');
el.setAttribute('tabindex', '0');
clmod(ebi('wrap'), 'doc', !is_md);
if (is_md) {
@@ -3300,9 +3360,8 @@ var showfile = (function () {
hfun(get_evpath() + '?doc=' + url.split('/').pop());
qsr('#docname');
el = mknod('span');
el = mknod('span', 'docname');
el.textContent = tname;
el.setAttribute('id', 'docname');
ebi('path').appendChild(el);
r.updtree();
@@ -3437,9 +3496,8 @@ var showfile = (function () {
var thegrid = (function () {
var lfiles = ebi('files'),
gfiles = mknod('div');
gfiles = mknod('div', 'gfiles');
gfiles.setAttribute('id', 'gfiles');
gfiles.style.display = 'none';
gfiles.innerHTML = (
'<div id="ghead" class="ghead">' +
@@ -3938,6 +3996,9 @@ document.onkeydown = function (e) {
}
}
if (k == 'Enter' && ae && (ae.onclick || ae.hasAttribute('tabIndex')))
return ev(e) && ae.click() || true;
if (aet && aet != 'a' && aet != 'tr' && aet != 'pre')
return;

View File

@@ -13,8 +13,7 @@ audio_eq.apply = function () {
var can = ebi('fft_can');
if (!can) {
can = mknod('canvas');
can.setAttribute('id', 'fft_can');
can = mknod('canvas', 'fft_can');
can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
document.body.appendChild(can);
can.width = w;

View File

@@ -173,8 +173,7 @@ md_plug_err = function (ex, js) {
o.textContent = lns[ln - 1];
}
}
var errbox = mknod('div');
errbox.setAttribute('id', 'md_errbox');
var errbox = mknod('div', 'md_errbox');
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
errbox.textContent = msg;
errbox.onclick = function () {

View File

@@ -36,6 +36,11 @@
width: 55em;
width: min(55em, calc(100% - 2em));
}
#mtw.single.editor,
#mw.single.editor {
width: calc(100% - 1em);
left: .5em;
}
#mp {

View File

@@ -16,8 +16,7 @@ var dom_sbs = ebi('sbs');
var dom_nsbs = ebi('nsbs');
var dom_tbox = ebi('toolsbox');
var dom_ref = (function () {
var d = mknod('div');
d.setAttribute('id', 'mtr');
var d = mknod('div', 'mtr');
dom_swrap.appendChild(d);
d = ebi('mtr');
// hide behind the textarea (offsetTop is not computed if display:none)
@@ -509,6 +508,20 @@ function setsel(s) {
}
// cut/copy current line
function md_cut(cut) {
var s = linebounds();
if (s.car != s.cdr)
return;
dom_src.setSelectionRange(s.n1, s.n2 + 1, 'forward');
setTimeout(function () {
var i = cut ? s.n1 : s.car;
dom_src.setSelectionRange(i, i, 'forward');
}, 1);
}
// indent/dedent
function md_indent(dedent) {
var s = getsel(),
@@ -955,6 +968,10 @@ var set_lno = (function () {
md_p_jump(dn);
return false;
}
if (ev.code == "KeyX" || ev.code == "KeyC") {
md_cut(ev.code == "KeyX");
return true; //sic
}
}
else {
if (ev.code == "Tab" || kc == 9) {

View File

@@ -36,6 +36,7 @@
<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
<tr><td>db-act</td><td id="u">{{ dbwt }}</td></tr>
</table>
</td><td>
<table class="vols">
@@ -50,8 +51,8 @@
</table>
</td></tr></table>
<div class="btns">
<a id="d" href="/?stack" tt="shows the state of all active threads">dump stack</a>
<a id="e" href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a>
<a id="d" href="/?stack">dump stack</a>
<a id="e" href="/?reload=cfg">reload cfg</a>
</div>
{%- endif %}

View File

@@ -23,6 +23,12 @@ var Ls = {
"r1": "gå hjem",
".s1": "kartlegg",
"t1": "handling",
"u2": "tid siden noen sist skrev til serveren$N( opplastning / navneendring / ... )$N$N17d = 17 dager$N1h23 = 1 time 23 minutter$N4m56 = 4 minuter 56 sekunder",
},
"eng": {
"d2": "shows the state of all active threads",
"e2": "reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes",
"u2": "time since the last server write$N( upload / rename / ... )$N$N17d = 17 days$N1h23 = 1 hour 23 minutes$N4m56 = 4 minutes 56 seconds",
}
},
d = Ls[sread("lang") || lang];
@@ -40,5 +46,10 @@ for (var k in (d || {})) {
}
tt.init();
if (!ebi('c'))
QS('input[name="cppwd"]').focus();
var o = QS('input[name="cppwd"]');
if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)
o.focus();
o = ebi('u');
if (o && /[0-9]+$/.exec(o.innerHTML))
o.innerHTML = shumantime(o.innerHTML);

View File

@@ -15,6 +15,7 @@ function goto_up2k() {
// chrome requires https to use crypto.subtle,
// usually it's undefined but some chromes throw on invoke
var up2k = null,
up2k_hooks = [],
sha_js = window.WebAssembly ? 'hw' : 'ac', // ff53,c57,sa11
m = 'will use ' + sha_js + ' instead of native sha512 due to';
@@ -135,14 +136,19 @@ function up2k_flagbus() {
}
function U2pvis(act, btns, uc) {
function U2pvis(act, btns, uc, st) {
var r = this;
r.act = act;
r.ctr = { "ok": 0, "ng": 0, "bz": 0, "q": 0 };
r.tab = [];
r.hq = {};
r.head = 0;
r.tail = -1;
r.wsz = 3;
r.npotato = 99;
r.modn = 0;
r.modv = 0;
r.mod0 = null;
var markup = {
'404': '<span class="err">404</span>',
@@ -180,6 +186,9 @@ function U2pvis(act, btns, uc) {
};
r.is_act = function (card) {
if (uc.potato && !uc.fsearch)
return false;
if (r.act == "done")
return card == "ok" || card == "ng";
@@ -196,11 +205,14 @@ function U2pvis(act, btns, uc) {
if (!r.is_act(fo.in))
return;
var obj = ebi('f{0}{1}'.format(nfile, field.slice(1)));
var k = 'f' + nfile + '' + field.slice(1),
obj = ebi(k);
obj.innerHTML = field == 'ht' ? (markup[html] || html) : html;
if (field == 'hp') {
obj.style.color = '';
obj.style.background = '';
delete r.hq[nfile];
}
};
@@ -238,18 +250,13 @@ function U2pvis(act, btns, uc) {
nb = fo.bt * (++fo.nh / fo.cb.length),
p = r.perc(nb, 0, fobj.size, fobj.t_hashing);
fo.hp = '{0}%, {1}, {2} MB/s'.format(
f2f(p[0], 2), p[1], f2f(p[2], 2)
);
fo.hp = f2f(p[0], 2) + '%, ' + p[1] + ', ' + f2f(p[2], 2) + ' MB/s';
if (!r.is_act(fo.in))
return;
var obj = ebi('f{0}p'.format(fobj.n)),
o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
var o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
obj.innerHTML = fo.hp;
obj.style.color = '#fff';
obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
r.hq[fobj.n] = [fo.hp, '#fff', 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)'];
};
r.prog = function (fobj, nchunk, cbd) {
@@ -260,14 +267,12 @@ function U2pvis(act, btns, uc) {
fo.bd += delta;
var p = r.perc(fo.bd, fo.bd0, fo.bt, fobj.t_uploading);
fo.hp = '{0}%, {1}, {2} MB/s'.format(
f2f(p[0], 2), p[1], f2f(p[2], 2)
);
fo.hp = f2f(p[0], 2) + '%, ' + p[1] + ', ' + f2f(p[2], 2) + ' MB/s';
if (!r.is_act(fo.in))
return;
var obj = ebi('f{0}p'.format(fobj.n)),
var obj = ebi('f' + fobj.n + 'p'),
o1 = p[0] - 2, o2 = p[0] - 0.1, o3 = p[0];
if (!obj) {
@@ -304,9 +309,7 @@ function U2pvis(act, btns, uc) {
throw new Error('see console');
}
obj.innerHTML = fo.hp;
obj.style.color = '#fff';
obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)';
r.hq[fobj.n] = [fo.hp, '#fff', 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)'];
};
r.move = function (nfile, newcat) {
@@ -320,6 +323,10 @@ function U2pvis(act, btns, uc) {
fo.in = newcat;
r.ctr[oldcat]--;
r.ctr[newcat]++;
while (st.car < r.tab.length && has(['ok', 'ng'], r.tab[st.car].in))
st.car++;
r.drawcard(oldcat);
r.drawcard(newcat);
if (r.is_act(newcat)) {
@@ -342,7 +349,10 @@ function U2pvis(act, btns, uc) {
};
r.bzw = function () {
var first = QS('#u2tab>tbody>tr:first-child');
var mod = 0,
t0 = Date.now(),
first = QS('#u2tab>tbody>tr:first-child');
if (!first)
return;
@@ -352,14 +362,132 @@ function U2pvis(act, btns, uc) {
while (r.head - first > r.wsz) {
qsr('#f' + (first++));
mod++;
}
while (last - r.tail < r.wsz && last < r.tab.length - 2) {
while (last - r.tail < r.wsz && last < r.tab.length - 1) {
var obj = ebi('f' + (++last));
if (!obj)
if (!obj) {
r.addrow(last);
mod++;
}
}
if (mod && r.modn < 200 && ebi('repl').offsetTop) {
if (++r.modn >= 10) {
if (r.modn == 10)
r.mod0 = Date.now();
r.modv += Date.now() - t0;
}
if (r.modn >= 200) {
var n = r.modn - 10,
ipu = r.modv / n,
spu = (Date.now() - r.mod0) / n,
ir = spu / ipu;
console.log('bzw:', f2f(ipu, 2), ' spu:', f2f(spu, 2), ' ir:', f2f(ir, 2), ' tab:', r.tab.length);
// efficiency estimates;
// ir: 5=16% 4=50%,30% 27=100%
// ipu: 2.7=16% 2=30% 1.6=50% 1.8=100% (ng for big files)
if (ipu >= 1.5 && ir <= 9 && r.tab.length >= 1000 && r.tab[Math.floor(r.tab.length / 3)].bt <= 1024 * 1024 * 4)
r.go_potato();
}
}
};
r.potatolabels = function () {
var ode = ebi('u2depotato'),
oen = ebi('u2enpotato');
if (!ode)
return;
ode.style.display = uc.potato ? '' : 'none';
oen.style.display = uc.potato ? 'none' : '';
}
r.potato = function () {
ebi('u2tabw').style.minHeight = '';
QS('#u2cards a[act="bz"]').click();
timer[uc.potato ? "add" : "rm"](draw_potato);
timer[uc.potato ? "rm" : "add"](apply_html);
r.potatolabels();
};
r.go_potato = function () {
r.go_potato = noop;
var ode = mknod('div', 'u2depotato'),
oen = mknod('div', 'u2enpotato'),
u2f = ebi('u2foot'),
btn = ebi('potato');
ode.innerHTML = L.u_depot;
oen.innerHTML = L.u_enpot;
if (sread('potato') === null) {
btn.click();
toast.inf(30, L.u_gotpot);
localStorage.removeItem('potato');
}
u2f.appendChild(ode);
u2f.appendChild(oen);
ode.onclick = oen.onclick = btn.onclick;
r.potatolabels();
};
function draw_potato() {
if (++r.npotato < 2)
return;
r.npotato = 0;
var html = [
"<p>files: &nbsp; <b>{0}</b> finished, &nbsp; <b>{1}</b> failed, &nbsp; <b>{2}</b> busy, &nbsp; <b>{3}</b> queued</p>".format(
r.ctr.ok, r.ctr.ng, r.ctr.bz, r.ctr.q)];
while (r.head < r.tab.length && has(["ok", "ng"], r.tab[r.head].in))
r.head++;
var act = null;
if (r.head < r.tab.length)
act = r.tab[r.head];
if (act)
html.push("<p>file {0} of {1} : &nbsp; {2} &nbsp; <code>{3}</code></p>\n<div>{4}</div>".format(
r.head + 1, r.tab.length, act.ht, act.hp, act.hn));
html = html.join('\n');
if (r.hpotato == html)
return;
r.hpotato = html;
ebi('u2mu').innerHTML = html;
}
function apply_html() {
var oq = {}, n = 0;
for (var k in r.hq) {
var o = ebi('f' + k + 'p');
if (!o)
continue;
oq[k] = o;
n++;
}
if (!n)
return;
for (var k in oq) {
var o = oq[k],
v = r.hq[k];
o.innerHTML = v[0];
o.style.color = v[1];
o.style.background = v[2];
}
r.hq = {};
}
r.drawcard = function (cat) {
var cards = QSA('#u2cards>a>span');
@@ -441,8 +569,7 @@ function U2pvis(act, btns, uc) {
if (as_html)
return '<tr id="f' + nfile + '">' + ret + '</tr>';
var obj = mknod('tr');
obj.setAttribute('id', 'f' + nfile);
var obj = mknod('tr', 'f' + nfile);
obj.innerHTML = ret;
return obj;
};
@@ -476,6 +603,7 @@ function U2pvis(act, btns, uc) {
}
r.changecard(r.act);
r.potato();
}
@@ -527,7 +655,7 @@ function Donut(uc, st) {
}
r.on = function (ya) {
r.fc = r.tc = 99;
r.fc = r.tc = r.dc = 99;
r.eta = null;
r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
@@ -543,10 +671,16 @@ function Donut(uc, st) {
var t = st.bytes.total - r.base,
v = pos() - r.base,
ofs = el.style.strokeDashoffset = o - o * v / t;
ofs = o - o * v / t;
if (!uc.potato || ++r.dc >= 4) {
el.style.strokeDashoffset = ofs;
r.dc = 0;
}
if (++r.tc >= 10) {
wintitle(f2f(v * 100 / t, 1) + '%, ' + r.eta + 's, ', true);
wintitle("{0}%, {1}, #{2}, ".format(
f2f(v * 100 / t, 1), shumantime(r.eta), st.files.length - st.nfile.upload), true);
r.tc = 0;
}
@@ -578,6 +712,12 @@ function fsearch_explain(n) {
function up2k_init(subtle) {
var r = {
"init_deps": init_deps,
"set_fsearch": set_fsearch,
"gotallfiles": [gotallfiles] // hooks
};
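// hypothetical example of how a plugin could hook the file list before upload;
// the registration point (up2k.gotallfiles) and the chaining convention are
// assumptions based on the hook invocation further down in this diff:
// up2k.gotallfiles.unshift(function (good, nil, bad, hooks) {
//     good = good.filter(function (f) { return !/\.tmp$/.test(f[1]); });
//     hooks[0](good, nil, bad, hooks.slice(1));
// });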
function showmodal(msg) {
ebi('u2notbtn').innerHTML = msg;
ebi('u2btn').style.display = 'none';
@@ -612,7 +752,10 @@ function up2k_init(subtle) {
m = L.u_ancient;
setmsg('');
}
ebi('u2foot').innerHTML = '<big>' + m + '</big>';
qsr('#u2depmsg');
var o = mknod('div', 'u2depmsg');
o.innerHTML = m;
ebi('u2foot').appendChild(o);
}
loading_deps = true;
}
@@ -647,17 +790,24 @@ function up2k_init(subtle) {
var parallel_uploads = icfg_get('nthread'),
uc = {},
fdom_ctr = 0,
min_filebuf = 0;
min_filebuf = 0,
biggest_file = 0;
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
bcfg_bind(uc, 'potato', 'potato', false, set_potato, false);
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
bcfg_bind(uc, 'turbo', 'u2turbo', turbolvl > 1, draw_turbo, false);
bcfg_bind(uc, 'datechk', 'u2tdate', turbolvl < 3, null, false);
bcfg_bind(uc, 'az', 'u2sort', u2sort.indexOf('n') + 1, set_u2sort, false);
var st = {
"files": [],
"nfile": {
"hash": 0,
"upload": 0
},
"seen": {},
"todo": {
"head": [],
@@ -681,7 +831,16 @@ function up2k_init(subtle) {
"hashing": 0,
"uploading": 0,
"busy": 0
}
},
"eta": {
"h": "",
"u": "",
"t": ""
},
"car": 0,
"modn": 0,
"modv": 0,
"mod0": null
};
function push_t(arr, t) {
@@ -693,9 +852,13 @@ function up2k_init(subtle) {
});
}
var pvis = new U2pvis("bz", '#u2cards', uc),
var pvis = new U2pvis("bz", '#u2cards', uc, st),
donut = new Donut(uc, st);
r.ui = pvis;
r.st = st;
r.uc = uc;
var bobslice = null;
if (window.File)
bobslice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice;
@@ -758,8 +921,14 @@ function up2k_init(subtle) {
catch (ex) { }
ev(e);
e.dataTransfer.dropEffect = 'copy';
e.dataTransfer.effectAllowed = 'copy';
try {
e.dataTransfer.dropEffect = 'copy';
e.dataTransfer.effectAllowed = 'copy';
}
catch (ex) {
document.body.ondragenter = document.body.ondragleave = document.body.ondragover = null;
return modal.alert('your browser does not support drag-and-drop uploading');
}
clmod(ebi('drops'), 'vis', 1);
var v = this.getAttribute('v');
if (v)
@@ -808,6 +977,8 @@ function up2k_init(subtle) {
if (err)
return modal.alert('sorry, ' + err);
toast.inf(0, 'Scanning files...');
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
tgl_fsearch();
@@ -906,7 +1077,8 @@ function up2k_init(subtle) {
if (!dirs.length) {
if (!pf.length)
return gotallfiles(good, nil, bad);
// call first hook, pass list of remaining hooks to call
return r.gotallfiles[0](good, nil, bad, r.gotallfiles.slice(1));
console.log("retry pf, " + pf.length);
setTimeout(function () {
@@ -954,6 +1126,12 @@ function up2k_init(subtle) {
}
function gotallfiles(good_files, nil_files, bad_files) {
if (toast.txt == 'Scanning files...')
toast.hide();
if (uc.fsearch && !uc.turbo)
nil_files = [];
var ntot = good_files.concat(nil_files, bad_files).length;
if (bad_files.length) {
var msg = L.u_badf.format(bad_files.length, ntot);
@@ -1002,6 +1180,13 @@ function up2k_init(subtle) {
var evpath = get_evpath(),
draw_each = good_files.length < 50;
if (!uc.az)
good_files.sort(function (a, b) {
a = a[0].size;
b = b[0].size;
return a < b ? -1 : a > b ? 1 : 0;
});
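// e.g. with the az toggle off, a 3 KiB file is queued before a 2 GiB one (smallest-file-first)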
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0],
name = good_files[a][1],
@@ -1032,6 +1217,9 @@ function up2k_init(subtle) {
if (uc.fsearch)
entry.srch = 1;
if (biggest_file < entry.size)
biggest_file = entry.size;
try {
if (st.seen[fdir][key])
continue;
@@ -1047,9 +1235,9 @@ function up2k_init(subtle) {
entry.purl + uricom_enc(entry.name)).join(' '),
'📐 ' + L.u_hashing,
''
], fobj.size, draw_each);
], entry.size, draw_each);
st.bytes.total += fobj.size;
st.bytes.total += entry.size;
st.files.push(entry);
if (!entry.size)
push_t(st.todo.handshake, entry);
@@ -1074,7 +1262,7 @@ function up2k_init(subtle) {
}
more_one_file();
var etaref = 0, etaskip = 0, utw_minh = 0;
var etaref = 0, etaskip = 0, utw_minh = 0, utw_read = 0;
function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1087,18 +1275,32 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
if (++utw_read >= 20) {
utw_read = 0;
utw_minh = parseInt(ebi('u2tabw').style.minHeight || '0');
}
var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0;
if (utw_minh < minh || !utw_minh) {
utw_minh = minh;
ebi('u2tabw').style.minHeight = utw_minh + 'px';
}
if (!nhash)
ebi('u2etah').innerHTML = L.u_etadone.format(humansize(st.bytes.hashed), pvis.ctr["ok"] + pvis.ctr["ng"]);
if (!nhash) {
var h = L.u_etadone.format(humansize(st.bytes.hashed), pvis.ctr.ok + pvis.ctr.ng);
if (st.eta.h !== h)
st.eta.h = ebi('u2etah').innerHTML = h;
}
if (!nsend && !nhash)
ebi('u2etau').innerHTML = ebi('u2etat').innerHTML = (
L.u_etadone.format(humansize(st.bytes.uploaded), pvis.ctr["ok"] + pvis.ctr["ng"]));
if (!nsend && !nhash) {
var h = L.u_etadone.format(humansize(st.bytes.uploaded), pvis.ctr.ok + pvis.ctr.ng);
if (st.eta.u !== h)
st.eta.u = ebi('u2etau').innerHTML = h;
if (st.eta.t !== h)
st.eta.t = ebi('u2etat').innerHTML = h;
}
if (!st.busy.hash.length && !hashing_permitted())
nhash = 0;
@@ -1129,19 +1331,21 @@ function up2k_init(subtle) {
for (var a = 0; a < t.length; a++) {
var rem = st.bytes.total - t[a][2],
bps = t[a][1] / t[a][3],
hid = t[a][0],
eid = hid.slice(-1),
eta = Math.floor(rem / bps);
if (t[a][1] < 1024 || t[a][3] < 0.1) {
ebi(t[a][0]).innerHTML = L.u_etaprep;
ebi(hid).innerHTML = L.u_etaprep;
continue;
}
donut.eta = eta;
if (etaskip)
continue;
ebi(t[a][0]).innerHTML = '{0}, {1}/s, {2}'.format(
st.eta[eid] = '{0}, {1}/s, {2}'.format(
humansize(rem), humansize(bps, 1), humantime(eta));
if (!etaskip)
ebi(hid).innerHTML = st.eta[eid];
}
if (++etaskip > 2)
etaskip = 0;
@@ -1171,6 +1375,10 @@ function up2k_init(subtle) {
st.busy.handshake.length)
return false;
if (t.n - st.car > 8)
// prevent runahead from a stuck upload (slow server hdd)
return false;
if ((uc.multitask ? 1 : 0) <
st.todo.upload.length +
st.busy.upload.length)
@@ -1184,9 +1392,14 @@ function up2k_init(subtle) {
return false;
if (uc.multitask) {
var ahead = st.bytes.hashed - st.bytes.finished;
return ahead < 1024 * 1024 * 1024 * 4 &&
st.todo.handshake.length + st.busy.handshake.length < 16;
if (!uc.az)
return st.todo.handshake.length + st.busy.handshake.length < 2;
var ahead = st.bytes.hashed - st.bytes.finished,
nmax = ahead < biggest_file / 8 ? 32 : 16;
return ahead < biggest_file &&
st.todo.handshake.length + st.busy.handshake.length < nmax;
}
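// illustrative numbers for the multitask branch above: with biggest_file = 800 MiB,
// more hashing is allowed only while the hashed-but-unfinished runahead is below 800 MiB
// and fewer than 32 handshakes are pending (16 once the runahead exceeds 100 MiB = 800/8)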
return handshakes_permitted() && 0 ==
st.todo.handshake.length +
@@ -1213,16 +1426,7 @@ function up2k_init(subtle) {
running = true;
while (true) {
var now = Date.now(),
oldest_active = Math.min( // gzip take the wheel
st.todo.head.length ? st.todo.head[0].n : st.files.length,
st.todo.hash.length ? st.todo.hash[0].n : st.files.length,
st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length,
st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length,
st.busy.head.length ? st.busy.head[0].n : st.files.length,
st.busy.hash.length ? st.busy.hash[0].n : st.files.length,
st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length,
st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length),
is_busy = oldest_active < st.files.length;
is_busy = st.car < st.files.length;
if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) {
@@ -1250,25 +1454,7 @@ function up2k_init(subtle) {
donut.on(is_busy);
if (!is_busy) {
var sr = uc.fsearch,
ok = pvis.ctr["ok"],
ng = pvis.ctr["ng"],
t = uc.ask_up ? 0 : 10;
if (ok && ng)
toast.warn(t, (sr ? L.ur_sm : L.ur_um).format(ok, ng));
else if (ok > 1)
toast.ok(t, (sr ? L.ur_aso : L.ur_auo).format(ok));
else if (ok)
toast.ok(t, sr ? L.ur_1so : L.ur_1uo);
else if (ng > 1)
toast.err(t, (sr ? L.ur_asn : L.ur_aun).format(ng));
else if (ng)
toast.err(t, sr ? L.ur_1sn : L.ur_1un);
timer.rm(etafun);
timer.rm(donut.do);
utw_minh = 0;
uptoast();
}
else {
timer.add(donut.do);
@@ -1321,7 +1507,7 @@ function up2k_init(subtle) {
if (st.todo.head.length &&
st.busy.head.length < parallel_uploads &&
(!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) {
(!is_busy || st.todo.head[0].n - st.car < parallel_uploads * 2)) {
exec_head();
mou_ikkai = true;
}
@@ -1346,6 +1532,31 @@ function up2k_init(subtle) {
mou_ikkai = true;
}
if (is_busy && st.modn < 100) {
var t0 = Date.now() + (ebi('repl').offsetTop ? 0 : 0);
if (++st.modn >= 10) {
if (st.modn == 10)
st.mod0 = Date.now();
st.modv += Date.now() - t0;
}
if (st.modn >= 100) {
var n = st.modn - 10,
ipu = st.modv / n,
spu = (Date.now() - st.mod0) / n,
ir = spu / ipu;
console.log('tsk:', f2f(ipu, 2), ' spu:', f2f(spu, 2), ' ir:', f2f(ir, 2));
// efficiency estimates;
// ir: 8=16% 11=60% 16=90% 24=100%
// ipu: 1=40% .8=60% .3=100%
if (ipu >= 0.5 && ir <= 15)
pvis.go_potato();
}
}
if (!mou_ikkai || crashed)
return defer();
}
@@ -1354,6 +1565,30 @@ function up2k_init(subtle) {
return taskerd;
})();
function uptoast() {
var sr = uc.fsearch,
ok = pvis.ctr.ok,
ng = pvis.ctr.ng,
t = uc.ask_up ? 0 : 10;
console.log('toast', ok, ng);
if (ok && ng)
toast.warn(t, (sr ? L.ur_sm : L.ur_um).format(ok, ng));
else if (ok > 1)
toast.ok(t, (sr ? L.ur_aso : L.ur_auo).format(ok));
else if (ok)
toast.ok(t, sr ? L.ur_1so : L.ur_1uo);
else if (ng > 1)
toast.err(t, (sr ? L.ur_asn : L.ur_aun).format(ng));
else if (ng)
toast.err(t, sr ? L.ur_1sn : L.ur_1un);
timer.rm(etafun);
timer.rm(donut.do);
utw_minh = 0;
}
function chill(t) {
var now = Date.now();
if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
@@ -1429,6 +1664,7 @@ function up2k_init(subtle) {
function exec_hash() {
var t = st.todo.hash.shift();
st.busy.hash.push(t);
st.nfile.hash = t.n;
var bpend = 0,
nchunk = 0,
@@ -1466,7 +1702,7 @@ function up2k_init(subtle) {
hash_calc(nch, e.target.result);
}
reader.onload = function (e) {
try { orz(e); } catch (ex) { vis_exh(ex + '', '', '', '', ex); }
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
};
reader.onerror = function () {
var err = reader.error + '';
@@ -1499,7 +1735,7 @@ function up2k_init(subtle) {
var hash_calc = function (nch, buf) {
while (segm_next());
var hash_done = function (hashbuf) {
var orz = function (hashbuf) {
var hslice = new Uint8Array(hashbuf).subarray(0, 33),
b64str = buf2b64(hslice);
@@ -1525,6 +1761,10 @@ function up2k_init(subtle) {
tasker();
};
var hash_done = function (hashbuf) {
try { orz(hashbuf); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
};
if (subtle)
subtle.digest('SHA-512', buf).then(hash_done);
else setTimeout(function () {
@@ -1592,7 +1832,7 @@ function up2k_init(subtle) {
tasker();
};
xhr.onload = function (e) {
try { orz(e); } catch (ex) { vis_exh(ex + '', '', '', '', ex); }
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
};
xhr.open('HEAD', t.purl + uricom_enc(t.name) + '?raw', true);
@@ -1748,6 +1988,8 @@ function up2k_init(subtle) {
f2f(spd1, 2), isNaN(spd2) ? '--' : f2f(spd2, 2)));
pvis.move(t.n, 'ok');
if (!pvis.ctr.bz && !pvis.ctr.q)
uptoast();
}
else {
if (t.t_uploaded)
@@ -1792,6 +2034,9 @@ function up2k_init(subtle) {
t.want_recheck = true;
}
}
if (rsp.indexOf('server HDD is full') + 1)
return toast.err(0, L.u_ehsdf + "\n\n" + rsp.replace(/.*; /, ''));
if (err != "") {
pvis.seth(t.n, 1, "ERROR");
pvis.seth(t.n, 2, err);
@@ -1806,7 +2051,7 @@ function up2k_init(subtle) {
}
}
xhr.onload = function (e) {
try { orz(e); } catch (ex) { vis_exh(ex + '', '', '', '', ex); }
try { orz(e); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
};
var req = {
@@ -1845,6 +2090,7 @@ function up2k_init(subtle) {
function exec_upload() {
var upt = st.todo.upload.shift();
st.busy.upload.push(upt);
st.nfile.upload = upt.nfile;
var npart = upt.npart,
t = st.files[upt.nfile],
@@ -1892,12 +2138,15 @@ function up2k_init(subtle) {
tasker();
}
function do_send() {
var xhr = new XMLHttpRequest();
var xhr = new XMLHttpRequest(),
bfin = Math.floor(st.bytes.finished / 1024 / 1024),
btot = Math.floor(st.bytes.total / 1024 / 1024);
xhr.upload.onprogress = function (xev) {
pvis.prog(t, npart, xev.loaded);
};
xhr.onload = function (xev) {
try { orz(xhr); } catch (ex) { vis_exh(ex + '', '', '', '', ex); }
try { orz(xhr); } catch (ex) { vis_exh(ex + '', 'up2k.js', '', '', ex); }
};
xhr.onerror = function (xev) {
if (crashed)
@@ -1912,6 +2161,9 @@ function up2k_init(subtle) {
xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
xhr.setRequestHeader("X-Up2k-Wark", t.wark);
xhr.setRequestHeader("X-Up2k-Stat", "{0}/{1}/{2}/{3} {4}/{5} {6}".format(
pvis.ctr.ok, pvis.ctr.ng, pvis.ctr.bz, pvis.ctr.q, btot, btot - bfin,
st.eta.t.split(' ').pop()));
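// illustrative header value (not captured from a real session):
//   X-Up2k-Stat: 12/0/3/45 1024/724 00:02:08
//   = ok/failed/busy/queued files, total/remaining MiB, overall ETA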
xhr.setRequestHeader('Content-Type', 'application/octet-stream');
if (xhr.overrideMimeType)
xhr.overrideMimeType('Content-Type', 'application/octet-stream');
@@ -1943,7 +2195,7 @@ function up2k_init(subtle) {
ebi('u2conf').className = ebi('u2cards').className = ebi('u2etaw').className = wide;
}
wide = write && wem > 78 ? 'ww' : wide;
wide = write && wem > 82 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) {
@@ -2017,24 +2269,27 @@ function up2k_init(subtle) {
}
function draw_turbo() {
var msg = uc.fsearch ? L.u_ts : L.u_tu,
omsg = uc.fsearch ? L.u_tu : L.u_ts,
html = ebi('u2foot').innerHTML,
ohtml = html;
var msg = (turbolvl || !uc.turbo) ? null : uc.fsearch ? L.u_ts : L.u_tu,
html = ebi('u2foot').innerHTML;
if (turbolvl || !uc.turbo)
msg = null;
if (msg && html.indexOf(msg) + 1)
return;
if (msg && html.indexOf(msg) === -1)
html = html.replace(omsg, '') + msg;
else if (!msg)
html = html.replace(L.u_tu, '').replace(L.u_ts, '');
qsr('#u2turbomsg');
if (!msg)
return;
if (html !== ohtml)
ebi('u2foot').innerHTML = html;
var o = mknod('div', 'u2turbomsg');
o.innerHTML = msg;
ebi('u2foot').appendChild(o);
}
draw_turbo();
function set_potato() {
pvis.potato();
set_fsearch();
}
function set_fsearch(new_state) {
var fixed = false;
@@ -2073,6 +2328,10 @@ function up2k_init(subtle) {
ebi('u2tab').className = (uc.fsearch ? 'srch ' : 'up ') + pvis.act;
var potato = uc.potato && !uc.fsearch;
ebi('u2cards').style.display = ebi('u2tab').style.display = potato ? 'none' : '';
ebi('u2mu').style.display = potato ? '' : 'none';
draw_turbo();
onresize();
}
@@ -2096,9 +2355,12 @@ function up2k_init(subtle) {
}
}
function nop(e) {
ev(e);
this.click();
function set_u2sort() {
if (u2sort.indexOf('f') < 0)
return;
bcfg_set('u2sort', uc.az = u2sort.indexOf('n') + 1);
localStorage.removeItem('u2sort');
}
ebi('nthread_add').onclick = function (e) {
@@ -2123,7 +2385,12 @@ function up2k_init(subtle) {
if (parallel_uploads < 1)
bumpthread(1);
return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis, "st": st, "uc": uc }
setTimeout(function () {
for (var a = 0; a < up2k_hooks.length; a++)
up2k_hooks[a]();
}, 1);
return r;
}

View File

@@ -16,10 +16,18 @@ var wah = '',
var ebi = document.getElementById.bind(document),
QS = document.querySelector.bind(document),
QSA = document.querySelectorAll.bind(document),
mknod = document.createElement.bind(document),
XHR = XMLHttpRequest;
function mknod(et, eid) {
var ret = document.createElement(et);
if (eid)
ret.id = eid;
return ret;
}
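// example usage, matching the call sites elsewhere in this diff:  var d = mknod('div', 'acc_info');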
function qsr(sel) {
var el = QS(sel);
if (el)
@@ -160,8 +168,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
try {
var exbox = ebi('exbox');
if (!exbox) {
exbox = mknod('div');
exbox.setAttribute('id', 'exbox');
exbox = mknod('div', 'exbox');
document.body.appendChild(exbox);
var s = mknod('style');
@@ -222,6 +229,11 @@ function ev(e) {
}
function noope(e) {
ev(e);
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
if (!String.prototype.endsWith)
String.prototype.endsWith = function (search, this_len) {
@@ -630,7 +642,7 @@ function humansize(b, terse) {
function humantime(v) {
if (v >= 60 * 60 * 24)
return v;
return shumantime(v);
try {
return /.*(..:..:..).*/.exec(new Date(v * 1000).toUTCString())[1];
@@ -641,12 +653,39 @@ function humantime(v) {
}
function shumantime(v) {
if (v < 10)
return f2f(v, 2) + 's';
if (v < 60)
return f2f(v, 1) + 's';
v = parseInt(v);
var st = [[60 * 60 * 24, 60 * 60, 'd'], [60 * 60, 60, 'h'], [60, 1, 'm']];
for (var a = 0; a < st.length; a++) {
var m1 = st[a][0],
m2 = st[a][1],
ch = st[a][2];
if (v < m1)
continue;
var v1 = parseInt(v / m1),
v2 = ('0' + parseInt((v % m1) / m2)).slice(-2);
return v1 + ch + (v1 >= 10 ? '' : v2);
}
}
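// rough examples of the output (assuming f2f rounds to the given number of decimals):
//   shumantime(5.3)     -> "5.30s"
//   shumantime(45)      -> "45.0s"
//   shumantime(83)      -> "1m23"
//   shumantime(4980)    -> "1h23"
//   shumantime(1468800) -> "17d"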
function clamp(v, a, b) {
return Math.min(Math.max(v, a), b);
}
function has(haystack, needle) {
try { return haystack.includes(needle); } catch (ex) { }
for (var a = 0; a < haystack.length; a++)
if (haystack[a] == needle)
return true;
@@ -866,8 +905,8 @@ var timer = (function () {
var tt = (function () {
var r = {
"tt": mknod("div"),
"th": mknod("div"),
"tt": mknod("div", 'tt'),
"th": mknod("div", 'tth'),
"en": true,
"el": null,
"skip": false,
@@ -875,8 +914,6 @@ var tt = (function () {
};
r.th.innerHTML = '?';
r.tt.setAttribute('id', 'tt');
r.th.setAttribute('id', 'tth');
document.body.appendChild(r.tt);
document.body.appendChild(r.th);
@@ -1045,9 +1082,8 @@ var toast = (function () {
var r = {},
te = null,
scrolling = false,
obj = mknod('div');
obj = mknod('div', 'toast');
obj.setAttribute('id', 'toast');
document.body.appendChild(obj);
r.visible = false;
r.txt = null;
@@ -1130,8 +1166,7 @@ var modal = (function () {
r.busy = false;
r.show = function (html) {
o = mknod('div');
o.setAttribute('id', 'modal');
o = mknod('div', 'modal');
o.innerHTML = '<table><tr><td><div id="modalc">' + html + '</div></td></tr></table>';
document.body.appendChild(o);
document.addEventListener('keydown', onkey);
@@ -1186,7 +1221,8 @@ var modal = (function () {
return;
setTimeout(function () {
ebi('modal-ok').focus();
if (ctr = ebi('modal-ok'))
ctr.focus();
}, 20);
ev(e);
}
@@ -1496,9 +1532,8 @@ function xhrchk(xhr, prefix, e404) {
fun = toast.warn;
qsr('#cf_frame');
var fr = mknod('iframe');
var fr = mknod('iframe', 'cf_frame');
fr.src = '/?cf_challenge';
fr.setAttribute('id', 'cf_frame');
document.body.appendChild(fr);
}

docs/notes.md Normal file
View File

@@ -0,0 +1,10 @@
# up2k.js
## potato detection
* tsk 0.25/8.4/31.5 bzw 1.27/22.9/18 = 77% (38.4s, 49.7s)
* 4c locale #1313, ff-102,deb-11 @ ryzen4500u wifi -> win10
* profiling shows 2sec heavy gc every 2sec
* tsk 0.41/4.1/10 bzw 1.41/9.9/7 = 73% (13.3s, 18.2s)
* 4c locale #1313, ch-103,deb-11 @ ryzen4500u wifi -> win10
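* the three numbers per line appear to be the `ipu / spu / ir` values logged by up2k.js; a rough reconstruction of that sampling (as in the tasker change; bzw does the same with its own counters):

```js
var n = st.modn - 10,                  // number of samples (first 10 are discarded)
    ipu = st.modv / n,                 // avg time spent inside the loop body per sample
    spu = (Date.now() - st.mod0) / n,  // avg wallclock time per sample
    ir = spu / ipu;                    // wallclock-to-work ratio, i.e. time lost elsewhere (gc etc.)
```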

View File

@@ -48,7 +48,10 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
## time between first and last upload
python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}'
# or, if the client you're measuring dies for ~15sec every once in a while and you wanna filter those out,
cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} t<p{t+=86400} !p{a=t;p=t;r=0;next} t-p>1{printf "%.3f += %.3f - %.3f (%.3f) # %.3f -> %.3f\n",r,p,a,p-a,p,t;r+=p-a;a=t} {p=t} END {print r+p-a}'
##
@@ -182,7 +185,7 @@ brew install python@2
pip install virtualenv
# readme toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|exclude-patterns|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)

View File

@@ -2,9 +2,9 @@ FROM alpine:3.16
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \
ver_marked=4.0.17 \
ver_marked=4.0.18 \
ver_mde=2.16.1 \
ver_codemirror=5.65.6 \
ver_codemirror=5.65.7 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3

View File

@@ -106,7 +106,7 @@ tmpdir="$(
[ $repack ] && {
old="$tmpdir/pe-copyparty"
echo "repack of files in $old"
cp -pR "$old/"*{j2,ftp,copyparty} .
cp -pR "$old/"*{py2,j2,ftp,copyparty} .
}
[ $repack ] || {

View File

@@ -22,13 +22,16 @@ v=$1
}
rm -f ../dist/copyparty-sfx.*
shift
./make-sfx.sh "$@"
f=../dist/copyparty-sfx.py
./make-sfx.sh
$f -h
[ -e $f ] ||
f=../dist/copyparty-sfx-gz.py
$f -h
while true; do
mv $f $f.$(wc -c <$f | awk '{print$1}')
./make-sfx.sh re $ar
./make-sfx.sh re "$@"
done
# git tag -d v$v; git push --delete origin v$v

View File

@@ -224,11 +224,15 @@ def unpack():
"""unpacks the tar yielded by `data`"""
name = "pe-copyparty"
tag = "v" + str(STAMP)
withpid = "{}.{}".format(name, os.getpid())
top = tempfile.gettempdir()
opj = os.path.join
final = opj(top, name)
mine = opj(top, withpid)
for suf in range(0, 9001):
withpid = "{}.{}.{}".format(name, os.getpid(), suf)
mine = opj(top, withpid)
if not os.path.exists(mine):
break
tar = opj(mine, "tar")
try:
@@ -360,11 +364,12 @@ def utime(top):
def confirm(rv):
msg()
msg("retcode", rv if rv else traceback.format_exc())
msg("*** hit enter to exit ***")
try:
raw_input() if PY2 else input()
except:
pass
if WINDOWS:
msg("*** hit enter to exit ***")
try:
raw_input() if PY2 else input()
except:
pass
sys.exit(rv or 1)

View File

@@ -10,9 +10,10 @@ import pprint
import tarfile
import tempfile
import unittest
from argparse import Namespace
from tests import util as tu
from tests.util import Cfg
from copyparty.authsrv import AuthSrv
from copyparty.httpcli import HttpCli
@@ -22,56 +23,6 @@ def hdr(query):
return h.format(query).encode("utf-8")
class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
super(Cfg, self).__init__(
a=a or [],
v=v or [],
c=c,
rproxy=0,
rsp_slp=0,
s_wr_slp=0,
s_wr_sz=512 * 1024,
ed=False,
nw=False,
unpost=600,
no_mv=False,
no_del=False,
no_zip=False,
no_thumb=False,
no_athumb=False,
no_vthumb=False,
no_voldump=True,
no_scandir=False,
no_sendfile=True,
no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False,
nih=True,
mtp=[],
mte="a",
mth="",
textfiles="",
doctitle="",
html_head="",
lang="eng",
theme=0,
themes=0,
turbo=0,
logout=573,
hist=None,
no_idx=None,
no_hash=None,
force_js=False,
no_robots=False,
js_browser=None,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}
)
class TestHttpCli(unittest.TestCase):
def setUp(self):
self.td = tu.get_ramdisk()

View File

@@ -8,44 +8,14 @@ import shutil
import tempfile
import unittest
from textwrap import dedent
from argparse import Namespace
from tests import util as tu
from tests.util import Cfg
from copyparty.authsrv import AuthSrv, VFS
from copyparty import util
class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode force_js no_robots no_thumb no_athumb no_vthumb"
ex = {k: False for k in ex.split()}
ex2 = {
"mtp": [],
"mte": "a",
"mth": "",
"doctitle": "",
"html_head": "",
"hist": None,
"no_idx": None,
"no_hash": None,
"js_browser": None,
"css_browser": None,
"no_voldump": True,
"re_maxage": 0,
"rproxy": 0,
"rsp_slp": 0,
"s_wr_slp": 0,
"s_wr_sz": 512 * 1024,
"lang": "eng",
"theme": 0,
"themes": 0,
"turbo": 0,
"logout": 573,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)
class TestVFS(unittest.TestCase):
def setUp(self):
self.td = tu.get_ramdisk()

View File

@@ -7,6 +7,7 @@ import threading
import tempfile
import platform
import subprocess as sp
from argparse import Namespace
WINDOWS = platform.system() == "Windows"
@@ -89,6 +90,40 @@ def get_ramdisk():
return subdir(ret)
class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ka = {}
ex = "e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp ed emp force_js ihead no_acode no_athumb no_del no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nid nih nw"
ka.update(**{k: False for k in ex.split()})
ex = "no_rescan no_sendfile no_voldump"
ka.update(**{k: True for k in ex.split()})
ex = "css_browser hist js_browser no_hash no_idx"
ka.update(**{k: None for k in ex.split()})
ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df"
ka.update(**{k: 0 for k in ex.split()})
ex = "doctitle favico html_head mth textfiles"
ka.update(**{k: "" for k in ex.split()})
super(Cfg, self).__init__(
a=a or [],
v=v or [],
c=c,
s_wr_sz=512 * 1024,
unpost=600,
u2sort="s",
mtp=[],
mte="a",
lang="eng",
logout=573,
**ka
)
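# sketch of how a test might construct it (the volume/account values are hypothetical):
#   cfg = Cfg(v=["srv::rw"], a=["ed:hunter2"])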
class NullBroker(object):
def say(*args):
pass